def parse_with_retrieved(self, retreived):
    """Parse the retrieved folder of a Yambo calculation and store the results.

    :param retreived: the retrieved node (parameter name kept as-is, typo included,
        since callers may pass it by keyword)
    """
    from aiida.common.exceptions import InvalidOperation
    from aiida.common import aiidalogger

    # Suppose at the start that the job is unsuccessful, unless proven otherwise.
    successful = False

    # Check whether the yambo calc was an initialisation (p2y).
    try:
        settings_dict = self._calc.inp.settings.get_dict()
        settings_dict = _uppercase_dict(settings_dict, dict_name='settings')
    except AttributeError:
        settings_dict = {}

    initialise = settings_dict.pop('INITIALISE', None)

    # Select the retrieved folder object and see what is inside it.
    out_folder = self._calc.get_retrieved_node()
    list_of_files = out_folder.get_folder_list()

    try:
        input_params = self._calc.inp.parameters.get_dict()
    except AttributeError:
        # Missing input parameters are only acceptable for a pure initialisation run.
        if not initialise:
            raise ParsingError("Input parameters not found!")
        else:
            input_params = {}

    # Retrieve the cell: if parent_calc is a YamboCalculation we must find the
    # original PwCalculation going back through the graph tree.
    parent_calc = self._calc.inp.parent_calc_folder.inp.remote_folder
    cell = {}
    if isinstance(parent_calc, YamboCalculation):
        has_found_cell = False
        while not has_found_cell:
            try:
                cell = parent_calc.inp.structure.cell
                has_found_cell = True
            except AttributeError:
                # Walk one step further up the provenance chain.
                parent_calc = parent_calc.inp.parent_calc_folder.inp.remote_folder
    elif isinstance(parent_calc, PwCalculation):
        cell = self._calc.inp.parent_calc_folder.inp.remote_folder.inp.structure.cell

    output_params = {'warnings': [], 'errors': [], 'yambo_wrote': False}
    new_nodes_list = []
    ndbqp = {}
    ndbhf = {}

    try:
        results = YamboFolder(out_folder.get_abs_path())
    except Exception as e:
        # BUGFIX: the original assigned to an undefined name `success`; the
        # flag used by this parser is `successful`.
        successful = False
        raise ParsingError("Unexpected behavior of YamboFolder: %s" % e)
def _generate_NEBinputdata(self, neb_parameters, settings_dict):
    """Build the NEB part ('&PATH' namelist plus cards) of the input file.

    :param neb_parameters: parameters node whose ``get_dict()`` holds the NEB input
    :param settings_dict: settings dictionary with uppercase keys; the
        'CLIMBING_IMAGES' key is popped here when a manual ci_scheme is used
    :return: the NEB input file content as a string
    :raises InputValidationError: for invalid climbing images or leftover namelists
    """
    # Namelist/card names (first level) are uppercased and their flags
    # (second level) lowercased; deeper levels are left untouched.
    input_params = _uppercase_dict(neb_parameters.get_dict(), dict_name='parameters')
    input_params = dict(
        (name, _lowercase_dict(flags, dict_name=name))
        for name, flags in input_params.iteritems()
    )

    # There are no blocked keywords for the NEB input.
    # Make sure the compulsory 'PATH' namelist exists, even if empty.
    input_params.setdefault('PATH', {})

    # A manual climbing-image scheme requires the corresponding card.
    climbing_image = input_params['PATH'].get('ci_scheme', 'no-ci').lower() in ['manual']
    if climbing_image:
        try:
            climbing_image_list = settings_dict.pop("CLIMBING_IMAGES")
        except KeyError:
            raise InputValidationError("No climbing image specified for this calculation")
        if not isinstance(climbing_image_list, list):
            raise InputValidationError("Climbing images should be provided as a list")
        num_of_images = input_params['PATH'].get('num_of_images', 2)
        # Climbing images must be strictly interior images.
        if any(image < 2 or image >= num_of_images for image in climbing_image_list):
            raise InputValidationError("The climbing images should be in the range between the first "
                                       "and the last image")

        climbing_image_card = "CLIMBING_IMAGES\n"
        climbing_image_card += ", ".join([str(_) for _ in climbing_image_list]) + "\n"

    # Write the '&PATH' namelist; it is popped so that leftovers are detected below.
    pieces = ["&PATH\n"]
    namelist = input_params.pop('PATH', {})
    for key, value in sorted(namelist.iteritems()):
        pieces.append(convert_input_to_namelist_entry(key, value))
    pieces.append("/\n")

    # Cards follow the namelist.
    if climbing_image:
        pieces.append(climbing_image_card)

    # Anything still left in input_params is an unknown namelist.
    if input_params:
        raise InputValidationError(
            "The following namelists are specified in input_params, but are "
            "not valid namelists for the current type of calculation: "
            "{}".format(",".join(input_params.keys())))

    return "".join(pieces)
def validate_input_parameters(self, input_nodes):
    """Validate the parameters input node and build the input parameter dictionary.

    The returned dictionary contains all the necessary namelists and flags that
    should be written to the input file of the calculation.

    :param input_nodes: dictionary of sanitized and validated input nodes
    :returns: input_parameters a dictionary with input namelists and their flags
    :raises InputValidationError: when a compulsory namelist is missing or a
        blocked keyword is set explicitly
    :raises NotImplementedError: for explicit qpoints or meshes with non-zero offset
    """
    qpoints = input_nodes[self.get_linkname('qpoints')]
    parameters = input_nodes[self.get_linkname('parameters')].get_dict()

    # First-level keys (namelist and card names) go uppercase,
    # second-level keys (their flags) go lowercase.
    input_parameters = _uppercase_dict(parameters, dict_name='parameters')
    input_parameters = dict(
        (name, _lowercase_dict(flags, dict_name=name))
        for name, flags in input_parameters.iteritems()
    )

    # Every compulsory namelist has to be present.
    for namelist in self._compulsory_namelists:
        if namelist not in input_parameters:
            raise InputValidationError(
                "the required namelist '{}' was not defined".format(
                    namelist))

    # None of the blocked keywords may be defined explicitly by the user.
    for namelist, flag in self._blocked_keywords:
        if flag in input_parameters.get(namelist, {}):
            raise InputValidationError(
                "explicit definition of the '{}' "
                "flag in the '{}' namelist or card is not allowed".format(
                    flag, namelist))

    # Only uniform, unshifted qpoint meshes are supported.
    try:
        mesh, offset = qpoints.get_kpoints_mesh()
    except AttributeError:
        raise NotImplementedError(
            'support for explicit qpoints is not implemented, only uniform meshes'
        )

    if any(component != 0. for component in offset):
        raise NotImplementedError(
            'support for qpoint meshes with non-zero offsets is not implemented'
        )

    # Force the flags that this plugin controls.
    inputhp = input_parameters['INPUTHP']
    inputhp['iverbosity'] = 2
    inputhp['outdir'] = self._OUTPUT_SUBFOLDER
    inputhp['prefix'] = self._PREFIX
    inputhp['nq1'] = mesh[0]
    inputhp['nq2'] = mesh[1]
    inputhp['nq3'] = mesh[2]

    return input_parameters
def _prepare_pw(cls, folder, calculation):
    """Write the PW input file for the given ``calculation`` type into ``folder``.

    :param folder: sandbox folder in which the input file is created
    :param calculation: the value for CONTROL.calculation, also used to pick
        the target filename attribute ``_INPUT_PW_<CALCULATION>_FILE``
    """
    # Work on a deep copy so the stored input parameters node is not mutated.
    parameters = copy.deepcopy(cls.inputs.pw_parameters)
    parameters['CONTROL']['calculation'] = calculation
    parameters['SYSTEM']['nosym'] = True

    settings = _uppercase_dict(cls.inputs.pw_settings.get_dict(), dict_name='settings')
    input_filecontent, _ = Temp._generate_PWCPinputdata(
        parameters,
        settings,
        cls.inputs.pseudos,
        cls.inputs.structure,
    )

    input_filename = getattr(cls, '_INPUT_PW_{}_FILE'.format(calculation.upper()))
    with folder.open(input_filename, 'w') as handle:
        handle.write(input_filecontent)
def _generate_input_files(cls, neb_parameters, settings_dict):
    """Generate the input data for the NEB part of the calculation."""
    # Namelist/card names (first level) are uppercased and their flags
    # (second level) lowercased; deeper levels are left unchanged.
    input_params = _uppercase_dict(neb_parameters.get_dict(), dict_name='parameters')
    input_params = {name: _lowercase_dict(flags, dict_name=name) for name, flags in input_params.items()}

    # Force default values for blocked keywords. NOTE: this is different from PW/CP
    for blocked in cls._blocked_keywords:
        namelist = blocked[0].upper()
        key = blocked[1].lower()
        value = blocked[2]
        if namelist not in input_params:
            input_params[namelist] = {}
        elif key in input_params[namelist]:
            raise InputValidationError(
                f"You cannot specify explicitly the '{key}' key in the '{namelist}' namelist."
            )
        input_params[namelist][key] = value

    # The compulsory 'PATH' namelist must exist, even if empty.
    input_params.setdefault('PATH', {})

    # In case of climbing image, we need the corresponding card.
    ci_scheme = input_params['PATH'].get('ci_scheme', 'no-ci').lower()
    climbing_image_list = settings_dict.pop('CLIMBING_IMAGES', None)
    manual_climbing_image = ci_scheme == 'manual'

    if manual_climbing_image:
        if climbing_image_list is None:
            raise InputValidationError(
                "'ci_scheme' is {}, but no climbing images were specified for this "
                'calculation.'.format(ci_scheme)
            )
        if not isinstance(climbing_image_list, list):
            raise InputValidationError('Climbing images should be provided as a list.')
        num_of_images = input_params['PATH'].get('num_of_images', 2)
        # Climbing images must be strictly interior images.
        if any(index < 2 or index >= num_of_images for index in climbing_image_list):
            raise InputValidationError(
                'The climbing images should be in the range between the first '
                'and the last image (excluded).'
            )

        climbing_image_card = 'CLIMBING_IMAGES\n'
        climbing_image_card += ', '.join(str(index) for index in climbing_image_list) + '\n'
    elif climbing_image_list is not None:
        raise InputValidationError(f"Climbing images are not accepted when 'ci_scheme' is {ci_scheme}.")

    # Write the '&PATH' namelist; popped so that leftovers are detected below.
    namelist = input_params.pop('PATH', {})
    chunks = ['&PATH\n']
    for key, value in sorted(namelist.items()):
        chunks.append(convert_input_to_namelist_entry(key, value))
    chunks.append('/\n')

    # CI cards follow the namelist.
    if manual_climbing_image:
        chunks.append(climbing_image_card)

    # Anything still left in input_params is an unknown namelist.
    if input_params:
        raise InputValidationError(
            'The following namelists are specified in input_params, but are '
            'not valid namelists for the current type of calculation: '
            '{}'.format(','.join(list(input_params.keys())))
        )

    return ''.join(chunks)
def prepare_for_submission(self, folder):
    """Prepare the calculation job for submission by transforming input nodes into input files.

    In addition to the input files being written to the sandbox folder, a `CalcInfo` instance will be returned that
    contains lists of files that need to be copied to the remote machine before job submission, as well as file lists
    that are to be retrieved after job completion.

    :param folder: a sandbox folder to temporarily write files on disk.
    :return: :py:`~aiida.common.datastructures.CalcInfo` instance.
    """
    # pylint: disable=too-many-branches,too-many-statements
    import numpy as np

    local_copy_list = []
    remote_copy_list = []
    remote_symlink_list = []

    # Convert settings dictionary to have uppercase keys, or create an empty one if none was given.
    if 'settings' in self.inputs:
        settings_dict = _uppercase_dict(self.inputs.settings.get_dict(), dict_name='settings')
    else:
        settings_dict = {}

    first_structure = self.inputs.first_structure
    last_structure = self.inputs.last_structure

    # Check that the first and last image have the same cell
    # NOTE(review): 'fist' in the two messages below is a typo for 'first'; left
    # unchanged here since external code may match on the message text.
    if abs(np.array(first_structure.cell) - np.array(last_structure.cell)).max() > 1.e-4:
        raise InputValidationError('Different cell in the fist and last image')

    # Check that the first and last image have the same number of sites
    if len(first_structure.sites) != len(last_structure.sites):
        raise InputValidationError('Different number of sites in the fist and last image')

    # Check that sites in the initial and final structure have the same kinds
    if first_structure.get_site_kindnames() != last_structure.get_site_kindnames():
        raise InputValidationError(
            'Mismatch between the kind names and/or order between '
            'the first and final image'
        )

    # Check that a pseudo potential was specified for each kind present in the `StructureData`
    # self.inputs.pw.pseudos is a plumpy.utils.AttributesFrozendict
    kindnames = [kind.name for kind in first_structure.kinds]
    if set(kindnames) != set(self.inputs.pw.pseudos.keys()):
        raise InputValidationError(
            'Mismatch between the defined pseudos and the list of kinds of the structure.\nPseudos: {};\n'
            'Kinds: {}'.format(', '.join(list(self.inputs.pw.pseudos.keys())), ', '.join(list(kindnames)))
        )

    ##############################
    # END OF INITIAL INPUT CHECK #
    ##############################

    # Create the subfolder that will contain the pseudopotentials
    folder.get_subfolder(self._PSEUDO_SUBFOLDER, create=True)

    # Create the subfolder for the output data (sometimes Quantum ESPRESSO codes crash if the folder does not exist)
    folder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

    # We first prepare the NEB-specific input file.
    neb_input_filecontent = self._generate_input_files(self.inputs.parameters, settings_dict)
    with folder.open(self.inputs.metadata.options.input_filename, 'w') as handle:
        handle.write(neb_input_filecontent)

    # We now generate the PW input files for each input structure
    local_copy_pseudo_list = []
    for i, structure in enumerate([first_structure, last_structure]):
        # We need to a pass a copy of the settings_dict for each structure
        this_settings_dict = copy.deepcopy(settings_dict)
        pw_input_filecontent, this_local_copy_pseudo_list = PwCalculation._generate_PWCPinputdata(  # pylint: disable=protected-access
            self.inputs.pw.parameters, this_settings_dict, self.inputs.pw.pseudos, structure, self.inputs.pw.kpoints
        )
        local_copy_pseudo_list += this_local_copy_pseudo_list
        # Files are named pw_1.in and pw_2.in for the first and last image.
        with folder.open(f'pw_{i + 1}.in', 'w') as handle:
            handle.write(pw_input_filecontent)

    # We need to pop the settings that were used in the PW calculations
    # (i.e. keys consumed by _generate_PWCPinputdata; `this_settings_dict`
    # here is the copy left over from the last loop iteration).
    for key in list(settings_dict.keys()):
        if key not in list(this_settings_dict.keys()):
            settings_dict.pop(key)

    # We avoid to copy twice the same pseudopotential to the same filename
    local_copy_pseudo_list = set(local_copy_pseudo_list)

    # We check that two different pseudopotentials are not copied
    # with the same name (otherwise the first is overwritten)
    if len({filename for (uuid, filename, local_path) in local_copy_pseudo_list}) < len(local_copy_pseudo_list):
        raise InputValidationError('Same filename for two different pseudopotentials')

    local_copy_list += local_copy_pseudo_list

    # If present, add also the Van der Waals table to the pseudo dir. Note that the name of the table is not checked
    # but should be the one expected by Quantum ESPRESSO.
    vdw_table = self.inputs.get('pw.vdw_table', None)
    if vdw_table:
        local_copy_list.append(
            (vdw_table.uuid, vdw_table.filename, os.path.join(self._PSEUDO_SUBFOLDER, vdw_table.filename))
        )

    # operations for restart
    parent_calc_folder = self.inputs.get('parent_folder', None)
    symlink = settings_dict.pop('PARENT_FOLDER_SYMLINK', self._default_symlink_usage)  # a boolean
    if symlink:
        if parent_calc_folder is not None:
            # I put the symlink to the old parent ./out folder
            remote_symlink_list.append((
                parent_calc_folder.computer.uuid,
                os.path.join(parent_calc_folder.get_remote_path(), self._OUTPUT_SUBFOLDER,
                             '*'),  # asterisk: make individual symlinks for each file
                self._OUTPUT_SUBFOLDER
            ))
            # and to the old parent prefix.path
            remote_symlink_list.append((
                parent_calc_folder.computer.uuid,
                os.path.join(parent_calc_folder.get_remote_path(), f'{self._PREFIX}.path'),
                f'{self._PREFIX}.path'
            ))
    else:
        # copy remote output dir and .path file, if specified
        if parent_calc_folder is not None:
            remote_copy_list.append((
                parent_calc_folder.computer.uuid,
                os.path.join(parent_calc_folder.get_remote_path(), self._OUTPUT_SUBFOLDER, '*'),
                self._OUTPUT_SUBFOLDER
            ))
            # and copy the old parent prefix.path
            remote_copy_list.append((
                parent_calc_folder.computer.uuid,
                os.path.join(parent_calc_folder.get_remote_path(), f'{self._PREFIX}.path'),
                f'{self._PREFIX}.path'
            ))

    # here we may create an aiida.EXIT file
    create_exit_file = settings_dict.pop('ONLY_INITIALIZATION', False)
    if create_exit_file:
        exit_filename = f'{self._PREFIX}.EXIT'
        with folder.open(exit_filename, 'w') as handle:
            handle.write('\n')

    calcinfo = CalcInfo()
    codeinfo = CodeInfo()

    calcinfo.uuid = self.uuid
    cmdline_params = settings_dict.pop('CMDLINE', [])

    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list
    calcinfo.remote_symlink_list = remote_symlink_list

    # In neb calculations there is no input read from standard input!!
    codeinfo.cmdline_params = (['-input_images', '2'] + list(cmdline_params))
    codeinfo.stdout_name = self.inputs.metadata.options.output_filename
    codeinfo.code_uuid = self.inputs.code.uuid
    calcinfo.codes_info = [codeinfo]

    # Retrieve the output files and the xml files
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append(self.inputs.metadata.options.output_filename)
    calcinfo.retrieve_list.append((
        os.path.join(self._OUTPUT_SUBFOLDER, self._PREFIX + '_*[0-9]', 'PW.out'),  # source relative path (globbing)
        '.',  # destination relative path
        2  # depth to preserve
    ))

    for xml_filepath in self.xml_filepaths:  # pylint: disable=not-an-iterable
        calcinfo.retrieve_list.append([xml_filepath, '.', 3])

    calcinfo.retrieve_list += settings_dict.pop('ADDITIONAL_RETRIEVE_LIST', [])
    calcinfo.retrieve_list += self._internal_retrieve_list

    # We might still have parser options in the settings dictionary: pop them.
    _pop_parser_options(self, settings_dict)

    # Any key still left in `settings_dict` was not consumed by anything above
    # and is therefore considered invalid.
    if settings_dict:
        unknown_keys = ', '.join(list(settings_dict.keys()))
        raise InputValidationError(f'`settings` contained unexpected keys: {unknown_keys}')

    return calcinfo
def prepare_for_submission(self, folder):
    """Create the input files from the input nodes passed to this instance of the `CalcJob`.

    :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk
    :return: `aiida.common.datastructures.CalcInfo` instance
    """
    local_copy_list = []
    remote_copy_list = []
    remote_symlink_list = []

    # Convert the settings dictionary to have uppercase keys, or create an empty one.
    if 'settings' in self.inputs:
        settings = _uppercase_dict(self.inputs.settings.get_dict(),
                                   dict_name='settings')
    else:
        settings = {}

    # ph.x always restarts from a parent calculation: resolve it from the remote folder.
    parent_folder = self.inputs.parent_folder
    parent_calcs = parent_folder.get_incoming(
        node_class=orm.CalcJobNode).all()

    if not parent_calcs:
        raise exceptions.NotExistent(
            'parent_folder<{}> has no parent calculation'.format(
                parent_folder.pk))
    elif len(parent_calcs) > 1:
        raise exceptions.UniquenessError(
            'parent_folder<{}> has multiple parent calculations'.format(
                parent_folder.pk))

    parent_calc = parent_calcs[0].node

    # If the parent calculation is a `PhCalculation` we are restarting
    restart_flag = parent_calc.process_type == 'aiida.calculations:quantumespresso.ph'

    # Also, the parent calculation must be on the same computer
    if not self.node.computer.uuid == parent_calc.computer.uuid:
        raise exceptions.InputValidationError(
            'Calculation has to be launched on the same computer as that of the parent: {}'
            .format(parent_calc.computer.get_name()))

    # put by default, default_parent_output_folder = ./out
    try:
        default_parent_output_folder = parent_calc.process_class._OUTPUT_SUBFOLDER
    except AttributeError:
        # Fall back to the legacy accessor on old calculation nodes.
        try:
            default_parent_output_folder = parent_calc._get_output_folder()
        except AttributeError:
            raise exceptions.InputValidationError(
                'parent calculation does not have a default output subfolder'
            )

    parent_calc_out_subfolder = settings.pop('PARENT_CALC_OUT_SUBFOLDER',
                                             default_parent_output_folder)

    # I put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
    parameters = _uppercase_dict(self.inputs.parameters.get_dict(),
                                 dict_name='parameters')
    parameters = {
        k: _lowercase_dict(v, dict_name=k)
        for k, v in six.iteritems(parameters)
    }

    # When preparing for a subsequent d3.x run, the drho-related flags are
    # controlled by the plugin and therefore become blocked for the user.
    prepare_for_d3 = settings.pop('PREPARE_FOR_D3', False)
    if prepare_for_d3:
        # NOTE(review): if `_blocked_keywords` is a class-level *list*, `+=`
        # mutates it in place and the extra entries persist across instances /
        # submissions — confirm it is a tuple or an instance attribute.
        self._blocked_keywords += [('INPUTPH', 'fildrho'),
                                   ('INPUTPH', 'drho_star%open'),
                                   ('INPUTPH', 'drho_star%ext'),
                                   ('INPUTPH', 'drho_star%dir')]

    for namelist, flag in self._blocked_keywords:
        if namelist in parameters:
            if flag in parameters[namelist]:
                raise exceptions.InputValidationError(
                    "Cannot specify explicitly the '{}' flag in the '{}' namelist or card."
                    .format(flag, namelist))

    if 'INPUTPH' not in parameters:
        raise exceptions.InputValidationError(
            'required namelist INPUTPH not specified')

    # Flags forced by the plugin.
    parameters['INPUTPH']['outdir'] = self._OUTPUT_SUBFOLDER
    parameters['INPUTPH']['iverbosity'] = 1
    parameters['INPUTPH']['prefix'] = self._PREFIX
    parameters['INPUTPH']['fildyn'] = self._OUTPUT_DYNAMICAL_MATRIX_PREFIX
    if prepare_for_d3:
        parameters['INPUTPH']['fildrho'] = self._DRHO_PREFIX
        parameters['INPUTPH']['drho_star%open'] = True
        parameters['INPUTPH']['drho_star%ext'] = self._DRHO_STAR_EXT
        parameters['INPUTPH']['drho_star%dir'] = self._FOLDER_DRHO

    # Qpoints can be given either as a uniform mesh (ldisp mode) or as an
    # explicit list of points (appended after the namelists).
    try:
        mesh, offset = self.inputs.qpoints.get_kpoints_mesh()

        if any([i != 0. for i in offset]):
            raise NotImplementedError(
                'Computation of phonons on a mesh with non zero offset is not implemented, at the level of ph.x'
            )

        parameters['INPUTPH']['ldisp'] = True
        parameters['INPUTPH']['nq1'] = mesh[0]
        parameters['INPUTPH']['nq2'] = mesh[1]
        parameters['INPUTPH']['nq3'] = mesh[2]

        postpend_text = None
    except AttributeError:
        # this is the case where no mesh was set. Maybe it's a list
        try:
            list_of_points = self.inputs.qpoints.get_kpoints(
                cartesian=True)
        except AttributeError:
            # In this case, there are no info on the qpoints at all
            raise exceptions.InputValidationError(
                'Input `qpoints` contains neither a mesh nor a list of points'
            )

        # change to 2pi/a coordinates
        lattice_parameter = numpy.linalg.norm(self.inputs.qpoints.cell[0])
        list_of_points *= lattice_parameter / (2. * numpy.pi)

        # add here the list of point coordinates
        if len(list_of_points) > 1:
            parameters['INPUTPH']['qplot'] = True
            parameters['INPUTPH']['ldisp'] = True
            postpend_text = u'{}\n'.format(len(list_of_points))
            for points in list_of_points:
                postpend_text += u'{0:18.10f} {1:18.10f} {2:18.10f} 1\n'.format(
                    *points)
            # Note: the weight is fixed to 1, because ph.x calls these
            # things weights but they are not such. If they are going to
            # exist with the meaning of weights, they will be supported
        else:
            parameters['INPUTPH']['ldisp'] = False
            postpend_text = u''
            for points in list_of_points:
                postpend_text += u'{0:18.10f} {1:18.10f} {2:18.10f}\n'.format(
                    *points)

    # customized namelists, otherwise not present in the distributed ph code
    try:
        namelists_toprint = settings.pop('NAMELISTS')
        if not isinstance(namelists_toprint, list):
            raise exceptions.InputValidationError(
                "The 'NAMELISTS' value, if specified in the settings input "
                'node, must be a list of strings')
    except KeyError:
        # list of namelists not specified in the settings; do automatic detection
        namelists_toprint = self._compulsory_namelists

    # create a folder for the dynamical matrices
    if not restart_flag:  # if it is a restart, it will be copied over
        folder.get_subfolder(self._FOLDER_DYNAMICAL_MATRIX, create=True)

    with folder.open(self.metadata.options.input_filename,
                     'w') as infile:
        for namelist_name in namelists_toprint:
            infile.write(u'&{0}\n'.format(namelist_name))
            # namelist content; set to {} if not present, so that we leave an empty namelist
            namelist = parameters.pop(namelist_name, {})
            for key, value in sorted(six.iteritems(namelist)):
                infile.write(convert_input_to_namelist_entry(key, value))
            infile.write(u'/\n')

        # add list of qpoints if required
        if postpend_text is not None:
            infile.write(postpend_text)

    # Anything still left in `parameters` is an unknown namelist.
    if parameters:
        raise exceptions.InputValidationError(
            'The following namelists are specified in parameters, but are '
            'not valid namelists for the current type of calculation: '
            '{}'.format(','.join(list(parameters.keys()))))

    # copy the parent scratch
    symlink = settings.pop('PARENT_FOLDER_SYMLINK',
                           self._default_symlink_usage)  # a boolean
    if symlink:
        # I create a symlink to each file/folder in the parent ./out
        folder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

        remote_symlink_list.append(
            (parent_folder.computer.uuid,
             os.path.join(parent_folder.get_remote_path(),
                          parent_calc_out_subfolder, '*'),
             self._OUTPUT_SUBFOLDER))

        # I also create a symlink for the ./pseudo folder
        # TODO: suppress this when the recover option of QE will be fixed
        # (bug when trying to find pseudo file)
        remote_symlink_list.append(
            (parent_folder.computer.uuid,
             os.path.join(parent_folder.get_remote_path(),
                          self._get_pseudo_folder()),
             self._get_pseudo_folder()))
    else:
        # here I copy the whole folder ./out
        remote_copy_list.append(
            (parent_folder.computer.uuid,
             os.path.join(parent_folder.get_remote_path(),
                          parent_calc_out_subfolder),
             self._OUTPUT_SUBFOLDER))
        # I also copy the ./pseudo folder
        # TODO: suppress this when the recover option of QE will be fixed
        # (bug when trying to find pseudo file)
        remote_copy_list.append(
            (parent_folder.computer.uuid,
             os.path.join(parent_folder.get_remote_path(),
                          self._get_pseudo_folder()),
             self._get_pseudo_folder()))

    if restart_flag:  # in this case, copy in addition also the dynamical matrices
        if symlink:
            remote_symlink_list.append(
                (parent_folder.computer.uuid,
                 os.path.join(parent_folder.get_remote_path(),
                              self._FOLDER_DYNAMICAL_MATRIX),
                 self._FOLDER_DYNAMICAL_MATRIX))
        else:
            # copy the dynamical matrices
            # no need to copy the _ph0, since I copied already the whole ./out folder
            remote_copy_list.append(
                (parent_folder.computer.uuid,
                 os.path.join(parent_folder.get_remote_path(),
                              self._FOLDER_DYNAMICAL_MATRIX), '.'))

    # Create an `.EXIT` file if `only_initialization` flag in `settings` is set to `True`
    if settings.pop('ONLY_INITIALIZATION', False):
        with folder.open('{}.EXIT'.format(self._PREFIX), 'w') as handle:
            handle.write('\n')

    codeinfo = datastructures.CodeInfo()
    codeinfo.cmdline_params = (list(settings.pop('CMDLINE', [])) +
                               ['-in', self.metadata.options.input_filename])
    codeinfo.stdout_name = self.metadata.options.output_filename
    codeinfo.code_uuid = self.inputs.code.uuid

    calcinfo = datastructures.CalcInfo()
    calcinfo.uuid = str(self.uuid)
    calcinfo.codes_info = [codeinfo]
    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list
    calcinfo.remote_symlink_list = remote_symlink_list

    # Retrieve by default the output file and the xml file
    filepath_xml_tensor = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0',
                                       '{}.phsave'.format(self._PREFIX))
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append(self.metadata.options.output_filename)
    calcinfo.retrieve_list.append(self._FOLDER_DYNAMICAL_MATRIX)
    calcinfo.retrieve_list.append(
        os.path.join(filepath_xml_tensor,
                     self._OUTPUT_XML_TENSOR_FILE_NAME))
    calcinfo.retrieve_list += settings.pop('ADDITIONAL_RETRIEVE_LIST', [])

    # Any key still left in `settings` was not consumed above and is invalid.
    if settings:
        unknown_keys = ', '.join(list(settings.keys()))
        raise exceptions.InputValidationError(
            '`settings` contained unexpected keys: {}'.format(
                unknown_keys))

    return calcinfo
def prepare_for_submission(self, folder):  # pylint: disable=too-many-statements,too-many-branches
    """Prepare the calculation job for submission by transforming input nodes into input files.

    In addition to the input files being written to the sandbox folder, a `CalcInfo` instance will be returned that
    contains lists of files that need to be copied to the remote machine before job submission, as well as file lists
    that are to be retrieved after job completion.

    :param folder: a sandbox folder to temporarily write files on disk.
    :return: :py:`~aiida.common.datastructures.CalcInfo` instance.
    """

    def test_offset(offset):
        """Check if the grid has an offset."""
        if any([i != 0. for i in offset]):
            raise NotImplementedError(
                'Computation of electron-phonon on a mesh with non zero offset is not implemented, '
                'at the level of epw.x')

    # pylint: disable=too-many-statements,too-many-branches
    local_copy_list = []
    remote_copy_list = []
    remote_symlink_list = []

    # Convert the settings dictionary to have uppercase keys, or create an empty one.
    if 'settings' in self.inputs:
        settings = _uppercase_dict(self.inputs.settings.get_dict(), dict_name='settings')
    else:
        settings = {}

    # Copy nscf folder
    parent_folder_nscf = self.inputs.parent_folder_nscf
    parent_calc_nscf = parent_folder_nscf.creator

    if parent_calc_nscf is None:
        raise exceptions.NotExistent('parent_folder<{}> has no parent calculation'.format(parent_folder_nscf.pk))

    # Also, the parent calculation must be on the same computer
    if not self.node.computer.uuid == parent_calc_nscf.computer.uuid:
        raise exceptions.InputValidationError(
            'Calculation has to be launched on the same computer as that of the parent: {}'.format(
                parent_calc_nscf.computer.get_name()))

    # put by default, default_parent_output_folder = ./out
    parent_calc_out_subfolder_nscf = parent_calc_nscf.process_class._OUTPUT_SUBFOLDER  # pylint: disable=protected-access

    # Now phonon folder
    parent_folder_ph = self.inputs.parent_folder_ph
    parent_calc_ph = parent_folder_ph.creator

    # Also, the parent calculation must be on the same computer
    if not self.node.computer.uuid == parent_calc_ph.computer.uuid:
        raise exceptions.InputValidationError(
            'Calculation has to be launched on the same computer as that of the parent: {}'.format(
                parent_calc_ph.computer.get_name()))

    # I put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
    parameters = _uppercase_dict(self.inputs.parameters.get_dict(), dict_name='parameters')
    parameters = {k: _lowercase_dict(v, dict_name=k) for k, v in parameters.items()}

    if 'INPUTEPW' not in parameters:
        raise exceptions.InputValidationError('required namelist INPUTEPW not specified')

    # Flags forced by the plugin.
    parameters['INPUTEPW']['outdir'] = self._OUTPUT_SUBFOLDER
    parameters['INPUTEPW']['iverbosity'] = 1
    parameters['INPUTEPW']['prefix'] = self._PREFIX

    # The four coarse/fine q/k grids must all be uniform, unshifted meshes;
    # each one is translated into the corresponding nq*/nk* flags.
    try:
        mesh, offset = self.inputs.qpoints.get_kpoints_mesh()
        test_offset(offset)
        parameters['INPUTEPW']['nq1'] = mesh[0]
        parameters['INPUTEPW']['nq2'] = mesh[1]
        parameters['INPUTEPW']['nq3'] = mesh[2]
        postpend_text = None
    except NotImplementedError as exception:
        raise exceptions.InputValidationError('Cannot get the coarse q-point grid') from exception

    try:
        mesh, offset = self.inputs.kpoints.get_kpoints_mesh()
        test_offset(offset)
        parameters['INPUTEPW']['nk1'] = mesh[0]
        parameters['INPUTEPW']['nk2'] = mesh[1]
        parameters['INPUTEPW']['nk3'] = mesh[2]
        postpend_text = None
    except NotImplementedError as exception:
        raise exceptions.InputValidationError('Cannot get the coarse k-point grid') from exception

    try:
        mesh, offset = self.inputs.qfpoints.get_kpoints_mesh()
        test_offset(offset)
        parameters['INPUTEPW']['nqf1'] = mesh[0]
        parameters['INPUTEPW']['nqf2'] = mesh[1]
        parameters['INPUTEPW']['nqf3'] = mesh[2]
        postpend_text = None
    except NotImplementedError as exception:
        raise exceptions.InputValidationError('Cannot get the fine q-point grid') from exception

    try:
        mesh, offset = self.inputs.kfpoints.get_kpoints_mesh()
        test_offset(offset)
        parameters['INPUTEPW']['nkf1'] = mesh[0]
        parameters['INPUTEPW']['nkf2'] = mesh[1]
        parameters['INPUTEPW']['nkf3'] = mesh[2]
        postpend_text = None
    except NotImplementedError as exception:
        raise exceptions.InputValidationError('Cannot get the fine k-point grid') from exception

    # customized namelists, otherwise not present in the distributed epw code
    try:
        namelists_toprint = settings.pop('NAMELISTS')
        if not isinstance(namelists_toprint, list):
            raise exceptions.InputValidationError(
                "The 'NAMELISTS' value, if specified in the settings input "
                'node, must be a list of strings')
    except KeyError:
        # list of namelists not specified in the settings; do automatic detection
        namelists_toprint = self._compulsory_namelists

    # create the save folder with dvscf and dyn files.
    folder.get_subfolder(self._FOLDER_SAVE, create=True)

    # List of IBZ q-point to be added below EPW. To be removed when removed from EPW.
    # The q-points are collected from the parent ph.x output parameters.
    qibz_ar = []
    for key, value in sorted(parent_folder_ph.creator.outputs.output_parameters.get_dict().items()):
        if key.startswith('dynamical_matrix_'):
            qibz_ar.append(value['q_point'])

    qibz_node = orm.ArrayData()
    qibz_node.set_array('qibz', np.array(qibz_ar))

    list_of_points = qibz_node.get_array('qibz')
    # Number of q-point in the irreducible Brillouin Zone.
    # NOTE(review): `list_of_points` presumably has shape (n_qpoints, 3), so
    # `len(list_of_points[0, :])` is the number of coordinates (3), not the
    # number of q-points — confirm whether `len(list_of_points)` was intended.
    nqpt = len(list_of_points[0, :])

    # add here the list of point coordinates
    if len(list_of_points) > 1:
        postpend_text = '{} cartesian\n'.format(len(list_of_points))
        for points in list_of_points:
            postpend_text += '{0:18.10f} {1:18.10f} {2:18.10f} \n'.format(*points)

    with folder.open(self.metadata.options.input_filename, 'w') as infile:
        for namelist_name in namelists_toprint:
            infile.write('&{0}\n'.format(namelist_name))
            # namelist content; set to {} if not present, so that we leave an empty namelist
            namelist = parameters.pop(namelist_name, {})
            for key, value in sorted(namelist.items()):
                infile.write(convert_input_to_namelist_entry(key, value))
            infile.write('/\n')

        # add list of qpoints if required
        if postpend_text is not None:
            infile.write(postpend_text)

    # Anything still left in `parameters` is an unknown namelist.
    if parameters:
        raise exceptions.InputValidationError(
            'The following namelists are specified in parameters, but are '
            'not valid namelists for the current type of calculation: '
            '{}'.format(','.join(list(parameters.keys()))))

    # copy the parent scratch
    symlink = settings.pop('PARENT_FOLDER_SYMLINK', self._default_symlink_usage)  # a boolean
    if symlink:
        # I create a symlink to each file/folder in the parent ./out
        folder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

        remote_symlink_list.append((
            parent_folder_nscf.computer.uuid,
            os.path.join(parent_folder_nscf.get_remote_path(), parent_calc_out_subfolder_nscf, '*'),
            self._OUTPUT_SUBFOLDER
        ))
    else:
        # here I copy the whole folder ./out
        remote_copy_list.append((
            parent_folder_nscf.computer.uuid,
            os.path.join(parent_folder_nscf.get_remote_path(), parent_calc_out_subfolder_nscf),
            self._OUTPUT_SUBFOLDER
        ))

    prefix = self._PREFIX

    # Stage the dynamical matrices and dvscf files from the ph.x parent into ./save.
    for iqpt in range(1, nqpt + 1):
        label = str(iqpt)
        # NOTE(review): this dynamical-matrix-0 copy is re-appended on every
        # iteration of the loop — presumably it is only needed once; verify.
        tmp_path = os.path.join(self._FOLDER_DYNAMICAL_MATRIX, 'dynamical-matrix-0')
        remote_copy_list.append((
            parent_folder_ph.computer.uuid,
            os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
            'save/' + prefix + '.dyn_q0'))
        tmp_path = os.path.join(self._FOLDER_DYNAMICAL_MATRIX, 'dynamical-matrix-' + label)
        remote_copy_list.append((
            parent_folder_ph.computer.uuid,
            os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
            'save/' + prefix + '.dyn_q' + label))

        if iqpt == 1:
            # The first q-point's dvscf lives directly in _ph0, together with
            # the .phsave folder.
            tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/' + prefix + '.dvscf*')
            remote_copy_list.append((
                parent_folder_ph.computer.uuid,
                os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                'save/' + prefix + '.dvscf_q' + label))
            tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/' + prefix + '.phsave')
            remote_copy_list.append((
                parent_folder_ph.computer.uuid,
                os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                'save/'))
        else:
            # Subsequent q-points have their dvscf in per-q subfolders.
            tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/' + prefix + '.q_' + label + '/' + prefix + '.dvscf*')
            remote_copy_list.append((
                parent_folder_ph.computer.uuid,
                os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                'save/' + prefix + '.dvscf_q' + label))

    codeinfo = datastructures.CodeInfo()
    codeinfo.cmdline_params = (list(settings.pop('CMDLINE', [])) + ['-in', self.metadata.options.input_filename])
    codeinfo.stdout_name = self.metadata.options.output_filename
    codeinfo.code_uuid = self.inputs.code.uuid

    calcinfo = datastructures.CalcInfo()
    calcinfo.codes_info = [codeinfo]
    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list
    calcinfo.remote_symlink_list = remote_symlink_list

    # Retrieve by default the output file
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append(self.metadata.options.output_filename)
    calcinfo.retrieve_list += settings.pop('ADDITIONAL_RETRIEVE_LIST', [])

    # Any key still left in `settings` was not consumed above and is invalid.
    if settings:
        unknown_keys = ', '.join(list(settings.keys()))
        raise exceptions.InputValidationError('`settings` contained unexpected keys: {}'.format(unknown_keys))

    return calcinfo
def _prepare_for_submission(self,tempfolder,inputdict):
    """
    Create the ph.x input file and the copy/retrieve lists for submission.

    This is the routine to be called when you want to create
    the input files and related stuff with a plugin.

    :param tempfolder: a aiida.common.folders.Folder subclass where
                       the plugin should put all its files.
    :param inputdict: a dictionary with the input nodes, as they would
            be returned by get_inputdata_dict (without the Code!)
    :return: a CalcInfo instance describing files to copy, symlink and retrieve.
    :raises InputValidationError: for missing/mistyped inputs or unknown keys.
    :raises UniquenessError: if the parent RemoteData has not exactly one
        parent calculation.
    """
    # ---- pop and validate all expected input nodes -----------------------
    try:
        code = inputdict.pop(self.get_linkname('code'))
    except KeyError:
        raise InputValidationError("No code specified for this calculation")

    local_copy_list = []
    remote_copy_list = []
    remote_symlink_list = []

    try:
        parameters = inputdict.pop(self.get_linkname('parameters'))
    except KeyError:
        raise InputValidationError("No parameters specified for this calculation")
    if not isinstance(parameters, ParameterData):
        raise InputValidationError("parameters is not of type ParameterData")

    try:
        qpoints = inputdict.pop(self.get_linkname('qpoints'))
    except KeyError:
        raise InputValidationError("No qpoints specified for this calculation")
    if not isinstance(qpoints, KpointsData):
        raise InputValidationError("qpoints is not of type KpointsData")

    # Settings can be undefined, and defaults to an empty dictionary.
    # They will be used for any input that doen't fit elsewhere.
    settings = inputdict.pop(self.get_linkname('settings'),None)
    if settings is None:
        settings_dict = {}
    else:
        if not isinstance(settings, ParameterData):
            raise InputValidationError("settings, if specified, must be of "
                                       "type ParameterData")
        # Settings converted to uppercase
        settings_dict = _uppercase_dict(settings.get_dict(),
                                        dict_name='settings')

    parent_calc_folder = inputdict.pop(self.get_linkname('parent_folder'),None)
    if parent_calc_folder is None:
        raise InputValidationError("No parent calculation found, needed to "
                                   "compute phonons")
    # TODO: to be a PwCalculation is not sufficient: it could also be a nscf
    # calculation that is invalid for phonons
    if not isinstance(parent_calc_folder, RemoteData):
        raise InputValidationError("parent_calc_folder, if specified,"
                                   "must be of type RemoteData")

    restart_flag = False
    # extract parent calculation; exactly one creator is required
    parent_calcs = parent_calc_folder.get_inputs(node_type=JobCalculation)
    n_parents = len(parent_calcs)
    if n_parents != 1:
        raise UniquenessError("Input RemoteData is child of {} "
                              "calculation{}, while it should have "
                              "a single parent".format(n_parents,
                                                       "" if n_parents==0 else "s"))
    parent_calc = parent_calcs[0]
    # check that it is a valid parent
    self._check_valid_parent(parent_calc)

    # A non-PwCalculation parent means we are restarting a previous ph.x run,
    # so its dynamical matrices must also be copied over (see below).
    if not isinstance(parent_calc, PwCalculation):
        restart_flag = True

    # Also, the parent calculation must be on the same computer
    new_comp = self.get_computer()
    old_comp = parent_calc.get_computer()
    if ( not new_comp.uuid == old_comp.uuid ):
        raise InputValidationError("PhCalculation must be launched on the same computer"
                                   " of the parent: {}".format(old_comp.get_name()))

    # put by default, default_parent_output_folder = ./out
    try:
        default_parent_output_folder = parent_calc._OUTPUT_SUBFOLDER
    except AttributeError:
        try:
            default_parent_output_folder = parent_calc._get_output_folder()
        except AttributeError:
            raise InputValidationError("Parent of PhCalculation does not "
                                       "have a default output subfolder")
    #os.path.join(
    #                   parent_calc.OUTPUT_SUBFOLDER,
    #                  '{}.save'.format(parent_calc.PREFIX))
    parent_calc_out_subfolder = settings_dict.pop('PARENT_CALC_OUT_SUBFOLDER',
                                                  default_parent_output_folder)

    # Here, there should be no other inputs
    if inputdict:
        raise InputValidationError("The following input data nodes are "
                                   "unrecognized: {}".format(inputdict.keys()))

    ##############################
    # END OF INITIAL INPUT CHECK #
    ##############################

    # I put the first-level keys as uppercase (i.e., namelist and card names)
    # and the second-level keys as lowercase
    # (deeper levels are unchanged)
    input_params = _uppercase_dict(parameters.get_dict(),
                                   dict_name='parameters')
    input_params = {k: _lowercase_dict(v, dict_name=k)
                    for k, v in input_params.iteritems()}

    # D3 runs need fildrho-related flags; block them so the user cannot set
    # them explicitly (they are forced to plugin-controlled values below).
    prepare_for_d3 = settings_dict.pop('PREPARE_FOR_D3',False)
    if prepare_for_d3:
        self._blocked_keywords += [('INPUTPH', 'fildrho'),
                                   ('INPUTPH', 'drho_star%open'),
                                   ('INPUTPH', 'drho_star%ext'),
                                   ('INPUTPH', 'drho_star%dir')]
    # NOTE(review): this appends to the class-level _blocked_keywords list in
    # place, so the extra entries persist across instances — confirm intended.

    # I remove unwanted elements (for the moment, instead, I stop; to change when
    # we setup a reasonable logging)
    for nl, flag in self._blocked_keywords:
        if nl in input_params:
            if flag in input_params[nl]:
                raise InputValidationError(
                    "You cannot specify explicitly the '{}' flag in the '{}' "
                    "namelist or card.".format(flag, nl))

    # Set some variables (look out at the case! NAMELISTS should be uppercase,
    # internal flag names must be lowercase)
    if 'INPUTPH' not in input_params:
        raise InputValidationError("No namelist INPUTPH found in input") # I cannot decide what to do in the calculation
    input_params['INPUTPH']['outdir'] = self._OUTPUT_SUBFOLDER
    input_params['INPUTPH']['iverbosity'] = 1 # in human language 1=high
    input_params['INPUTPH']['prefix'] = self._PREFIX
    input_params['INPUTPH']['fildyn'] = self._OUTPUT_DYNAMICAL_MATRIX_PREFIX
    if prepare_for_d3:
        input_params['INPUTPH']['fildrho'] = self._DRHO_PREFIX
        input_params['INPUTPH']['drho_star%open'] = True
        input_params['INPUTPH']['drho_star%ext'] = self._DRHO_STAR_EXT
        input_params['INPUTPH']['drho_star%dir'] = self._FOLDER_DRHO

    # qpoints part: a mesh triggers ldisp mode; otherwise an explicit list of
    # q-points is written after the namelists (postpend_text).
    try:
        mesh,offset = qpoints.get_kpoints_mesh()
        if any([i!=0. for i in offset]):
            raise NotImplementedError("Computation of phonons on a mesh with"
                " non zero offset is not implemented, at the level of ph.x")
        input_params["INPUTPH"]["ldisp"] = True
        input_params["INPUTPH"]["nq1"] = mesh[0]
        input_params["INPUTPH"]["nq2"] = mesh[1]
        input_params["INPUTPH"]["nq3"] = mesh[2]
        postpend_text = None
    except AttributeError:
        # this is the case where no mesh was set. Maybe it's a list
        try:
            list_of_points = qpoints.get_kpoints(cartesian=True)
        except AttributeError as e:
            # In this case, there are no info on the qpoints at all
            raise InputValidationError("Neither a qpoints mesh or a valid "
                                       "list of qpoints was found in input",
                                       e.message)
        # change to 2pi/a coordinates
        lattice_parameter = numpy.linalg.norm(qpoints.cell[0])
        list_of_points *= lattice_parameter / (2.*numpy.pi)
        # add here the list of point coordinates
        if len(list_of_points)>1:
            input_params["INPUTPH"]["qplot"] = True
            input_params["INPUTPH"]["ldisp"] = True
            postpend_text = "{}\n".format(len(list_of_points))
            for points in list_of_points:
                postpend_text += "{} {} {} 1\n".format(*points)
            # Note: the weight is fixed to 1, because ph.x calls these
            # things weights but they are not such. If they are going to
            # exist with the meaning of weights, they will be supported
        else:
            input_params["INPUTPH"]["ldisp"] = False
            postpend_text = ""
            for points in list_of_points:
                postpend_text += "{} {} {}\n".format(*points)

    # =================== NAMELISTS ========================
    # customized namelists, otherwise not present in the distributed ph code
    try:
        namelists_toprint = settings_dict.pop('NAMELISTS')
        if not isinstance(namelists_toprint, list):
            raise InputValidationError(
                "The 'NAMELISTS' value, if specified in the settings input "
                "node, must be a list of strings")
    except KeyError: # list of namelists not specified in the settings; do automatic detection
        namelists_toprint = self._compulsory_namelists

    input_filename = tempfolder.get_abs_path(self._INPUT_FILE_NAME)

    # create a folder for the dynamical matrices
    if not restart_flag: # if it is a restart, it will be copied over
        tempfolder.get_subfolder(self._FOLDER_DYNAMICAL_MATRIX,
                                 create=True)

    with open(input_filename,'w') as infile:
        infile.write('AiiDA calculation\n')
        for namelist_name in namelists_toprint:
            infile.write("&{0}\n".format(namelist_name))
            # namelist content; set to {} if not present, so that we leave an
            # empty namelist
            namelist = input_params.pop(namelist_name,{})
            for k, v in sorted(namelist.iteritems()):
                infile.write(get_input_data_text(k,v))
            infile.write("/\n")

        # add list of qpoints if required
        if postpend_text is not None:
            infile.write(postpend_text)

        #TODO: write nat_todo

    # NOTE(review): this method serializes entries via get_input_data_text
    # while sibling methods in this file use convert_input_to_namelist_entry —
    # verify both helpers produce the same namelist format.
    if input_params:
        raise InputValidationError(
            "The following namelists are specified in input_params, but are "
            "not valid namelists for the current type of calculation: "
            "{}".format(",".join(input_params.keys())))

    # copy the parent scratch
    symlink = settings_dict.pop('PARENT_FOLDER_SYMLINK',
                                _default_symlink_usage) # a boolean
    if symlink:
        # I create a symlink to each file/folder in the parent ./out
        tempfolder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

        remote_symlink_list.append( (parent_calc_folder.get_computer().uuid,
                    os.path.join(parent_calc_folder.get_remote_path(),
                                 parent_calc_out_subfolder,
                                 "*"),
                    self._OUTPUT_SUBFOLDER
                    ) )

        # I also create a symlink for the ./pseudo folder
        # TODO: suppress this when the recover option of QE will be fixed
        # (bug when trying to find pseudo file)
        remote_symlink_list.append((parent_calc_folder.get_computer().uuid,
                                    os.path.join(parent_calc_folder.get_remote_path(),
                                                 self._get_pseudo_folder()),
                                    self._get_pseudo_folder()
                                    ))
        #pass
    else:
        # here I copy the whole folder ./out
        remote_copy_list.append(
            (parent_calc_folder.get_computer().uuid,
             os.path.join(parent_calc_folder.get_remote_path(),
                          parent_calc_out_subfolder),
             self._OUTPUT_SUBFOLDER))
        # I also copy the ./pseudo folder
        # TODO: suppress this when the recover option of QE will be fixed
        # (bug when trying to find pseudo file)
        remote_copy_list.append(
            (parent_calc_folder.get_computer().uuid,
             os.path.join(parent_calc_folder.get_remote_path(),
                          self._get_pseudo_folder()),
             self._get_pseudo_folder()))

    if restart_flag: # in this case, copy in addition also the dynamical matrices
        if symlink:
            remote_symlink_list.append(
                (parent_calc_folder.get_computer().uuid,
                 os.path.join(parent_calc_folder.get_remote_path(),
                              self._FOLDER_DYNAMICAL_MATRIX),
                 self._FOLDER_DYNAMICAL_MATRIX))
        else:
            # copy the dynamical matrices
            remote_copy_list.append(
                (parent_calc_folder.get_computer().uuid,
                 os.path.join(parent_calc_folder.get_remote_path(),
                              self._FOLDER_DYNAMICAL_MATRIX),
                 '.'))
            # no need to copy the _ph0, since I copied already the whole ./out
            # folder

    # here we may create an aiida.EXIT file
    create_exit_file = settings_dict.pop('ONLY_INITIALIZATION',False)
    if create_exit_file:
        exit_filename = tempfolder.get_abs_path(
                         '{}.EXIT'.format(self._PREFIX))
        with open(exit_filename,'w') as f:
            f.write('\n')

    calcinfo = CalcInfo()
    calcinfo.uuid = self.uuid
    # Empty command line by default
    cmdline_params = settings_dict.pop('CMDLINE', [])
    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list
    calcinfo.remote_symlink_list = remote_symlink_list

    codeinfo = CodeInfo()
    codeinfo.cmdline_params = (list(cmdline_params)
                               + ["-in", self._INPUT_FILE_NAME])
    codeinfo.stdout_name = self._OUTPUT_FILE_NAME
    codeinfo.code_uuid = code.uuid
    calcinfo.codes_info = [codeinfo]

    # Retrieve by default the output file and the xml file
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append(self._OUTPUT_FILE_NAME)
    calcinfo.retrieve_list.append(self._FOLDER_DYNAMICAL_MATRIX)
    calcinfo.retrieve_list.append(
        os.path.join(self._OUTPUT_SUBFOLDER,
                     '_ph0',
                     '{}.phsave'.format(self._PREFIX),
                     self._OUTPUT_XML_TENSOR_FILE_NAME))

    extra_retrieved = settings_dict.pop('ADDITIONAL_RETRIEVE_LIST', [])
    for extra in extra_retrieved:
        calcinfo.retrieve_list.append( extra )

    # any settings key not consumed above is an error
    if settings_dict:
        raise InputValidationError("The following keys have been found in "
            "the settings input node, but were not understood: {}".format(
            ",".join(settings_dict.keys())))

    return calcinfo
def _prepare_for_submission(self, tempfolder, inputdict):
    """
    Create the Yambo input file and the copy/retrieve lists for submission.

    This is the routine to be called when you want to create
    the input files and related stuff with a plugin.

    :param tempfolder: a aiida.common.folders.Folder subclass where
                       the plugin should put all its files.
    :param inputdict: a dictionary with the input nodes, as they would
            be returned by get_inputdata_dict (with the Code(s)!)
    :return: a CalcInfo instance; codes_info is assembled from up to three
        steps (interface precode, yambo init, yambo run) depending on the
        INITIALISE setting and on whether the parent is a YamboCalculation.
    :raises InputValidationError: for missing/mistyped inputs or unknown keys.
    :raises UniquenessError: if the parent folder has more than one creator.
    """
    local_copy_list = []
    remote_copy_list = []
    remote_symlink_list = []

    # Settings can be undefined, and defaults to an empty dictionary.
    # They will be used for any input that doen't fit elsewhere.
    settings = inputdict.pop(self.get_linkname('settings'), None)
    if settings is None:
        settings_dict = {}
    else:
        if not isinstance(settings, ParameterData):
            raise InputValidationError(
                "settings, if specified, must be of "
                "type ParameterData")
        # Settings converted to uppercase
        settings_dict = _uppercase_dict(settings.get_dict(),
                                        dict_name='settings')

    # INITIALISE=True means: run only the p2y/initialisation step
    initialise = settings_dict.pop('INITIALISE', None)
    if initialise is not None:
        if not isinstance(initialise, bool):
            raise InputValidationError("INITIALISE must be "
                                       " a boolean")

    try:
        parameters = inputdict.pop(self.get_linkname('parameters'))
    except KeyError:
        if not initialise:
            raise InputValidationError(
                "No parameters specified for this calculation")
        else:
            pass

    if not initialise:
        if not isinstance(parameters, ParameterData):
            raise InputValidationError(
                "parameters is not of type ParameterData")

    parent_calc_folder = inputdict.pop(self.get_linkname('parent_folder'),
                                       None)
    if parent_calc_folder is None:
        raise InputValidationError(
            "No parent calculation found, it is needed to "
            "use Yambo")
    if not isinstance(parent_calc_folder, RemoteData):
        raise InputValidationError("parent_calc_folder must be of"
                                   " type RemoteData")

    main_code = inputdict.pop(self.get_linkname('code'), None)
    if main_code is None:
        raise InputValidationError("No input code found!")

    preproc_code = inputdict.pop(self.get_linkname('preprocessing_code'),
                                 None)
    if preproc_code is not None:
        if not isinstance(preproc_code, Code):
            raise InputValidationError("preprocessing_code, if specified,"
                                       "must be of type Code")
    # NOTE(review): when the parent is NOT a YamboCalculation, preproc_code.uuid
    # is dereferenced below — a missing preprocessing_code would then raise an
    # AttributeError instead of a clear validation error; confirm and guard.

    parent_calc = parent_calc_folder.get_inputs_dict(
        link_type=LinkType.CREATE)['remote_folder']
    yambo_parent = isinstance(parent_calc, YamboCalculation)

    # flags for yambo interfaces
    try:
        precode_parameters = inputdict.pop(
            self.get_linkname('precode_parameters'))
    except KeyError:
        precode_parameters = ParameterData(dict={})
    if not isinstance(precode_parameters, ParameterData):
        raise InputValidationError('precode_parameters is not '
                                   'of type ParameterData')
    precode_param_dict = precode_parameters.get_dict()

    # check the precode parameters given in input:
    # keys must look like command-line flags ("-x"); boolean True emits the
    # bare flag, False is dropped, anything else emits "flag value".
    input_cmdline = settings_dict.pop('CMDLINE', None)
    import re
    precode_params_list = []
    pattern = re.compile(r"(^\-)([a-zA-Z])")
    for key, value in precode_param_dict.iteritems():
        if re.search(pattern, key) is not None:
            # -O/-H/-h/-F are reserved by the plugin (output/help/file flags)
            if key == '-O' or key == '-H' or key == '-h' or key == '-F':
                raise InputValidationError(
                    "Precode flag {} is not allowed".format(str(key)))
            else:
                if precode_param_dict[key] is True:
                    precode_params_list.append(str(key))
                elif precode_param_dict[key] is False:
                    pass
                else:
                    precode_params_list.append('{}'.format(str(key)))
                    precode_params_list.append('{}'.format(str(value)))
        else:
            raise InputValidationError(
                "Wrong format of precode_parameters")
    # Adding manual cmdline input (e.g. for DB fragmentation)
    if input_cmdline is not None:
        precode_params_list = precode_params_list + input_cmdline

    # TODO: check that remote data must be on the same computer

    ##############################
    # END OF INITIAL INPUT CHECK #
    ##############################

    if not initialise:
        ###################################################
        # Prepare yambo input file
        ###################################################

        params_dict = parameters.get_dict()

        # extract boolean keys: they are written as bare flag lines
        boolean_dict = {
            k: v
            for k, v in params_dict.iteritems() if isinstance(v, bool)
        }
        params_dict = {
            k: v
            for k, v in params_dict.iteritems()
            if k not in boolean_dict.keys()
        }

        # reorganize the dictionary and create a list of dictionaries with key, value and units
        # (a "<key>_units" sibling entry provides the units of <key>)
        parameters_list = []
        for k, v in params_dict.iteritems():
            if "_units" in k:
                continue
            units_key = "{}_units".format(k)
            try:
                units = params_dict[units_key]
            except KeyError:
                units = None
            this_dict = {}
            this_dict['key'] = k
            this_dict['value'] = v
            this_dict['units'] = units
            parameters_list.append(this_dict)

        input_filename = tempfolder.get_abs_path(self._INPUT_FILE_NAME)

        with open(input_filename, 'w') as infile:
            infile.write(self._LOGOSTRING)

            for k, v in boolean_dict.iteritems():
                if v:
                    infile.write("{}\n".format(k))

            for this_dict in parameters_list:
                key = this_dict['key']
                value = this_dict['value']
                units = this_dict['units']

                if isinstance(value, (tuple, list)):
                    # write the input flags for the Drude term and for the parallelization options of vers. 4
                    # (it can be implemented in a better way)
                    if key.startswith('DrudeW'):
                        value_string = " ( " + ",".join(
                            [str(_) for _ in value]) + " )"
                        the_string = "{} = {}".format(key, value_string)
                        the_string += " {}".format(units)
                        infile.write(the_string + "\n")
                        continue

                    # each *_CPU key also needs its matching *_ROLEs line
                    if key == 'SE_CPU':
                        value_string = " \" " + " ".join(
                            [str(_) for _ in value]) + " \" "
                        the_string = "{} = {}".format(key, value_string)
                        infile.write("SE_ROLEs = \" q qp b \" " + "\n")
                        infile.write(the_string + "\n")
                        continue

                    if key == 'X_all_q_CPU':
                        value_string = " \" " + " ".join(
                            [str(_) for _ in value]) + " \" "
                        the_string = "{} = {}".format(key, value_string)
                        infile.write("X_all_q_ROLEs = \" q k c v \" " + "\n")
                        infile.write(the_string + "\n")
                        continue

                    if key == 'X_finite_q_CPU':
                        value_string = " \" " + " ".join(
                            [str(_) for _ in value]) + " \" "
                        the_string = "{} = {}".format(key, value_string)
                        infile.write("X_finite_q_ROLEs = \" q k c v \" " + "\n")
                        infile.write(the_string + "\n")
                        continue

                    if key == 'X_q_0_CPU':
                        value_string = " \" " + " ".join(
                            [str(_) for _ in value]) + " \" "
                        the_string = "{} = {}".format(key, value_string)
                        infile.write("X_q_0_ROLEs = \" k c v \" " + "\n")
                        infile.write(the_string + "\n")
                        continue

                    # ranges are written as a % block, one "a | b | ... |" row
                    # per inner sequence
                    if key == 'QPkrange' or key == 'QPerange':
                        value_string = ''
                        for v in value:
                            value_string += " | ".join([str(_) for _ in v
                                                        ]) + " |\n"
                        the_string = "% {}\n {}".format(key, value_string)
                        the_string += "%"
                        infile.write(the_string + "\n")
                        continue

                    # generic vector value: single-row % block
                    value_string = " | ".join([str(_)
                                               for _ in value]) + " |"
                    the_string = "% {}\n {}".format(key, value_string)
                    if units is not None:
                        the_string += " {}".format(units)
                    the_string += "\n%"
                else:
                    # scalar value; strings are double-quoted
                    the_value = '"{}"'.format(value) if isinstance(
                        value, basestring) else '{}'.format(value)
                    the_string = "{} = {}".format(key, the_value)
                    if units is not None:
                        the_string += " {}".format(units)

                infile.write(the_string + "\n")

    ############################################
    # set copy of the parent calculation
    ############################################

    parent_calcs = parent_calc_folder.get_inputs(link_type=LinkType.CREATE)
    if len(parent_calcs) > 1:
        raise UniquenessError(
            "More than one parent totalenergy calculation"
            "has been found for parent_calc_folder {}".format(
                parent_calc_folder))
    if len(parent_calcs) == 0:
        raise InputValidationError(
            "No parent calculation associated with parent_folder {}".
            format(parent_calc_folder))
    parent_calc = parent_calcs[0]

    if yambo_parent:
        try:
            parent_settings = _uppercase_dict(
                parent_calc.inp.settings.get_dict(),
                dict_name='parent settings')
            parent_initialise = parent_settings['INITIALISE']
        except KeyError:
            parent_initialise = False
        # NOTE(review): a parent without a 'settings' input raises
        # AttributeError here, which is not caught — confirm all Yambo
        # parents always carry a settings node.

    if yambo_parent:
        # reuse the parent SAVE database
        remote_copy_list.append(
            (parent_calc_folder.get_computer().uuid,
             os.path.join(parent_calc_folder.get_remote_path(), "SAVE"),
             "SAVE/"))
        if not parent_initialise:
            # copy the parent 'aiida' run folder only if the parent finished
            # and (when reported) actually wrote its databases
            cancopy = False
            if parent_calc.get_state() == calc_states.FINISHED:
                cancopy = True
            if 'yambo_wrote' in parent_calc.get_outputs_dict(
            )['output_parameters'].get_dict().keys():
                if parent_calc.get_outputs_dict(
                )['output_parameters'].get_dict()['yambo_wrote'] == True:
                    cancopy = True
                if parent_calc.get_outputs_dict(
                )['output_parameters'].get_dict()['yambo_wrote'] == False:
                    cancopy = False
            if cancopy:
                remote_copy_list.append(
                    (parent_calc_folder.get_computer().uuid,
                     os.path.join(parent_calc_folder.get_remote_path(),
                                  "aiida"), "aiida/"))
    else:
        # PW parent: copy the content of the .save folder for the interface
        remote_copy_list.append(
            (parent_calc_folder.get_computer().uuid,
             os.path.join(parent_calc_folder.get_remote_path(),
                          PwCalculation._OUTPUT_SUBFOLDER,
                          "{}.save".format(parent_calc._PREFIX), "*"), "."))

    ############################################
    # set Calcinfo
    ############################################

    calcinfo = CalcInfo()

    calcinfo.uuid = self.uuid

    calcinfo.local_copy_list = []
    calcinfo.remote_copy_list = remote_copy_list
    calcinfo.remote_symlink_list = []  # remote_symlink_list

    # Retrieve by default the output file and the xml file
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append('r*')
    calcinfo.retrieve_list.append('l*')
    calcinfo.retrieve_list.append('o*')
    calcinfo.retrieve_list.append('LOG/l-*_CPU_1')
    extra_retrieved = settings_dict.pop(
        'ADDITIONAL_RETRIEVE_LIST',
        ['aiida/ndb.QP', 'aiida/ndb.HF_and_locXC'])
    for extra in extra_retrieved:
        calcinfo.retrieve_list.append(extra)

    from aiida.common.datastructures import code_run_modes, CodeInfo

    # c1 = interface dft codes and yambo (ex. p2y or a2y)
    c1 = CodeInfo()
    c1.withmpi = True
    c1.cmdline_params = precode_params_list

    # c2 = yambo initialization
    c2 = CodeInfo()
    c2.withmpi = True
    c2.cmdline_params = []
    c2.code_uuid = main_code.uuid

    # if the parent calculation is a yambo calculation skip the interface (c1) and the initialization (c2)
    if yambo_parent:
        c1 = None
        if not parent_initialise:
            c2 = None
    else:
        c1.cmdline_params = precode_params_list
        c1.code_uuid = preproc_code.uuid

    # c3 = yambo calculation
    c3 = CodeInfo()
    c3.withmpi = self.get_withmpi()
    c3.cmdline_params = [
        "-F", self._INPUT_FILE_NAME, '-J', self._OUTPUT_FILE_NAME
    ]
    c3.code_uuid = main_code.uuid

    if initialise:
        c2 = None
        c3 = None

    #calcinfo.codes_info = [c1, c2, c3] if not yambo_parent else [c3]
    # NOTE(review): with initialise=True AND a yambo parent, the first branch
    # wins and codes_info becomes [c3] where c3 is None — confirm that this
    # combination is excluded upstream.
    if yambo_parent:
        if not parent_initialise:
            calcinfo.codes_info = [c3]
        else:
            calcinfo.codes_info = [c2, c3]
    elif initialise:
        calcinfo.codes_info = [c1]
    else:
        calcinfo.codes_info = [c1, c2, c3]

    calcinfo.codes_run_mode = code_run_modes.SERIAL

    # any settings key not consumed above is an error
    if settings_dict:
        raise InputValidationError(
            "The following keys have been found in "
            "the settings input node, but were not understood: {}".format(
                ",".join(settings_dict.keys())))

    return calcinfo
def _prepare_for_submission(self, tempfolder, inputdict):
    """
    Create the namelist input file and the copy/retrieve lists for submission.

    This is the routine to be called when you want to create
    the input files and related stuff with a plugin.

    :param tempfolder: a aiida.common.folders.Folder subclass where
                       the plugin should put all its files.
    :param inputdict: a dictionary with the input nodes, as they would
            be returned by get_inputdata_dict (without the Code!)
    :return: a CalcInfo instance describing files to copy and retrieve.
    :raises InputValidationError: for missing/mistyped inputs, blocked
        keywords set explicitly, unexpected namelists or unknown settings.
    """
    local_copy_list = []
    remote_copy_list = []

    # ---- pop and validate all expected input nodes -----------------------
    try:
        code = inputdict.pop(self.get_linkname('code'))
    except KeyError:
        raise InputValidationError(
            "No code specified for this calculation")

    try:
        parameters = inputdict.pop(self.get_linkname('parameters'))
    except KeyError:
        raise InputValidationError(
            "No parameters specified for this calculation")
    if not isinstance(parameters, ParameterData):
        raise InputValidationError(
            "parameters is not of type ParameterData")

    # Settings can be undefined, and defaults to an empty dictionary
    settings = inputdict.pop(self.get_linkname('settings'), None)
    if settings is None:
        settings_dict = {}
    else:
        if not isinstance(settings, ParameterData):
            raise InputValidationError(
                "settings, if specified, must be of "
                "type ParameterData")
        # Settings converted to uppercase
        settings_dict = _uppercase_dict(settings.get_dict(),
                                        dict_name='settings')

    # the accepted type(s) of the parent folder are plugin-specific
    parent_calc_folder = inputdict.pop(self.get_linkname('parent_folder'),
                                       None)
    if parent_calc_folder is not None:
        if not isinstance(parent_calc_folder, self._parent_folder_type):
            if not isinstance(self._parent_folder_type, tuple):
                possible_types = [self._parent_folder_type.__name__]
            else:
                possible_types = [
                    t.__name__ for t in self._parent_folder_type
                ]
            raise InputValidationError("parent_calc_folder, if specified,"
                                       "must be of type {}".format(
                                           " or ".join(possible_types)))

    # free-form text appended after the namelists (e.g. extra cards)
    following_text = self._get_following_text(inputdict, settings)

    # Here, there should be no more parameters...
    if inputdict:
        raise InputValidationError("The following input data nodes are "
                                   "unrecognized: {}".format(
                                       inputdict.keys()))

    ##############################
    # END OF INITIAL INPUT CHECK #
    ##############################

    # I put the first-level keys as uppercase (i.e., namelist and card names)
    # and the second-level keys as lowercase
    # (deeper levels are unchanged)
    input_params = _uppercase_dict(parameters.get_dict(),
                                   dict_name='parameters')
    input_params = {
        k: _lowercase_dict(v, dict_name=k)
        for k, v in input_params.iteritems()
    }

    # set default values. NOTE: this is different from PW/CP
    for blocked in self._blocked_keywords:
        namelist = blocked[0].upper()
        key = blocked[1].lower()
        value = blocked[2]

        if namelist in input_params:
            if key in input_params[namelist]:
                raise InputValidationError(
                    "You cannot specify explicitly the '{}' key in the '{}' "
                    "namelist.".format(key, namelist))

        # set to a default
        # BUGFIX: previously this read `if not input_params[namelist]:`,
        # which raised a bare KeyError whenever the namelist was not given
        # by the user; create the missing namelist instead.
        if namelist not in input_params:
            input_params[namelist] = {}
        input_params[namelist][key] = value

    # =================== NAMELISTS AND CARDS ========================
    try:
        namelists_toprint = settings_dict.pop('NAMELISTS')
        if not isinstance(namelists_toprint, list):
            raise InputValidationError(
                "The 'NAMELISTS' value, if specified in the settings input "
                "node, must be a list of strings")
    except KeyError:  # list of namelists not specified; do automatic detection
        namelists_toprint = self._default_namelists

    input_filename = tempfolder.get_abs_path(self._INPUT_FILE_NAME)

    with open(input_filename, 'w') as infile:
        for namelist_name in namelists_toprint:
            infile.write("&{0}\n".format(namelist_name))
            # namelist content; set to {} if not present, so that we leave an
            # empty namelist
            namelist = input_params.pop(namelist_name, {})
            for k, v in sorted(namelist.iteritems()):
                infile.write(convert_input_to_namelist_entry(k, v))
            infile.write("/\n")

        # Write remaning text now, if any
        infile.write(following_text)

    # Check for specified namelists that are not expected
    if input_params:
        raise InputValidationError(
            "The following namelists are specified in input_params, but are "
            "not valid namelists for the current type of calculation: "
            "{}".format(",".join(input_params.keys())))

    # copy remote output dir, if specified
    if parent_calc_folder is not None:
        if isinstance(parent_calc_folder, RemoteData):
            parent_calc_out_subfolder = settings_dict.pop(
                'PARENT_CALC_OUT_SUBFOLDER', self._INPUT_SUBFOLDER)
            remote_copy_list.append(
                (parent_calc_folder.get_computer().uuid,
                 os.path.join(parent_calc_folder.get_remote_path(),
                              parent_calc_out_subfolder),
                 self._OUTPUT_SUBFOLDER))
        elif isinstance(parent_calc_folder, FolderData):
            local_copy_list.append(
                (parent_calc_folder.get_abs_path(self._INPUT_SUBFOLDER),
                 self._OUTPUT_SUBFOLDER))
        elif isinstance(parent_calc_folder, SinglefileData):
            filename = parent_calc_folder.get_file_abs_path()
            local_copy_list.append((filename, os.path.basename(filename)))

    calcinfo = CalcInfo()
    calcinfo.uuid = self.uuid
    # Empty command line by default
    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list

    codeinfo = CodeInfo()
    codeinfo.cmdline_params = settings_dict.pop('CMDLINE', [])
    codeinfo.stdin_name = self._INPUT_FILE_NAME
    codeinfo.stdout_name = self._OUTPUT_FILE_NAME
    codeinfo.code_uuid = code.uuid
    calcinfo.codes_info = [codeinfo]

    # Retrieve by default the output file and the xml file
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append(self._OUTPUT_FILE_NAME)
    settings_retrieve_list = settings_dict.pop('ADDITIONAL_RETRIEVE_LIST',
                                               [])
    calcinfo.retrieve_list += settings_retrieve_list
    calcinfo.retrieve_list += self._internal_retrieve_list

    calcinfo.retrieve_singlefile_list = self._retrieve_singlefile_list

    if settings_dict:
        try:
            Parserclass = self.get_parserclass()
            parser = Parserclass(self)
            parser_opts = parser.get_parser_settings_key()
            settings_dict.pop(parser_opts)
        except (KeyError, AttributeError):
            # the key parser_opts isn't inside the dictionary, or it is set to None
            raise InputValidationError(
                "The following keys have been found in "
                "the settings input node, but were not understood: {}".
                format(",".join(settings_dict.keys())))
        # BUGFIX: previously, once the parser-options key was popped
        # successfully, any remaining unknown settings keys were silently
        # ignored; report them explicitly like everywhere else.
        if settings_dict:
            raise InputValidationError(
                "The following keys have been found in "
                "the settings input node, but were not understood: {}".
                format(",".join(settings_dict.keys())))

    return calcinfo
def prepare_for_submission(self, folder):  # pylint: disable=too-many-branches,too-many-statements
    """Prepare the calculation job for submission by transforming input nodes into input files.

    In addition to the input files being written to the sandbox folder, a `CalcInfo` instance will be returned that
    contains lists of files that need to be copied to the remote machine before job submission, as well as file
    lists that are to be retrieved after job completion.

    :param folder: a sandbox folder to temporarily write files on disk.
    :return: :py:`~aiida.common.datastructures.CalcInfo` instance.
    :raises ~aiida.common.exceptions.InputValidationError: if a blocked keyword is set explicitly, if the
        `PLOT.iflag` parameter is missing or invalid, or if unexpected namelists are specified.
    """
    # Put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
    parameters = _uppercase_dict(self.inputs.parameters.get_dict(), dict_name='parameters')
    parameters = {k: _lowercase_dict(v, dict_name=k) for k, v in parameters.items()}

    # Same for settings.
    if 'settings' in self.inputs:
        settings = _uppercase_dict(self.inputs.settings.get_dict(), dict_name='settings')
    else:
        settings = {}

    # Set default values. NOTE: this is different from PW/CP
    for blocked in self._blocked_keywords:
        namelist = blocked[0].upper()
        key = blocked[1].lower()
        value = blocked[2]

        if namelist in parameters:
            if key in parameters[namelist]:
                raise exceptions.InputValidationError(
                    "You cannot specify explicitly the '{}' key in the '{}' "
                    'namelist.'.format(key, namelist))
        else:
            parameters[namelist] = {}
        parameters[namelist][key] = value

    # Restrict the plot output to the file types that we want to be able to parse
    dimension_to_output_format = {
        0: 0,  # Spherical integration -> Gnuplot, 1D
        1: 0,  # 1D -> Gnuplot, 1D
        2: 7,  # 2D -> Gnuplot, 2D
        3: 6,  # 3D -> Gaussian cube
        4: 0,  # Polar on a sphere -> # Gnuplot, 1D
    }

    # BUGFIX: an absent `PLOT` namelist / `iflag` key or an `iflag` outside 0-4 used to surface as a cryptic
    # KeyError; validate explicitly so the user gets an actionable message instead.
    if 'PLOT' not in parameters or 'iflag' not in parameters['PLOT']:
        raise exceptions.InputValidationError("the 'iflag' key is required in the 'PLOT' namelist.")
    if parameters['PLOT']['iflag'] not in dimension_to_output_format:
        raise exceptions.InputValidationError(
            "value '{}' of 'iflag' in the 'PLOT' namelist is invalid: it should be an integer between "
            '0 and 4.'.format(parameters['PLOT']['iflag']))

    parameters['PLOT']['output_format'] = dimension_to_output_format[parameters['PLOT']['iflag']]

    namelists_toprint = self._default_namelists
    input_filename = self.inputs.metadata.options.input_filename

    with folder.open(input_filename, 'w') as infile:
        for namelist_name in namelists_toprint:
            infile.write('&{0}\n'.format(namelist_name))
            # namelist content; set to {} if not present, so that we leave an empty namelist
            namelist = parameters.pop(namelist_name, {})
            for key, value in sorted(namelist.items()):
                infile.write(convert_input_to_namelist_entry(key, value))
            infile.write('/\n')

    # Check for specified namelists that are not expected
    if parameters:
        raise exceptions.InputValidationError(
            'The following namelists are specified in parameters, but are '
            'not valid namelists for the current type of calculation: '
            '{}'.format(','.join(list(parameters.keys()))))

    remote_copy_list = []
    local_copy_list = []

    # Copy remote output dir: the parent's output and pseudo subfolders are needed by pp.x
    parent_calc_folder = self.inputs.get('parent_folder', None)
    if isinstance(parent_calc_folder, orm.RemoteData):
        remote_copy_list.append((
            parent_calc_folder.computer.uuid,
            os.path.join(parent_calc_folder.get_remote_path(), self._INPUT_SUBFOLDER),
            self._OUTPUT_SUBFOLDER
        ))
        remote_copy_list.append((
            parent_calc_folder.computer.uuid,
            os.path.join(parent_calc_folder.get_remote_path(), self._PSEUDO_SUBFOLDER),
            self._PSEUDO_SUBFOLDER
        ))
    elif isinstance(parent_calc_folder, orm.FolderData):
        # Local repository folder: stage every file into both subfolders
        for filename in parent_calc_folder.list_object_names():
            local_copy_list.append((
                parent_calc_folder.uuid,
                filename,
                os.path.join(self._OUTPUT_SUBFOLDER, filename)
            ))
            local_copy_list.append((
                parent_calc_folder.uuid,
                filename,
                os.path.join(self._PSEUDO_SUBFOLDER, filename)
            ))

    codeinfo = datastructures.CodeInfo()
    codeinfo.cmdline_params = settings.pop('CMDLINE', [])
    codeinfo.stdin_name = self.inputs.metadata.options.input_filename
    codeinfo.stdout_name = self.inputs.metadata.options.output_filename
    codeinfo.code_uuid = self.inputs.code.uuid

    calcinfo = datastructures.CalcInfo()
    calcinfo.codes_info = [codeinfo]
    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list

    # Retrieve by default the output file
    calcinfo.retrieve_list = [self.inputs.metadata.options.output_filename]
    calcinfo.retrieve_temporary_list = []

    # Depending on the `plot_num` and the corresponding parameters, more than one pair of `filplot` + `fileout`
    # files may be written. In that case, the data files will have `filplot` as a prefix with some suffix to
    # distinguish them from one another. The `fileout` filename will be the full data filename with the `fileout`
    # value as a suffix.
    retrieve_tuples = [
        self._FILEOUT,
        ('{}_*{}'.format(self._FILPLOT, self._FILEOUT), '.', 0)
    ]

    if self.inputs.metadata.options.keep_plot_file:
        calcinfo.retrieve_list.extend(retrieve_tuples)
    else:
        calcinfo.retrieve_temporary_list.extend(retrieve_tuples)

    return calcinfo
def prepare_for_submission(self, folder):
    """Create the input files from the input nodes passed to this instance of the `CalcJob`.

    Writes the namelist input file (namelists listed in ``settings['NAMELISTS']`` or
    ``self._default_namelists``), sets up remote/local copy lists from an optional
    ``parent_folder`` input, and assembles the ``CalcInfo`` for the scheduler.

    :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk
    :return: `aiida.common.datastructures.CalcInfo` instance
    """
    if 'settings' in self.inputs:
        settings = _uppercase_dict(self.inputs.settings.get_dict(), dict_name='settings')
    else:
        settings = {}

    following_text = self._get_following_text()

    # Put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
    if 'parameters' in self.inputs:
        parameters = _uppercase_dict(self.inputs.parameters.get_dict(), dict_name='parameters')
        parameters = {
            k: _lowercase_dict(v, dict_name=k)
            for k, v in six.iteritems(parameters)
        }
    else:
        parameters = {}

    # Force default values for blocked keywords. NOTE: this is different from PW/CP
    # Each blocked keyword is (namelist, key, value); specifying the key explicitly
    # in the input parameters is an error, and the forced value always wins.
    for blocked in self._blocked_keywords:
        namelist = blocked[0].upper()
        key = blocked[1].lower()
        value = blocked[2]
        if namelist in parameters:
            if key in parameters[namelist]:
                raise exceptions.InputValidationError(
                    "You cannot specify explicitly the '{}' key in the '{}' "
                    'namelist.'.format(key, namelist))
        else:
            parameters[namelist] = {}
        parameters[namelist][key] = value

    # =================== NAMELISTS AND CARDS ========================
    try:
        namelists_toprint = settings.pop('NAMELISTS')
        if not isinstance(namelists_toprint, list):
            raise exceptions.InputValidationError(
                "The 'NAMELISTS' value, if specified in the settings input node, must be a list of strings"
            )
    except KeyError:  # list of namelists not specified; do automatic detection
        namelists_toprint = self._default_namelists

    input_filename = self.inputs.metadata.options.input_filename

    with folder.open(input_filename, 'w') as infile:
        for namelist_name in namelists_toprint:
            infile.write(u'&{0}\n'.format(namelist_name))
            # namelist content; set to {} if not present, so that we leave an empty namelist
            namelist = parameters.pop(namelist_name, {})
            for key, value in sorted(six.iteritems(namelist)):
                infile.write(convert_input_to_namelist_entry(key, value))
            infile.write(u'/\n')

        # Write remaning text now, if any
        infile.write(following_text)

    # Check for specified namelists that are not expected
    # (anything left in `parameters` was not consumed by the loop above)
    if parameters:
        raise exceptions.InputValidationError(
            'The following namelists are specified in parameters, but are '
            'not valid namelists for the current type of calculation: '
            '{}'.format(','.join(list(parameters.keys()))))

    remote_copy_list = []
    local_copy_list = []

    # copy remote output dir, if specified
    parent_calc_folder = self.inputs.get('parent_folder', None)
    if parent_calc_folder is not None:
        if isinstance(parent_calc_folder, RemoteData):
            parent_calc_out_subfolder = settings.pop(
                'PARENT_CALC_OUT_SUBFOLDER', self._INPUT_SUBFOLDER)
            remote_copy_list.append(
                (parent_calc_folder.computer.uuid,
                 os.path.join(parent_calc_folder.get_remote_path(),
                              parent_calc_out_subfolder),
                 self._OUTPUT_SUBFOLDER))
        elif isinstance(parent_calc_folder, FolderData):
            # TODO: test me, especially with deep relative paths.
            for filename in parent_calc_folder.list_object_names():
                local_copy_list.append(
                    (parent_calc_folder.uuid, filename,
                     os.path.join(self._OUTPUT_SUBFOLDER, filename)))
        elif isinstance(parent_calc_folder, SinglefileData):
            # TODO: test me
            single_file = parent_calc_folder
            local_copy_list.append((single_file.uuid,
                                    single_file.filename,
                                    single_file.filename))

    codeinfo = datastructures.CodeInfo()
    codeinfo.cmdline_params = settings.pop('CMDLINE', [])
    codeinfo.stdin_name = self.inputs.metadata.options.input_filename
    codeinfo.stdout_name = self.inputs.metadata.options.output_filename
    codeinfo.code_uuid = self.inputs.code.uuid

    calcinfo = datastructures.CalcInfo()
    calcinfo.uuid = str(self.uuid)
    calcinfo.codes_info = [codeinfo]
    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list

    # Retrieve by default the output file and the xml file
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append(
        self.inputs.metadata.options.output_filename)
    settings_retrieve_list = settings.pop('ADDITIONAL_RETRIEVE_LIST', [])
    calcinfo.retrieve_list += settings_retrieve_list
    calcinfo.retrieve_list += self._internal_retrieve_list

    calcinfo.retrieve_singlefile_list = self._retrieve_singlefile_list

    # We might still have parser options in the settings dictionary: pop them.
    _pop_parser_options(self, settings)

    # Any key still left in `settings` was not understood by anything above.
    if settings:
        unknown_keys = ', '.join(list(settings.keys()))
        raise exceptions.InputValidationError(
            '`settings` contained unexpected keys: {}'.format(
                unknown_keys))

    return calcinfo
def prepare_for_submission(self, folder):
    """Prepare the calculation job for submission by transforming input nodes into input files.

    In addition to the input files being written to the sandbox folder, a `CalcInfo` instance will be returned that
    contains lists of files that need to be copied to the remote machine before job submission, as well as file
    lists that are to be retrieved after job completion.

    :param folder: a sandbox folder to temporarily write files on disk.
    :return: :py:`~aiida.common.datastructures.CalcInfo` instance.
    """
    # pylint: disable=too-many-branches,too-many-statements
    if 'settings' in self.inputs:
        settings = _uppercase_dict(self.inputs.settings.get_dict(), dict_name='settings')
    else:
        settings = {}

    following_text = self._get_following_text()

    # Put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
    if 'parameters' in self.inputs:
        parameters = _uppercase_dict(self.inputs.parameters.get_dict(), dict_name='parameters')
        parameters = {
            k: _lowercase_dict(v, dict_name=k)
            for k, v in parameters.items()
        }
    else:
        parameters = {}

    # =================== NAMELISTS AND CARDS ========================
    try:
        namelists_toprint = settings.pop('NAMELISTS')
        if not isinstance(namelists_toprint, list):
            raise exceptions.InputValidationError(
                "The 'NAMELISTS' value, if specified in the settings input node, must be a list of strings"
            )
    except KeyError:  # list of namelists not specified; do automatic detection
        namelists_toprint = self._default_namelists

    # Delegate blocked-keyword defaults, namelist filtering and rendering to helpers,
    # then append the free-form trailing text (cards etc.) and write the input file.
    parameters = self.set_blocked_keywords(parameters)
    parameters = self.filter_namelists(parameters, namelists_toprint)
    file_content = self.generate_input_file(parameters)
    file_content += '\n' + following_text
    input_filename = self.inputs.metadata.options.input_filename
    with folder.open(input_filename, 'w') as infile:
        infile.write(file_content)

    symlink = settings.pop('PARENT_FOLDER_SYMLINK', False)

    remote_copy_list = []
    local_copy_list = []
    remote_symlink_list = []

    # `ptr` aliases whichever list the parent RemoteData entry should go into:
    # symlinks when PARENT_FOLDER_SYMLINK is set, plain remote copies otherwise.
    ptr = remote_symlink_list if symlink else remote_copy_list

    # copy remote output dir, if specified
    parent_calc_folder = self.inputs.get('parent_folder', None)
    if parent_calc_folder is not None:
        if isinstance(parent_calc_folder, RemoteData):
            parent_calc_out_subfolder = settings.pop(
                'PARENT_CALC_OUT_SUBFOLDER', self._INPUT_SUBFOLDER)
            ptr.append((parent_calc_folder.computer.uuid,
                        os.path.join(parent_calc_folder.get_remote_path(),
                                     parent_calc_out_subfolder),
                        self._OUTPUT_SUBFOLDER))
        elif isinstance(parent_calc_folder, FolderData):
            for filename in parent_calc_folder.list_object_names():
                local_copy_list.append(
                    (parent_calc_folder.uuid, filename,
                     os.path.join(self._OUTPUT_SUBFOLDER, filename)))
        elif isinstance(parent_calc_folder, SinglefileData):
            single_file = parent_calc_folder
            local_copy_list.append((single_file.uuid,
                                    single_file.filename,
                                    single_file.filename))

    codeinfo = datastructures.CodeInfo()
    codeinfo.cmdline_params = settings.pop('CMDLINE', [])
    codeinfo.stdin_name = self.inputs.metadata.options.input_filename
    codeinfo.stdout_name = self.inputs.metadata.options.output_filename
    codeinfo.code_uuid = self.inputs.code.uuid

    calcinfo = datastructures.CalcInfo()
    calcinfo.uuid = str(self.uuid)
    calcinfo.codes_info = [codeinfo]
    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list
    calcinfo.remote_symlink_list = remote_symlink_list

    # Retrieve by default the output file and the xml file
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append(
        self.inputs.metadata.options.output_filename)
    settings_retrieve_list = settings.pop('ADDITIONAL_RETRIEVE_LIST', [])
    calcinfo.retrieve_list += settings_retrieve_list
    calcinfo.retrieve_list += self._internal_retrieve_list

    calcinfo.retrieve_singlefile_list = self._retrieve_singlefile_list

    # We might still have parser options in the settings dictionary: pop them.
    _pop_parser_options(self, settings)

    # Any key still left in `settings` was not understood by anything above.
    if settings:
        unknown_keys = ', '.join(list(settings.keys()))
        raise exceptions.InputValidationError(
            f'`settings` contained unexpected keys: {unknown_keys}')

    return calcinfo
def parse(self, retrieved, **kwargs):
    """Parse the retrieved folder of a Yambo calculation and register the outputs.

    Walks the retrieved files via ``YamboFolder``, parses logs/reports, emits
    array/bands/parameter output nodes, and maps failure modes to exit codes.

    :param retrieved: kept for interface compatibility; the folder is taken from ``self.retrieved``.
    :return: an exit code from ``self.exit_codes`` on failure, ``None`` on success.
    :raises ParsingError: if input parameters are missing for a non-initialisation run.
    """
    from aiida.common import exceptions

    # suppose at the start that the job is unsuccessful, unless proven otherwise
    success = False

    # check whether the yambo calc was an initialisation (p2y)
    try:
        settings_dict = self._calc.inputs.settings.get_dict()
        settings_dict = _uppercase_dict(settings_dict, dict_name='settings')
    except AttributeError:
        settings_dict = {}

    initialise = settings_dict.pop('INITIALISE', None)

    # select the folder object
    try:
        out_folder = self.retrieved
    except exceptions.NotExistent:
        return self.exit_codes.ERROR_NO_RETRIEVED_FOLDER

    try:
        input_params = self._calc.inputs.parameters.get_dict()
    except AttributeError:
        if not initialise:
            raise ParsingError("Input parameters not found!")
        else:
            input_params = {}

    # retrieve the cell: if parent_calc is a YamboCalculation we must find the original
    # PwCalculation going back through the graph tree.
    parent_calc = find_pw_parent(self._calc)
    cell = parent_calc.inputs.structure.cell

    output_params = {'warnings': [], 'errors': [], 'yambo_wrote_dbs': False, 'game_over': False,
                     'p2y_completed': False, 'last_time': 0,
                     'requested_time': self._calc.attributes['max_wallclock_seconds'],
                     'time_units': 'seconds',
                     'memstats': [], 'para_error': False, 'memory_error': False, 'timing': [],
                     'time_error': False, 'has_gpu': False, 'yambo_version': '4.5', 'Fermi(eV)': 0}
    ndbqp = {}
    ndbhf = {}

    # NOTE(review): reaching into `_repository._repo_folder` is private AiiDA API —
    # kept as-is, but this should be migrated to a public accessor.
    retrieved_abspath = out_folder._repository._repo_folder.abspath

    try:
        results = YamboFolder(retrieved_abspath)
    except Exception as e:
        return self.exit_codes.PARSER_ANOMALY
        #raise ParsingError("Unexpected behavior of YamboFolder: %s" % e)

    for filename in os.listdir(retrieved_abspath):
        if 'stderr' in filename:
            # BUGFIX: os.listdir returns bare names; the file must be opened
            # relative to the retrieved folder, not the current working directory.
            with open(os.path.join(retrieved_abspath, filename), 'r') as stderr:
                parse_scheduler_stderr(stderr, output_params)

    for result in results.yambofiles:
        # BUGFIX: was `if results is None`, which can never be true while iterating
        # `results.yambofiles`; the intent (per the original comment) is to skip
        # empty entries. This should be automatic in yambopy...
        if result is None:
            continue

        if result.type == 'log':
            parse_log(result, output_params)
        if result.type == 'report':
            parse_report(result, output_params)

        if 'eel' in result.filename:
            eels_array = self._aiida_array(result.data)
            self.out(self._eels_array_linkname, eels_array)
        elif 'eps' in result.filename:
            eps_array = self._aiida_array(result.data)
            self.out(self._eps_array_linkname, eps_array)
        elif 'alpha' in result.filename:
            alpha_array = self._aiida_array(result.data)
            self.out(self._alpha_array_linkname, alpha_array)
        elif 'ndb.QP' == result.filename:
            ndbqp = copy.deepcopy(result.data)
        elif 'ndb.HF_and_locXC' == result.filename:
            ndbhf = copy.deepcopy(result.data)
        elif 'gw0___' in input_params:
            if self._aiida_bands_data(result.data, cell, result.kpoints):
                arr = self._aiida_bands_data(result.data, cell, result.kpoints)
                # ArrayData is not BandsData, but BandsData is ArrayData
                if type(arr) == BandsData:
                    self.out(self._quasiparticle_bands_linkname, arr)
                if type(arr) == ArrayData:
                    self.out(self._qp_array_linkname, arr)
        elif 'life___' in input_params:
            if self._aiida_bands_data(result.data, cell, result.kpoints):
                arr = self._aiida_bands_data(result.data, cell, result.kpoints)
                if type(arr) == BandsData:
                    self.out(self._alpha_array_linkname + '_bands', arr)
                elif type(arr) == ArrayData:
                    self.out(self._alpha_array_linkname + '_arr', arr)

    yambo_wrote_dbs(output_params)

    # we store all the information from the ndb.* files rather than in separate files
    # if possible, else we default to separate files.
    if ndbqp and ndbhf:
        self.out(self._ndb_linkname, self._sigma_c(ndbqp, ndbhf))
    else:
        if ndbqp:
            self.out(self._ndb_QP_linkname, self._aiida_ndb_qp(ndbqp))
        if ndbhf:
            self.out(self._ndb_HF_linkname, self._aiida_ndb_hf(ndbhf))

    if output_params['game_over']:
        success = True
    elif output_params['p2y_completed'] and initialise:
        success = True

    # last check on time: flag a probable walltime problem when the run used
    # (almost) all of the requested walltime without finishing cleanly.
    delta_time = (float(output_params['requested_time']) - float(output_params['last_time'])) \
                 / float(output_params['requested_time'])
    if not success:
        if delta_time > -2 and delta_time < 0.1:
            output_params['time_error'] = True

    params = Dict(dict=output_params)
    self.out(self._parameter_linkname, params)  # output_parameters

    if not success:
        if 'time_most_prob' in output_params['errors']:
            return self.exit_codes.WALLTIME_ERROR
        elif output_params['para_error']:
            return self.exit_codes.PARA_ERROR
        elif output_params['memory_error'] and 'X_par_allocation' in output_params['errors']:
            return self.exit_codes.X_par_MEMORY_ERROR
        elif output_params['memory_error']:
            return self.exit_codes.MEMORY_ERROR
        elif output_params['time_error']:
            return self.exit_codes.WALLTIME_ERROR
        else:
            return self.exit_codes.NO_SUCCESS
def parse_namelists(txt):
    """
    Parse txt to extract a dictionary of the namelist info.

    :param txt: A single string containing the QE input text to be parsed.
    :type txt: str

    :returns:
        A nested dictionary of the namelists and their key-value pairs. The
        namelists will always be upper-case keys, while the parameter keys will
        always be lower-case.

        For example::

            {"CONTROL": {"calculation": "bands",
                         "prefix": "al",
                         "pseudo_dir": "./pseudo",
                         "outdir": "./out"},
             "ELECTRONS": {"diagonalization": "cg"},
             "SYSTEM": {"nbnd": 8,
                        "ecutwfc": 15.0,
                        "celldm(1)": 7.5,
                        "ibrav": 2,
                        "nat": 1,
                        "ntyp": 1}
            }

    :raises aiida.common.exceptions.ParsingError: if there are issues
        parsing the input.
    """
    # TODO: Incorporate support for algebraic expressions?
    # Define the re to match a namelist and extract the info from it.
    namelist_re = re.compile(
        r"""
        ^ [ \t]* &(\S+) [ \t]* $\n  # match line w/ nmlst tag; save nmlst name
        (
         [\S\s]*?                   # match any line non-greedily
        )                           # save the group of text between nmlst
        ^ [ \t]* / [ \t]* $\n       # match line w/ "/" as only non-whitespace char
        """, re.M | re.X)
    # Define the re to match and extract all of the key = val pairs inside
    # a block of namelist text.
    key_value_re = re.compile(
        r"""
        [ \t]* (\S+?) [ \t]*  # match and store key
        =                     # equals sign separates key and value
        [ \t]* (\S+?) [ \t]*  # match and store value
        [\n,]                 # return or comma separates "key = value" pairs
        """, re.M | re.X)
    # Scan through the namelists...
    params_dict = {}
    for nmlst, blockstr in namelist_re.findall(txt):
        # ...extract the key value pairs, storing them each in nmlst_dict,...
        nmlst_dict = {}
        for key, valstr in key_value_re.findall(blockstr):
            nmlst_dict[key.lower()] = str2val(valstr)
        # ...and, store nmlst_dict as a value in params_dict with the namelist
        # as the key. Skip namelists that contained no key/value pairs.
        if nmlst_dict:
            params_dict[nmlst.upper()] = nmlst_dict
    if not params_dict:
        raise ParsingError(
            'No data was found while parsing the namelist in the following '
            'text\n' + txt)
    return _uppercase_dict(params_dict, "params_dict")
def _prepare_for_submission(self,tempfolder,inputdict):
    """
    This is the routine to be called when you want to create
    the input files and related stuff with a plugin.

    Validates and pops every expected node from ``inputdict`` (code, PW/NEB
    parameters, first/last structures, kpoints, settings, pseudos, optional
    parent folder and vdW table), writes the NEB input plus one PW input per
    end-point image, builds the copy/symlink lists and returns the ``CalcInfo``.

    :param tempfolder: a aiida.common.folders.Folder subclass where
                       the plugin should put all its files.
    :param inputdict: a dictionary with the input nodes, as they would
            be returned by get_inputdata_dict (without the Code!)
    :return: a CalcInfo instance describing files to copy/retrieve.
    :raises InputValidationError: on any missing, mistyped or unrecognized input.
    """
    import numpy as np

    local_copy_list = []
    remote_copy_list = []
    remote_symlink_list = []

    try:
        code = inputdict.pop(self.get_linkname('code'))
    except KeyError:
        raise InputValidationError("No code specified for this calculation")

    try:
        pw_parameters = inputdict.pop(self.get_linkname('pw_parameters'))
    except KeyError:
        raise InputValidationError("No PW parameters specified for this calculation")
    if not isinstance(pw_parameters, ParameterData):
        raise InputValidationError("PW parameters is not of type ParameterData")

    try:
        neb_parameters = inputdict.pop(self.get_linkname('neb_parameters'))
    except KeyError:
        raise InputValidationError("No NEB parameters specified for this calculation")
    if not isinstance(neb_parameters, ParameterData):
        raise InputValidationError("NEB parameters is not of type ParameterData")

    try:
        first_structure = inputdict.pop(self.get_linkname('first_structure'))
    except KeyError:
        raise InputValidationError("No initial structure specified for this calculation")
    if not isinstance(first_structure, StructureData):
        raise InputValidationError("Initial structure is not of type StructureData")

    try:
        last_structure = inputdict.pop(self.get_linkname('last_structure'))
    except KeyError:
        raise InputValidationError("No final structure specified for this calculation")
    if not isinstance(last_structure, StructureData):
        raise InputValidationError("Final structure is not of type StructureData")

    try:
        kpoints = inputdict.pop(self.get_linkname('kpoints'))
    except KeyError:
        raise InputValidationError("No kpoints specified for this calculation")
    if not isinstance(kpoints, KpointsData):
        raise InputValidationError("kpoints is not of type KpointsData")

    # Settings can be undefined, and defaults to an empty dictionary
    settings = inputdict.pop(self.get_linkname('settings'),None)
    if settings is None:
        settings_dict = {}
    else:
        if not isinstance(settings, ParameterData):
            raise InputValidationError("settings, if specified, must be of "
                                       "type ParameterData")
        # Settings converted to uppercase
        settings_dict = _uppercase_dict(settings.get_dict(),
                                        dict_name='settings')

    pseudos = {}
    # I create here a dictionary that associates each kind name to a pseudo
    # (a single pseudo link may serve several kinds, joined by '_' in the link name)
    for link in inputdict.keys():
        if link.startswith(self._get_linkname_pseudo_prefix()):
            kindstring = link[len(self._get_linkname_pseudo_prefix()):]
            kinds = kindstring.split('_')
            the_pseudo = inputdict.pop(link)
            if not isinstance(the_pseudo, UpfData):
                raise InputValidationError("Pseudo for kind(s) {} is not of "
                                           "type UpfData".format(",".join(kinds)))
            for kind in kinds:
                if kind in pseudos:
                    raise InputValidationError("Pseudo for kind {} passed "
                                               "more than one time".format(kind))
                pseudos[kind] = the_pseudo

    parent_calc_folder = inputdict.pop(self.get_linkname('parent_folder'), None)
    if parent_calc_folder is not None:
        if not isinstance(parent_calc_folder, RemoteData):
            raise InputValidationError("parent_calc_folder, if specified, "
                                       "must be of type RemoteData")

    vdw_table = inputdict.pop(self.get_linkname('vdw_table'), None)
    if vdw_table is not None:
        if not isinstance(vdw_table, SinglefileData):
            raise InputValidationError("vdw_table, if specified, "
                                       "must be of type SinglefileData")

    # Here, there should be no more parameters...
    if inputdict:
        raise InputValidationError("The following input data nodes are "
                                   "unrecognized: {}".format(inputdict.keys()))

    # Check that the first and last image have the same cell
    if abs(np.array(first_structure.cell)-
           np.array(last_structure.cell)).max() > 1.e-4:
        raise InputValidationError("Different cell in the fist and last image")

    # Check that the first and last image have the same number of sites
    if len(first_structure.sites) != len(last_structure.sites):
        raise InputValidationError("Different number of sites in the fist and last image")

    # Check that sites in the initial and final structure have the same kinds
    if not first_structure.get_site_kindnames() == last_structure.get_site_kindnames():
        raise InputValidationError("Mismatch between the kind names and/or oder between "
                                   "the first and final image")

    # Check structure, get species, check peudos
    kindnames = [k.name for k in first_structure.kinds]
    if set(kindnames) != set(pseudos.keys()):
        err_msg = ("Mismatch between the defined pseudos and the list of "
                   "kinds of the structure. Pseudos: {}; kinds: {}".format(
                   ",".join(pseudos.keys()), ",".join(list(kindnames))))
        raise InputValidationError(err_msg)

    ##############################
    # END OF INITIAL INPUT CHECK #
    ##############################

    # I create the subfolder that will contain the pseudopotentials
    tempfolder.get_subfolder(self._PSEUDO_SUBFOLDER, create=True)
    # I create the subfolder with the output data (sometimes Quantum
    # Espresso codes crash if an empty folder is not already there
    tempfolder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

    # We first prepare the NEB-specific input file
    input_filecontent = self._generate_NEBinputdata(neb_parameters,settings_dict)

    input_filename = tempfolder.get_abs_path(self._INPUT_FILE_NAME)
    with open(input_filename, 'w') as infile:
        infile.write(input_filecontent)

    # We now generate the PW input files for each input structure
    local_copy_pseudo_list = []
    for i, structure in enumerate([first_structure, last_structure]):
        # We need to a pass a copy of the settings_dict for each structure
        this_settings_dict = copy.deepcopy(settings_dict)
        input_filecontent, this_local_copy_pseudo_list = self._generate_PWCPinputdata(pw_parameters,this_settings_dict,
                                                                                      pseudos,structure,kpoints)
        local_copy_pseudo_list += this_local_copy_pseudo_list
        input_filename = tempfolder.get_abs_path('pw_{}.in'.format(i+1))
        with open(input_filename, 'w') as infile:
            infile.write(input_filecontent)

    # We need to pop the settings that were used in the PW calculations
    # NOTE(review): relies on .keys() returning a list (Python 2); under
    # Python 3 this would mutate the dict while iterating it — confirm runtime.
    for key in settings_dict.keys():
        if key not in this_settings_dict.keys():
            settings_dict.pop(key)

    # We avoid to copy twice the same pseudopotential to the same filename
    local_copy_pseudo_list = set(local_copy_pseudo_list)
    # We check that two different pseudopotentials are not copied
    # with the same name (otherwise the first is overwritten)
    if len(set([ pseudoname for local_path, pseudoname in local_copy_pseudo_list])) < len(local_copy_pseudo_list):
        raise InputValidationError("Same filename for two different pseudopotentials")

    local_copy_list += local_copy_pseudo_list

    # If present, add also the Van der Waals table to the pseudo dir
    # Note that the name of the table is not checked but should be the
    # one expected by QE.
    if vdw_table:
        local_copy_list.append(
            (
            vdw_table.get_file_abs_path(),
            os.path.join(self._PSEUDO_SUBFOLDER,
                os.path.split(vdw_table.get_file_abs_path())[1])
            )
            )

    # operations for restart
    symlink = settings_dict.pop('PARENT_FOLDER_SYMLINK',
                                self._default_symlink_usage)  # a boolean
    if symlink:
        if parent_calc_folder is not None:
            # I put the symlink to the old parent ./out folder
            remote_symlink_list.append(
                (parent_calc_folder.get_computer().uuid,
                 os.path.join(parent_calc_folder.get_remote_path(),
                              self._OUTPUT_SUBFOLDER,'*'),
                 self._OUTPUT_SUBFOLDER
                ))
            # and to the old parent prefix.path
            remote_symlink_list.append(
                (parent_calc_folder.get_computer().uuid,
                 os.path.join(parent_calc_folder.get_remote_path(),
                              '{}.path'.format(self._PREFIX)),
                 '{}.path'.format(self._PREFIX)
                ))
    else:
        # copy remote output dir and .path file, if specified
        if parent_calc_folder is not None:
            remote_copy_list.append(
                (parent_calc_folder.get_computer().uuid,
                 os.path.join(parent_calc_folder.get_remote_path(),
                              self._OUTPUT_SUBFOLDER,'*'),
                 self._OUTPUT_SUBFOLDER
                ))
            # and to the old parent prefix.path
            remote_copy_list.append(
                (parent_calc_folder.get_computer().uuid,
                 os.path.join(parent_calc_folder.get_remote_path(),
                              '{}.path'.format(self._PREFIX)),
                 '{}.path'.format(self._PREFIX)
                ))

    # here we may create an aiida.EXIT file
    create_exit_file = settings_dict.pop('ONLY_INITIALIZATION',False)
    if create_exit_file:
        exit_filename = tempfolder.get_abs_path(
            '{}.EXIT'.format(self._PREFIX))
        with open(exit_filename,'w') as f:
            f.write('\n')

    calcinfo = CalcInfo()
    codeinfo=CodeInfo()

    calcinfo.uuid = self.uuid
    # Empty command line by default
    cmdline_params = settings_dict.pop('CMDLINE', [])

    # For the time-being we only have the initial and final image
    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list
    calcinfo.remote_symlink_list = remote_symlink_list

    # In neb calculations there is no input read from standard input!!
    codeinfo.cmdline_params = (["-input_images", "2"]
                               + list(cmdline_params))
    codeinfo.stdout_name = self._OUTPUT_FILE_NAME
    codeinfo.code_uuid = code.uuid
    calcinfo.codes_info = [codeinfo]

    # Retrieve by default the output file and ...
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append(self._OUTPUT_FILE_NAME)
    calcinfo.retrieve_list.append([os.path.join(self._OUTPUT_SUBFOLDER,
                                                self._PREFIX + '_*[0-9]', 'PW.out'),
                                   '.', 2])
    calcinfo.retrieve_list.append([os.path.join(self._OUTPUT_SUBFOLDER,
                                                self._PREFIX + '_*[0-9]',self._PREFIX + '.save',
                                                self._DATAFILE_XML_BASENAME),
                                   '.', 3])
    settings_retrieve_list = settings_dict.pop('ADDITIONAL_RETRIEVE_LIST', [])
    calcinfo.retrieve_list += settings_retrieve_list
    calcinfo.retrieve_list += self._internal_retrieve_list

    # Any key still left in settings_dict (besides the parser options key,
    # which is popped here) was not understood and is an error.
    if settings_dict:
        try:
            Parserclass = self.get_parserclass()
            parser = Parserclass(self)
            parser_opts = parser.get_parser_settings_key()
            settings_dict.pop(parser_opts)
        except (KeyError, AttributeError):
            # the key parser_opts isn't inside the dictionary
            raise InputValidationError("The following keys have been found in "
                "the settings input node, but were not understood: {}".format(
                ",".join(settings_dict.keys())))

    return calcinfo
def prepare_for_submission(self, tempfolder):
    """Write the yambo input file and build the CalcInfo for submission.

    Supports three execution modes, selected through the ``settings``
    input: a p2y-only initialisation (``INITIALISE``), a restart from a
    previous yambo calculation (parent SAVE/databases copied or
    symlinked), and a fresh run starting from a PwCalculation parent
    (p2y interface + yambo init + yambo).

    :param tempfolder: aiida folder object where input files are created.
    :return: a CalcInfo with the copy/symlink/retrieve lists and the
        ordered list of codes to run.
    :raises InputValidationError: for malformed settings/parameters or
        forbidden precode flags.
    """
    # Runlevel flag -> database file to retrieve when that flag is set.
    _dbs_accepted = {
        'gw0': 'ndb.QP',
        'HF_and_locXC': 'ndb.HF_and_locXC',
    }

    local_copy_list = []
    remote_copy_list = []
    remote_symlink_list = []

    # Settings can be undefined, and default to an empty dictionary.
    # They are used for any input that doesn't fit elsewhere.
    settings = self.inputs.settings.get_dict()

    # Boolean switches controlling the run mode; validated one by one so
    # the error message names the offending key.
    initialise = settings.pop('INITIALISE', None)
    if initialise is not None:
        if not isinstance(initialise, bool):
            raise InputValidationError("INITIALISE must be " " a boolean")

    copy_save = settings.pop('COPY_SAVE', None)
    if copy_save is not None:
        if not isinstance(copy_save, bool):
            raise InputValidationError("COPY_SAVE must be " " a boolean")

    copy_dbs = settings.pop('COPY_DBS', None)
    if copy_dbs is not None:
        if not isinstance(copy_dbs, bool):
            raise InputValidationError("COPY_DBS must be " " a boolean")

    restart_yambo = settings.pop('RESTART_YAMBO', None)
    if restart_yambo is not None:
        if not isinstance(restart_yambo, bool):
            raise InputValidationError("RESTART_YAMBO must be " " a boolean")

    parameters = self.inputs.parameters
    # Parameters are only needed for an actual yambo run, not for a
    # p2y-only initialisation.
    if not initialise:
        if not isinstance(parameters, Dict):
            raise InputValidationError("parameters is not of type Dict")

    parent_calc_folder = self.inputs.parent_folder
    main_code = self.inputs.code
    preproc_code = self.inputs.preprocessing_code

    parent_calc = take_calc_from_remote(parent_calc_folder)
    # A yambo parent means the SAVE already exists: the p2y interface
    # step can be skipped.
    if parent_calc.process_type == 'aiida.calculations:yambo.yambo':
        yambo_parent = True
    else:
        yambo_parent = False

    # flags for yambo interfaces (p2y command line); this input is
    # optional, so fall back to an empty Dict when it is absent.
    except_msg = None  # placeholder kept local; see try below
    try:
        precode_param_dict = self.inputs.precode_parameters
    except Exception:  # narrowed from a bare except: the input is simply absent
        precode_param_dict = Dict(dict={})

    # check the precode parameters given in input
    input_cmdline = settings.pop('CMDLINE', None)
    import re
    precode_params_list = []  #['cd aiida.save'] ##.format(parent_calc_folder._PREFIX)
    pattern = re.compile(r"(^\-)([a-zA-Z])")
    for key, value in six.iteritems(precode_param_dict.get_dict()):
        if pattern.search(key) is not None:
            # -O/-H/-h/-F would redirect p2y output/databases and break
            # the folder layout this plugin expects.
            if key == '-O' or key == '-H' or key == '-h' or key == '-F':
                raise InputValidationError(
                    "Precode flag {} is not allowed".format(str(key)))
            else:
                if precode_param_dict[key] is True:
                    # boolean flag: emit the switch alone
                    precode_params_list.append(str(key))
                elif precode_param_dict[key] is False:
                    pass
                else:
                    # switch with an argument: emit both tokens
                    precode_params_list.append('{}'.format(str(key)))
                    precode_params_list.append('{}'.format(str(value)))
        else:
            raise InputValidationError("Wrong format of precode_parameters")
    # Adding manual cmdline input (e.g. for DB fragmentation)
    if input_cmdline is not None:
        precode_params_list = precode_params_list + input_cmdline

    # TODO: check that remote data must be on the same computer

    ##############################
    # END OF INITIAL INPUT CHECK #
    ##############################

    if not initialise:
        ###################################################
        # Prepare yambo input file
        ###################################################
        params_dict = parameters.get_dict()

        # Booleans become bare runlevel flags in the input file; every
        # other entry becomes "key = value [units]".
        boolean_dict = {
            k: v
            for k, v in six.iteritems(params_dict) if isinstance(v, bool)
        }
        params_dict = {
            k: v
            for k, v in six.iteritems(params_dict)
            if k not in list(boolean_dict.keys())
        }

        # reorganize the dictionary and create a list of dictionaries
        # with key, value and units ("<key>_units" companions folded in)
        parameters_list = []
        for k, v in six.iteritems(params_dict):
            if "_units" in k:
                continue
            units_key = "{}_units".format(k)
            try:
                units = params_dict[units_key]
            except KeyError:
                units = None
            this_dict = {}
            this_dict['key'] = k
            this_dict['value'] = v
            this_dict['units'] = units
            parameters_list.append(this_dict)

        input_filename = tempfolder.get_abs_path(
            self.metadata.options.input_filename)

        with open(input_filename, 'w') as infile:
            infile.write(self.metadata.options.logostring)

            for k, v in six.iteritems(boolean_dict):
                if v:
                    infile.write("{}\n".format(k))

            for this_dict in parameters_list:
                key = this_dict['key']
                value = this_dict['value']
                units = this_dict['units']
                if isinstance(value, list):
                    # Lists become a %-delimited block; a flat list
                    # (items not iterable) is written as a single row.
                    value_string = ''
                    try:
                        for v in value:
                            value_string += " | ".join(
                                [str(_) for _ in v]) + " |\n"
                    except TypeError:  # narrowed from bare except: item not iterable
                        value_string += " | ".join(
                            [str(_) for _ in value]) + " |\n"
                    the_string = "% {}\n {}".format(key, value_string)
                    the_string += "%"
                else:
                    # strings are quoted, numbers written as-is
                    the_value = '"{}"'.format(value) if isinstance(
                        value, six.string_types) else '{}'.format(value)
                    the_string = "{} = {}".format(key, the_value)
                    if units is not None:
                        the_string += " {}".format(units)
                infile.write(the_string + "\n")

    ############################################
    # set copy of the parent calculation
    ############################################
    # Reload the parent calc node from the remote folder's provenance:
    # last incoming node for the workchain case, labelled link otherwise.
    try:
        parent_calc = parent_calc_folder.get_incoming().all_nodes()[
            -1]  #to load the node from a workchain...
    except Exception:  # narrowed from a bare except
        parent_calc = parent_calc_folder.get_incoming().get_node_by_label(
            'remote_folder')

    if yambo_parent:
        # Reuse the parent SAVE: copy it (COPY_SAVE) or symlink it.  The
        # fallback targets parents whose SAVE sits under out/aiida.save.
        if copy_save:
            try:
                remote_copy_list.append(
                    (parent_calc_folder.computer.uuid,
                     parent_calc_folder.get_remote_path() + "/SAVE/",
                     './SAVE/'))
            except Exception:  # narrowed from a bare except
                remote_copy_list.append(
                    (parent_calc_folder.computer.uuid,
                     parent_calc_folder.get_remote_path() +
                     "out/aiida.save/SAVE/", './SAVE/'))
        else:
            try:
                remote_symlink_list.append(
                    (parent_calc_folder.computer.uuid,
                     parent_calc_folder.get_remote_path() + "/SAVE/",
                     './SAVE/'))
            except Exception:  # narrowed from a bare except
                remote_symlink_list.append(
                    (parent_calc_folder.computer.uuid,
                     parent_calc_folder.get_remote_path() +
                     "out/aiida.save/SAVE/", './SAVE/'))
        if copy_dbs:
            remote_copy_list.append(
                (parent_calc_folder.computer.uuid,
                 parent_calc_folder.get_remote_path() + "/aiida.out/",
                 './aiida.out/'))
        if restart_yambo:
            remote_symlink_list.append(
                (parent_calc_folder.computer.uuid,
                 parent_calc_folder.get_remote_path() + "/aiida.out/",
                 './aiida.out/'))
    else:
        # QE parent: copy the scf/nscf save folder so p2y can build SAVE.
        remote_copy_list.append((
            parent_calc_folder.computer.uuid,
            os.path.join(parent_calc_folder.get_remote_path(),
                         PwCalculation._OUTPUT_SUBFOLDER, "aiida.save",
                         "*"),  ##.format(parent_calc_folder._PREFIX)
            "."))

    ############################################
    # set Calcinfo
    ############################################
    calcinfo = CalcInfo()
    calcinfo.uuid = self.uuid
    calcinfo.local_copy_list = []
    calcinfo.remote_copy_list = remote_copy_list
    calcinfo.remote_symlink_list = remote_symlink_list

    # Retrieve by default the report/log/output files
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append('r*')
    calcinfo.retrieve_list.append('l*')
    calcinfo.retrieve_list.append('o*')
    calcinfo.retrieve_list.append('LOG/l*_CPU_1')
    calcinfo.retrieve_list.append('LOG/l*_CPU_2')
    calcinfo.retrieve_list.append('*stderr*')  #standard errors

    extra_retrieved = []

    if initialise:
        # extra_retrieved.append('SAVE/'+_dbs_accepted['ns.db1'])
        pass
    else:
        # retrieve the databases produced by the requested runlevels
        for dbs in _dbs_accepted.keys():
            db = boolean_dict.pop(dbs, False)
            if db:
                extra_retrieved.append('aiida.out/' + _dbs_accepted[dbs])

    additional = settings.pop('ADDITIONAL_RETRIEVE_LIST', [])
    if additional:
        # BUGFIX: a list of extra paths must be merged element-wise; the
        # previous `append` nested the whole list as a single (invalid)
        # retrieve entry.  A single string is still accepted as before.
        if isinstance(additional, list):
            extra_retrieved.extend(additional)
        else:
            extra_retrieved.append(additional)

    for extra in extra_retrieved:
        calcinfo.retrieve_list.append(extra)

    from aiida.common.datastructures import CodeRunMode, CodeInfo

    # c1 = interface dft codes and yambo (ex. p2y or a2y)
    c1 = CodeInfo()
    c1.withmpi = True
    c1.cmdline_params = precode_params_list

    # c2 = yambo initialization
    c2 = CodeInfo()
    c2.withmpi = True
    c2.cmdline_params = []
    c2.code_uuid = main_code.uuid

    # if the parent calculation is a yambo calculation skip the interface
    # (c1) and, unless the parent was only initialised, the init step (c2)
    if yambo_parent:
        try:
            parent_settings = _uppercase_dict(
                parent_calc.inputs.settings.get_dict(),
                dict_name='parent settings')
            parent_initialise = parent_settings['INITIALISE']
        except KeyError:
            parent_initialise = False
        c1 = None
        if not parent_initialise:
            c2 = None
    else:
        c1.cmdline_params = precode_params_list
        c1.code_uuid = preproc_code.uuid

    # c3 = yambo calculation
    c3 = CodeInfo()
    c3.withmpi = True
    #c3.withmpi = self.get_withmpi()
    c3.cmdline_params = [
        "-F", self.metadata.options.input_filename, \
        '-J', self.metadata.options.output_filename, \
    ]
    c3.code_uuid = main_code.uuid

    # An initialisation-only run executes just the interface code.
    if initialise:
        c2 = None
        c3 = None

    # logic of the execution
    if yambo_parent:
        if not parent_initialise:
            calcinfo.codes_info = [c3]
        else:
            calcinfo.codes_info = [c2, c3]
    elif initialise:
        calcinfo.codes_info = [c1]
    else:
        calcinfo.codes_info = [c1, c2, c3]

    calcinfo.codes_run_mode = CodeRunMode.SERIAL

    # Anything left in settings was not consumed above: flag it.
    if settings:
        raise InputValidationError(
            "The following keys have been found in "
            "the settings input node, but were not understood: {}".format(
                ",".join(list(settings.keys()))))

    return calcinfo