def test_double_list_invalid_type(self):
    """A non-integer, non-string element (here a tuple) in the doubly nested list must be rejected."""
    invalid_parameters = {'starting_ns_eigenvalues': [[1, (), 2, 10]]}

    for keyword, entry in invalid_parameters.items():
        with self.assertRaises(ValueError):
            convert_input_to_namelist_entry(keyword, entry, self.mapping)
def test_double_list_no_mapping(self):
    """A doubly nested list containing kind names requires a mapping; passing ``None`` must raise."""
    valid_kind_parameters = {'starting_ns_eigenvalues': [[1, 'Co', 2, 10]]}

    for keyword, entry in valid_kind_parameters.items():
        with self.assertRaises(ValueError):
            convert_input_to_namelist_entry(keyword, entry, None)
def test_double_list_invalid_kind(self):
    """A string element in the nested list that is not a key of the mapping dictionary must raise."""
    unknown_kind_parameters = {'starting_ns_eigenvalues': [[1, 'Ni', 2, 10]]}

    for keyword, entry in unknown_kind_parameters.items():
        with self.assertRaises(ValueError):
            convert_input_to_namelist_entry(keyword, entry, self.mapping)
def validate_converted_output(self, parameters, expected):
    """Check that each converted namelist entry matches the expected strings, line by line."""
    for key, value in parameters.items():
        rendered = convert_input_to_namelist_entry(key, value, self.mapping)
        # Drop empty lines before comparing; filter(None, ...) keeps only truthy strings.
        non_empty_lines = filter(None, rendered.split('\n'))
        for position, text in enumerate(non_empty_lines):
            self.assertIn(expected[key][position], text)
def _generate_NEBinputdata(self,neb_parameters,settings_dict):
    """
    Generate the input file content for the NEB part of the calculation.

    :param neb_parameters: a node exposing ``get_dict()`` with the namelist parameters
    :param settings_dict: settings dictionary; the ``CLIMBING_IMAGES`` key is popped
        (i.e. the caller's dictionary is mutated) when ``ci_scheme`` is ``manual``
    :return: the full plain-text content of the NEB input file
    :raises InputValidationError: for missing/invalid climbing images or unknown namelists
    """
    # I put the first-level keys as uppercase (i.e., namelist and card names)
    # and the second-level keys as lowercase
    # (deeper levels are unchanged)
    # NOTE: `iteritems` is Python 2 only — this block predates the py3 port.
    input_params = _uppercase_dict(neb_parameters.get_dict(), dict_name='parameters')
    input_params = {k: _lowercase_dict(v, dict_name=k) for k, v in input_params.iteritems()}

    # For the neb input there is no blocked keyword

    # Create an empty dictionary for the compulsory namelist 'PATH'
    # if not present
    if 'PATH' not in input_params:
        input_params['PATH'] = {}

    # In case of climbing image, we need the corresponding card
    climbing_image = False
    if input_params['PATH'].get('ci_scheme','no-ci').lower() in ['manual']:
        climbing_image = True
        try:
            climbing_image_list = settings_dict.pop("CLIMBING_IMAGES")
        except KeyError:
            raise InputValidationError("No climbing image specified for this calculation")
        if not isinstance(climbing_image_list, list):
            raise InputValidationError("Climbing images should be provided as a list")
        # Valid indices are 2 .. num_of_images-1 (first and last image excluded).
        # NOTE(review): the error message says "between the first and the last image",
        # but the check excludes the last image index itself — confirm intended wording.
        if [ i for i in climbing_image_list if i<2 or i >= input_params['PATH'].get('num_of_images',2)]:
            raise InputValidationError("The climbing images should be in the range between the first "
                                       "and the last image")

        climbing_image_card = "CLIMBING_IMAGES\n"
        climbing_image_card += ", ".join([str(_) for _ in climbing_image_list]) + "\n"

    inputfile = ""
    inputfile += "&PATH\n"
    # namelist content; set to {} if not present, so that we leave an
    # empty namelist
    namelist = input_params.pop('PATH', {})
    for k, v in sorted(namelist.iteritems()):
        inputfile += convert_input_to_namelist_entry(k, v)
    inputfile += "/\n"

    # Write cards now
    if climbing_image:
        inputfile += climbing_image_card

    # After popping 'PATH', anything left over is an unsupported namelist.
    if input_params:
        raise InputValidationError(
            "The following namelists are specified in input_params, but are "
            "not valid namelists for the current type of calculation: "
            "{}".format(",".join(input_params.keys())))

    return inputfile
def validate_converted_output(self, parameters, expected):
    """
    Verify, for every key/value pair in ``parameters``, that the output of
    convert_input_to_namelist_entry contains the corresponding expected strings.
    """
    for key, value in parameters.items():
        rendered = convert_input_to_namelist_entry(key, value, self.mapping)
        significant = [entry for entry in rendered.split('\n') if entry]
        for position, entry in enumerate(significant):
            self.assertIn(expected[key][position], entry)
def generate_input_file(parameters):
    """Render Fortran-namelist input file content from a dictionary of parameters.

    :param parameters: 'dict' containing the fortran namelists and parameters to be used.
        e.g.: {'CONTROL':{'calculation':'scf'}, 'SYSTEM':{'ecutwfc':30}}
    :return: 'str' containing the input_file content a plain text.
    """
    rendered = []

    for namelist_name, flags in parameters.items():
        rendered.append('&{0}'.format(namelist_name))
        # Each converted entry ends with a newline; strip it since join adds them back.
        rendered.extend(
            convert_input_to_namelist_entry(flag, flag_value)[:-1]
            for flag, flag_value in sorted(flags.items())
        )
        rendered.append('/')

    return '\n'.join(rendered)
def write_input_files(self, tempfolder, input_parameters):
    """
    Take the input_parameters dictionary with the namelists and their flags
    and write the input file to disk in the temporary folder

    :param tempfolder: an aiida.common.folders.Folder to temporarily write files on disk
    :param input_parameters: a dictionary with input namelists and their flags;
        NOTE: the dictionary is mutated — each compulsory namelist is popped,
        and a KeyError is raised here if one is missing (presumably validated
        upstream — confirm)
    :raises InputValidationError: if unrecognized namelists remain after writing
    """
    filename = tempfolder.get_abs_path(self.input_file_name)

    with open(filename, 'w') as handle:
        for namelist_name in self._compulsory_namelists:
            namelist = input_parameters.pop(namelist_name)
            handle.write('&{0}\n'.format(namelist_name))
            # Sort flags for a deterministic input file.
            # NOTE: `iteritems` is Python 2 only.
            for key, value in sorted(namelist.iteritems()):
                handle.write(convert_input_to_namelist_entry(key, value))
            handle.write('/\n')

    # Anything left after popping the compulsory namelists is unsupported.
    if input_parameters:
        raise InputValidationError(
            'these specified namelists are invalid: {}'.format(', '.join(
                input_parameters.keys())))
def prepare_for_submission(self, folder):  # pylint: disable=too-many-branches,too-many-statements
    """Prepare the calculation job for submission by transforming input nodes into input files.

    In addition to the input files being written to the sandbox folder, a `CalcInfo` instance will be returned that
    contains lists of files that need to be copied to the remote machine before job submission, as well as file
    lists that are to be retrieved after job completion.

    :param folder: a sandbox folder to temporarily write files on disk.
    :return: :py:`~aiida.common.datastructures.CalcInfo` instance.
    """
    # Put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
    parameters = _uppercase_dict(self.inputs.parameters.get_dict(), dict_name='parameters')
    parameters = {k: _lowercase_dict(v, dict_name=k) for k, v in parameters.items()}

    # Same for settings.
    if 'settings' in self.inputs:
        settings = _uppercase_dict(self.inputs.settings.get_dict(), dict_name='settings')
    else:
        settings = {}

    # Set default values. NOTE: this is different from PW/CP
    # Each blocked keyword is a (namelist, key, forced_value) triple: the user may
    # not set it explicitly, and it is always written with the forced value.
    for blocked in self._blocked_keywords:
        namelist = blocked[0].upper()
        key = blocked[1].lower()
        value = blocked[2]

        if namelist in parameters:
            if key in parameters[namelist]:
                raise exceptions.InputValidationError(
                    "You cannot specify explicitly the '{}' key in the '{}' "
                    'namelist.'.format(key, namelist))
        else:
            parameters[namelist] = {}
        parameters[namelist][key] = value

    # Restrict the plot output to the file types that we want to be able to parse
    dimension_to_output_format = {
        0: 0,  # Spherical integration -> Gnuplot, 1D
        1: 0,  # 1D -> Gnuplot, 1D
        2: 7,  # 2D -> Gnuplot, 2D
        3: 6,  # 3D -> Gaussian cube
        4: 0,  # Polar on a sphere -> # Gnuplot, 1D
    }
    # NOTE(review): this raises KeyError if the 'PLOT' namelist or its 'iflag' key is
    # absent — presumably guaranteed by input validation elsewhere; confirm.
    parameters['PLOT']['output_format'] = dimension_to_output_format[parameters['PLOT']['iflag']]

    namelists_toprint = self._default_namelists
    input_filename = self.inputs.metadata.options.input_filename

    with folder.open(input_filename, 'w') as infile:
        for namelist_name in namelists_toprint:
            infile.write('&{0}\n'.format(namelist_name))
            # namelist content; set to {} if not present, so that we leave an empty namelist
            namelist = parameters.pop(namelist_name, {})
            for key, value in sorted(namelist.items()):
                infile.write(convert_input_to_namelist_entry(key, value))
            infile.write('/\n')

    # Check for specified namelists that are not expected
    if parameters:
        raise exceptions.InputValidationError(
            'The following namelists are specified in parameters, but are '
            'not valid namelists for the current type of calculation: '
            '{}'.format(','.join(list(parameters.keys()))))

    remote_copy_list = []
    local_copy_list = []

    # Copy remote output dir
    parent_calc_folder = self.inputs.get('parent_folder', None)
    if isinstance(parent_calc_folder, orm.RemoteData):
        remote_copy_list.append((
            parent_calc_folder.computer.uuid,
            os.path.join(parent_calc_folder.get_remote_path(), self._INPUT_SUBFOLDER),
            self._OUTPUT_SUBFOLDER
        ))
        remote_copy_list.append((
            parent_calc_folder.computer.uuid,
            os.path.join(parent_calc_folder.get_remote_path(), self._PSEUDO_SUBFOLDER),
            self._PSEUDO_SUBFOLDER
        ))
    elif isinstance(parent_calc_folder, orm.FolderData):
        # Local folder: copy every file into both the output and pseudo subfolders.
        for filename in parent_calc_folder.list_object_names():
            local_copy_list.append((
                parent_calc_folder.uuid,
                filename,
                os.path.join(self._OUTPUT_SUBFOLDER, filename)
            ))
            local_copy_list.append((
                parent_calc_folder.uuid,
                filename,
                os.path.join(self._PSEUDO_SUBFOLDER, filename)
            ))

    codeinfo = datastructures.CodeInfo()
    codeinfo.cmdline_params = settings.pop('CMDLINE', [])
    codeinfo.stdin_name = self.inputs.metadata.options.input_filename
    codeinfo.stdout_name = self.inputs.metadata.options.output_filename
    codeinfo.code_uuid = self.inputs.code.uuid

    calcinfo = datastructures.CalcInfo()
    calcinfo.codes_info = [codeinfo]
    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list

    # Retrieve by default the output file
    calcinfo.retrieve_list = [self.inputs.metadata.options.output_filename]
    calcinfo.retrieve_temporary_list = []

    # Depending on the `plot_num` and the corresponding parameters, more than one pair of `filplot` + `fileout`
    # files may be written. In that case, the data files will have `filplot` as a prefix with some suffix to
    # distinguish them from one another. The `fileout` filename will be the full data filename with the `fileout`
    # value as a suffix.
    retrieve_tuples = [self._FILEOUT, ('{}_*{}'.format(self._FILPLOT, self._FILEOUT), '.', 0)]

    # Plot files are either kept permanently or retrieved only temporarily for parsing.
    if self.inputs.metadata.options.keep_plot_file:
        calcinfo.retrieve_list.extend(retrieve_tuples)
    else:
        calcinfo.retrieve_temporary_list.extend(retrieve_tuples)

    return calcinfo
def _generate_input_files(cls, neb_parameters, settings_dict):
    """Generate the input data for the NEB part of the calculation.

    :param neb_parameters: node exposing ``get_dict()`` with the NEB namelist parameters
    :param settings_dict: settings dictionary; the ``CLIMBING_IMAGES`` key is popped,
        i.e. the caller's dictionary is mutated
    :return: the plain-text content of the NEB input file
    :raises InputValidationError: for blocked keywords set explicitly, invalid or
        inconsistent climbing-image settings, or leftover unknown namelists
    """
    # I put the first-level keys as uppercase (i.e., namelist and card names)
    # and the second-level keys as lowercase
    # (deeper levels are unchanged)
    input_params = _uppercase_dict(neb_parameters.get_dict(), dict_name='parameters')
    input_params = {k: _lowercase_dict(v, dict_name=k) for k, v in input_params.items()}

    # Force default values for blocked keywords. NOTE: this is different from PW/CP
    # Each blocked keyword is a (namelist, key, forced_value) triple.
    for blocked in cls._blocked_keywords:
        namelist = blocked[0].upper()
        key = blocked[1].lower()
        value = blocked[2]
        if namelist in input_params:
            if key in input_params[namelist]:
                raise InputValidationError(
                    f"You cannot specify explicitly the '{key}' key in the '{namelist}' namelist."
                )
        else:
            input_params[namelist] = {}
        input_params[namelist][key] = value

    # Create an empty dictionary for the compulsory namelist 'PATH' if not present
    if 'PATH' not in input_params:
        input_params['PATH'] = {}

    # In case of climbing image, we need the corresponding card
    ci_scheme = input_params['PATH'].get('ci_scheme', 'no-ci').lower()
    # Pop unconditionally so we can also reject climbing images for non-manual schemes.
    climbing_image_list = settings_dict.pop('CLIMBING_IMAGES', None)
    if ci_scheme == 'manual':
        manual_climbing_image = True
        if climbing_image_list is None:
            raise InputValidationError(
                "'ci_scheme' is {}, but no climbing images were specified for this "
                'calculation.'.format(ci_scheme)
            )
        if not isinstance(climbing_image_list, list):
            raise InputValidationError('Climbing images should be provided as a list.')
        # Valid image indices are 2 .. num_of_images-1 (first and last excluded).
        num_of_images = input_params['PATH'].get('num_of_images', 2)
        if any([(i < 2 or i >= num_of_images) for i in climbing_image_list]):
            raise InputValidationError(
                'The climbing images should be in the range between the first '
                'and the last image (excluded).'
            )

        climbing_image_card = 'CLIMBING_IMAGES\n'
        climbing_image_card += ', '.join([str(_) for _ in climbing_image_list]) + '\n'
    else:
        manual_climbing_image = False
        if climbing_image_list is not None:
            raise InputValidationError(f"Climbing images are not accepted when 'ci_scheme' is {ci_scheme}.")

    input_data = '&PATH\n'

    # namelist content; set to {} if not present, so that we leave an empty namelist
    namelist = input_params.pop('PATH', {})
    for key, value in sorted(namelist.items()):
        input_data += convert_input_to_namelist_entry(key, value)
    input_data += '/\n'

    # Write CI cards now
    if manual_climbing_image:
        input_data += climbing_image_card

    # After popping 'PATH', anything left over is an unsupported namelist.
    if input_params:
        raise InputValidationError(
            'The following namelists are specified in input_params, but are '
            'not valid namelists for the current type of calculation: '
            '{}'.format(','.join(list(input_params.keys())))
        )

    return input_data
def prepare_for_submission(self, folder):
    """Create the input files from the input nodes passed to this instance of the `CalcJob`.

    :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk
    :return: `aiida.common.datastructures.CalcInfo` instance
    """
    local_copy_list = []
    remote_copy_list = []
    remote_symlink_list = []

    if 'settings' in self.inputs:
        settings = _uppercase_dict(self.inputs.settings.get_dict(), dict_name='settings')
    else:
        settings = {}

    # The parent folder must have exactly one creating calculation.
    parent_folder = self.inputs.parent_folder
    parent_calcs = parent_folder.get_incoming(node_class=orm.CalcJobNode).all()

    if not parent_calcs:
        raise exceptions.NotExistent('parent_folder<{}> has no parent calculation'.format(parent_folder.pk))
    elif len(parent_calcs) > 1:
        raise exceptions.UniquenessError('parent_folder<{}> has multiple parent calculations'.format(parent_folder.pk))

    parent_calc = parent_calcs[0].node

    # If the parent calculation is a `PhCalculation` we are restarting
    restart_flag = parent_calc.process_type == 'aiida.calculations:quantumespresso.ph'

    # Also, the parent calculation must be on the same computer
    if not self.node.computer.uuid == parent_calc.computer.uuid:
        raise exceptions.InputValidationError(
            'Calculation has to be launched on the same computer as that of the parent: {}'
            .format(parent_calc.computer.get_name()))

    # put by default, default_parent_output_folder = ./out
    try:
        default_parent_output_folder = parent_calc.process_class._OUTPUT_SUBFOLDER
    except AttributeError:
        # Fall back to the legacy accessor on the node itself.
        try:
            default_parent_output_folder = parent_calc._get_output_folder()
        except AttributeError:
            raise exceptions.InputValidationError(
                'parent calculation does not have a default output subfolder'
            )
    parent_calc_out_subfolder = settings.pop('PARENT_CALC_OUT_SUBFOLDER', default_parent_output_folder)

    # I put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
    parameters = _uppercase_dict(self.inputs.parameters.get_dict(), dict_name='parameters')
    parameters = {
        k:
        _lowercase_dict(v, dict_name=k) for k, v in six.iteritems(parameters)
    }

    prepare_for_d3 = settings.pop('PREPARE_FOR_D3', False)
    if prepare_for_d3:
        # NOTE(review): if `_blocked_keywords` is a class-level list, `+=` mutates it
        # in place and the extra entries leak to other instances — confirm.
        self._blocked_keywords += [('INPUTPH', 'fildrho'),
                                   ('INPUTPH', 'drho_star%open'),
                                   ('INPUTPH', 'drho_star%ext'),
                                   ('INPUTPH', 'drho_star%dir')]

    # Blocked flags may not be set explicitly by the user; they are forced below.
    for namelist, flag in self._blocked_keywords:
        if namelist in parameters:
            if flag in parameters[namelist]:
                raise exceptions.InputValidationError(
                    "Cannot specify explicitly the '{}' flag in the '{}' namelist or card."
                    .format(flag, namelist))

    if 'INPUTPH' not in parameters:
        raise exceptions.InputValidationError(
            'required namelist INPUTPH not specified')

    # Force the flags controlled by the plugin.
    parameters['INPUTPH']['outdir'] = self._OUTPUT_SUBFOLDER
    parameters['INPUTPH']['iverbosity'] = 1
    parameters['INPUTPH']['prefix'] = self._PREFIX
    parameters['INPUTPH']['fildyn'] = self._OUTPUT_DYNAMICAL_MATRIX_PREFIX
    if prepare_for_d3:
        parameters['INPUTPH']['fildrho'] = self._DRHO_PREFIX
        parameters['INPUTPH']['drho_star%open'] = True
        parameters['INPUTPH']['drho_star%ext'] = self._DRHO_STAR_EXT
        parameters['INPUTPH']['drho_star%dir'] = self._FOLDER_DRHO

    # q-points: either a regular mesh (ldisp) or an explicit list of points.
    try:
        mesh, offset = self.inputs.qpoints.get_kpoints_mesh()

        if any([i != 0. for i in offset]):
            raise NotImplementedError(
                'Computation of phonons on a mesh with non zero offset is not implemented, at the level of ph.x'
            )

        parameters['INPUTPH']['ldisp'] = True
        parameters['INPUTPH']['nq1'] = mesh[0]
        parameters['INPUTPH']['nq2'] = mesh[1]
        parameters['INPUTPH']['nq3'] = mesh[2]

        postpend_text = None
    except AttributeError:
        # this is the case where no mesh was set. Maybe it's a list
        try:
            list_of_points = self.inputs.qpoints.get_kpoints(cartesian=True)
        except AttributeError:
            # In this case, there are no info on the qpoints at all
            raise exceptions.InputValidationError(
                'Input `qpoints` contains neither a mesh nor a list of points'
            )

        # change to 2pi/a coordinates
        lattice_parameter = numpy.linalg.norm(self.inputs.qpoints.cell[0])
        list_of_points *= lattice_parameter / (2. * numpy.pi)

        # add here the list of point coordinates
        if len(list_of_points) > 1:
            parameters['INPUTPH']['qplot'] = True
            parameters['INPUTPH']['ldisp'] = True
            postpend_text = u'{}\n'.format(len(list_of_points))
            for points in list_of_points:
                postpend_text += u'{0:18.10f} {1:18.10f} {2:18.10f} 1\n'.format(*points)
            # Note: the weight is fixed to 1, because ph.x calls these
            # things weights but they are not such. If they are going to
            # exist with the meaning of weights, they will be supported
        else:
            parameters['INPUTPH']['ldisp'] = False
            postpend_text = u''
            for points in list_of_points:
                postpend_text += u'{0:18.10f} {1:18.10f} {2:18.10f}\n'.format(*points)

    # customized namelists, otherwise not present in the distributed ph code
    try:
        namelists_toprint = settings.pop('NAMELISTS')
        if not isinstance(namelists_toprint, list):
            raise exceptions.InputValidationError(
                "The 'NAMELISTS' value, if specified in the settings input "
                'node, must be a list of strings')
    except KeyError:
        # list of namelists not specified in the settings; do automatic detection
        namelists_toprint = self._compulsory_namelists

    # create a folder for the dynamical matrices
    if not restart_flag:  # if it is a restart, it will be copied over
        folder.get_subfolder(self._FOLDER_DYNAMICAL_MATRIX, create=True)

    with folder.open(self.metadata.options.input_filename, 'w') as infile:
        for namelist_name in namelists_toprint:
            infile.write(u'&{0}\n'.format(namelist_name))
            # namelist content; set to {} if not present, so that we leave an empty namelist
            namelist = parameters.pop(namelist_name, {})
            for key, value in sorted(six.iteritems(namelist)):
                infile.write(convert_input_to_namelist_entry(key, value))
            infile.write(u'/\n')

        # add list of qpoints if required
        if postpend_text is not None:
            infile.write(postpend_text)

    if parameters:
        raise exceptions.InputValidationError(
            'The following namelists are specified in parameters, but are '
            'not valid namelists for the current type of calculation: '
            '{}'.format(','.join(list(parameters.keys()))))

    # copy the parent scratch
    symlink = settings.pop('PARENT_FOLDER_SYMLINK', self._default_symlink_usage)  # a boolean
    if symlink:
        # I create a symlink to each file/folder in the parent ./out
        folder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

        remote_symlink_list.append(
            (parent_folder.computer.uuid,
             os.path.join(parent_folder.get_remote_path(),
                          parent_calc_out_subfolder, '*'),
             self._OUTPUT_SUBFOLDER))

        # I also create a symlink for the ./pseudo folder
        # TODO: suppress this when the recover option of QE will be fixed
        # (bug when trying to find pseudo file)
        remote_symlink_list.append(
            (parent_folder.computer.uuid,
             os.path.join(parent_folder.get_remote_path(),
                          self._get_pseudo_folder()),
             self._get_pseudo_folder()))
    else:
        # here I copy the whole folder ./out
        remote_copy_list.append(
            (parent_folder.computer.uuid,
             os.path.join(parent_folder.get_remote_path(),
                          parent_calc_out_subfolder),
             self._OUTPUT_SUBFOLDER))
        # I also copy the ./pseudo folder
        # TODO: suppress this when the recover option of QE will be fixed
        # (bug when trying to find pseudo file)
        remote_copy_list.append(
            (parent_folder.computer.uuid,
             os.path.join(parent_folder.get_remote_path(),
                          self._get_pseudo_folder()),
             self._get_pseudo_folder()))

    if restart_flag:  # in this case, copy in addition also the dynamical matrices
        if symlink:
            remote_symlink_list.append(
                (parent_folder.computer.uuid,
                 os.path.join(parent_folder.get_remote_path(),
                              self._FOLDER_DYNAMICAL_MATRIX),
                 self._FOLDER_DYNAMICAL_MATRIX))
        else:
            # copy the dynamical matrices
            # no need to copy the _ph0, since I copied already the whole ./out folder
            remote_copy_list.append(
                (parent_folder.computer.uuid,
                 os.path.join(parent_folder.get_remote_path(),
                              self._FOLDER_DYNAMICAL_MATRIX),
                 '.'))

    # Create an `.EXIT` file if `only_initialization` flag in `settings` is set to `True`
    if settings.pop('ONLY_INITIALIZATION', False):
        with folder.open('{}.EXIT'.format(self._PREFIX), 'w') as handle:
            handle.write('\n')

    codeinfo = datastructures.CodeInfo()
    codeinfo.cmdline_params = (
        list(settings.pop('CMDLINE', [])) + ['-in', self.metadata.options.input_filename])
    codeinfo.stdout_name = self.metadata.options.output_filename
    codeinfo.code_uuid = self.inputs.code.uuid

    calcinfo = datastructures.CalcInfo()
    calcinfo.uuid = str(self.uuid)
    calcinfo.codes_info = [codeinfo]
    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list
    calcinfo.remote_symlink_list = remote_symlink_list

    # Retrieve by default the output file and the xml file
    filepath_xml_tensor = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0', '{}.phsave'.format(self._PREFIX))
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append(self.metadata.options.output_filename)
    calcinfo.retrieve_list.append(self._FOLDER_DYNAMICAL_MATRIX)
    calcinfo.retrieve_list.append(
        os.path.join(filepath_xml_tensor, self._OUTPUT_XML_TENSOR_FILE_NAME))
    calcinfo.retrieve_list += settings.pop('ADDITIONAL_RETRIEVE_LIST', [])

    # At this point every recognized settings key has been popped; leftovers are errors.
    if settings:
        unknown_keys = ', '.join(list(settings.keys()))
        raise exceptions.InputValidationError(
            '`settings` contained unexpected keys: {}'.format(unknown_keys))

    return calcinfo
def prepare_for_submission(self, folder):  # pylint: disable=too-many-statements,too-many-branches
    """Prepare the calculation job for submission by transforming input nodes into input files.

    In addition to the input files being written to the sandbox folder, a `CalcInfo` instance will be returned that
    contains lists of files that need to be copied to the remote machine before job submission, as well as file
    lists that are to be retrieved after job completion.

    :param folder: a sandbox folder to temporarily write files on disk.
    :return: :py:`~aiida.common.datastructures.CalcInfo` instance.
    """

    def test_offset(offset):
        """Check if the grid has an offset."""
        if any([i != 0. for i in offset]):
            raise NotImplementedError(
                'Computation of electron-phonon on a mesh with non zero offset is not implemented, '
                'at the level of epw.x')

    # pylint: disable=too-many-statements,too-many-branches
    local_copy_list = []
    remote_copy_list = []
    remote_symlink_list = []

    if 'settings' in self.inputs:
        settings = _uppercase_dict(self.inputs.settings.get_dict(), dict_name='settings')
    else:
        settings = {}

    # Copy nscf folder
    parent_folder_nscf = self.inputs.parent_folder_nscf
    parent_calc_nscf = parent_folder_nscf.creator

    if parent_calc_nscf is None:
        raise exceptions.NotExistent('parent_folder<{}> has no parent calculation'.format(parent_folder_nscf.pk))

    # Also, the parent calculation must be on the same computer
    if not self.node.computer.uuid == parent_calc_nscf.computer.uuid:
        raise exceptions.InputValidationError(
            'Calculation has to be launched on the same computer as that of the parent: {}'.format(
                parent_calc_nscf.computer.get_name()))

    # put by default, default_parent_output_folder = ./out
    parent_calc_out_subfolder_nscf = parent_calc_nscf.process_class._OUTPUT_SUBFOLDER  # pylint: disable=protected-access

    # Now phonon folder
    parent_folder_ph = self.inputs.parent_folder_ph
    parent_calc_ph = parent_folder_ph.creator

    # Also, the parent calculation must be on the same computer
    if not self.node.computer.uuid == parent_calc_ph.computer.uuid:
        raise exceptions.InputValidationError(
            'Calculation has to be launched on the same computer as that of the parent: {}'.format(
                parent_calc_ph.computer.get_name()))

    # I put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
    parameters = _uppercase_dict(self.inputs.parameters.get_dict(), dict_name='parameters')
    parameters = {k: _lowercase_dict(v, dict_name=k) for k, v in parameters.items()}

    if 'INPUTEPW' not in parameters:
        raise exceptions.InputValidationError('required namelist INPUTEPW not specified')

    # Force the flags controlled by the plugin.
    parameters['INPUTEPW']['outdir'] = self._OUTPUT_SUBFOLDER
    parameters['INPUTEPW']['iverbosity'] = 1
    parameters['INPUTEPW']['prefix'] = self._PREFIX

    # Coarse and fine k/q grids must all be regular meshes without offset.
    # NOTE(review): only NotImplementedError (from test_offset) is caught here; an
    # AttributeError from a list-type kpoints node would propagate uncaught — confirm.
    try:
        mesh, offset = self.inputs.qpoints.get_kpoints_mesh()
        test_offset(offset)
        parameters['INPUTEPW']['nq1'] = mesh[0]
        parameters['INPUTEPW']['nq2'] = mesh[1]
        parameters['INPUTEPW']['nq3'] = mesh[2]
        postpend_text = None
    except NotImplementedError as exception:
        raise exceptions.InputValidationError('Cannot get the coarse q-point grid') from exception

    try:
        mesh, offset = self.inputs.kpoints.get_kpoints_mesh()
        test_offset(offset)
        parameters['INPUTEPW']['nk1'] = mesh[0]
        parameters['INPUTEPW']['nk2'] = mesh[1]
        parameters['INPUTEPW']['nk3'] = mesh[2]
        postpend_text = None
    except NotImplementedError as exception:
        raise exceptions.InputValidationError('Cannot get the coarse k-point grid') from exception

    try:
        mesh, offset = self.inputs.qfpoints.get_kpoints_mesh()
        test_offset(offset)
        parameters['INPUTEPW']['nqf1'] = mesh[0]
        parameters['INPUTEPW']['nqf2'] = mesh[1]
        parameters['INPUTEPW']['nqf3'] = mesh[2]
        postpend_text = None
    except NotImplementedError as exception:
        raise exceptions.InputValidationError('Cannot get the fine q-point grid') from exception

    try:
        mesh, offset = self.inputs.kfpoints.get_kpoints_mesh()
        test_offset(offset)
        parameters['INPUTEPW']['nkf1'] = mesh[0]
        parameters['INPUTEPW']['nkf2'] = mesh[1]
        parameters['INPUTEPW']['nkf3'] = mesh[2]
        postpend_text = None
    except NotImplementedError as exception:
        raise exceptions.InputValidationError('Cannot get the fine k-point grid') from exception

    # customized namelists, otherwise not present in the distributed epw code
    try:
        namelists_toprint = settings.pop('NAMELISTS')
        if not isinstance(namelists_toprint, list):
            raise exceptions.InputValidationError(
                "The 'NAMELISTS' value, if specified in the settings input "
                'node, must be a list of strings')
    except KeyError:
        # list of namelists not specified in the settings; do automatic detection
        namelists_toprint = self._compulsory_namelists

    # create the save folder with dvscf and dyn files.
    folder.get_subfolder(self._FOLDER_SAVE, create=True)

    # List of IBZ q-point to be added below EPW. To be removed when removed from EPW.
    qibz_ar = []
    for key, value in sorted(parent_folder_ph.creator.outputs.output_parameters.get_dict().items()):
        if key.startswith('dynamical_matrix_'):
            qibz_ar.append(value['q_point'])

    qibz_node = orm.ArrayData()
    qibz_node.set_array('qibz', np.array(qibz_ar))

    list_of_points = qibz_node.get_array('qibz')
    # Number of q-point in the irreducible Brillouin Zone.
    # NOTE(review): `len(list_of_points[0, :])` counts the components of the FIRST
    # q-point (presumably 3), not the number of q-points — verify against
    # `len(list_of_points)` before relying on `nqpt` below.
    nqpt = len(list_of_points[0, :])

    # add here the list of point coordinates
    if len(list_of_points) > 1:
        postpend_text = '{} cartesian\n'.format(len(list_of_points))
        for points in list_of_points:
            postpend_text += '{0:18.10f} {1:18.10f} {2:18.10f} \n'.format(*points)

    with folder.open(self.metadata.options.input_filename, 'w') as infile:
        for namelist_name in namelists_toprint:
            infile.write('&{0}\n'.format(namelist_name))
            # namelist content; set to {} if not present, so that we leave an empty namelist
            namelist = parameters.pop(namelist_name, {})
            for key, value in sorted(namelist.items()):
                infile.write(convert_input_to_namelist_entry(key, value))
            infile.write('/\n')

        # add list of qpoints if required
        if postpend_text is not None:
            infile.write(postpend_text)

    if parameters:
        raise exceptions.InputValidationError(
            'The following namelists are specified in parameters, but are '
            'not valid namelists for the current type of calculation: '
            '{}'.format(','.join(list(parameters.keys()))))

    # copy the parent scratch
    symlink = settings.pop('PARENT_FOLDER_SYMLINK', self._default_symlink_usage)  # a boolean
    if symlink:
        # I create a symlink to each file/folder in the parent ./out
        folder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

        remote_symlink_list.append((
            parent_folder_nscf.computer.uuid,
            os.path.join(parent_folder_nscf.get_remote_path(), parent_calc_out_subfolder_nscf, '*'),
            self._OUTPUT_SUBFOLDER
        ))
    else:
        # here I copy the whole folder ./out
        remote_copy_list.append((
            parent_folder_nscf.computer.uuid,
            os.path.join(parent_folder_nscf.get_remote_path(), parent_calc_out_subfolder_nscf),
            self._OUTPUT_SUBFOLDER
        ))

    # Stage the phonon dyn/dvscf files into ./save with the names expected by epw.x.
    prefix = self._PREFIX
    for iqpt in range(1, nqpt+1):
        label = str(iqpt)
        tmp_path = os.path.join(self._FOLDER_DYNAMICAL_MATRIX, 'dynamical-matrix-0')
        remote_copy_list.append((
            parent_folder_ph.computer.uuid,
            os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
            'save/'+prefix+'.dyn_q0'))
        tmp_path = os.path.join(self._FOLDER_DYNAMICAL_MATRIX, 'dynamical-matrix-'+label)
        remote_copy_list.append((
            parent_folder_ph.computer.uuid,
            os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
            'save/'+prefix+'.dyn_q'+label))

        if iqpt == 1:
            tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/'+prefix+'.dvscf*')
            remote_copy_list.append((
                parent_folder_ph.computer.uuid,
                os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                'save/'+prefix+'.dvscf_q'+label))
            tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/'+prefix+'.phsave')
            remote_copy_list.append((
                parent_folder_ph.computer.uuid,
                os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                'save/'))
        else:
            tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/'+prefix+'.q_'+label+'/'+prefix+'.dvscf*')
            remote_copy_list.append((
                parent_folder_ph.computer.uuid,
                os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                'save/'+prefix+'.dvscf_q'+label))

    codeinfo = datastructures.CodeInfo()
    codeinfo.cmdline_params = (list(settings.pop('CMDLINE', [])) + ['-in', self.metadata.options.input_filename])
    codeinfo.stdout_name = self.metadata.options.output_filename
    codeinfo.code_uuid = self.inputs.code.uuid

    calcinfo = datastructures.CalcInfo()
    calcinfo.codes_info = [codeinfo]
    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list
    calcinfo.remote_symlink_list = remote_symlink_list

    # Retrieve by default the output file
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append(self.metadata.options.output_filename)
    calcinfo.retrieve_list += settings.pop('ADDITIONAL_RETRIEVE_LIST', [])

    # Every recognized settings key has been popped by now; leftovers are errors.
    if settings:
        unknown_keys = ', '.join(list(settings.keys()))
        raise exceptions.InputValidationError('`settings` contained unexpected keys: {}'.format(unknown_keys))

    return calcinfo
def _prepare_for_submission(self, tempfolder, inputdict):
    """
    This is the routine to be called when you want to create
    the input files and related stuff with a plugin.

    :param tempfolder: a aiida.common.folders.Folder subclass where
                       the plugin should put all its files.
    :param inputdict: a dictionary with the input nodes, as they would
            be returned by get_inputs_dict (without the Code!)
    :return: a CalcInfo instance describing files to copy/retrieve and
             the command line to run.
    :raises InputValidationError: if a required input node is missing,
            an input node has the wrong type, or unknown input links /
            settings keys remain after processing.
    """
    # Accumulators for the three copy mechanisms of CalcInfo.
    local_copy_list = []
    remote_copy_list = []
    remote_symlink_list = []

    # --- Pop and type-check every expected input node. Each node is
    # --- removed from inputdict so that leftovers can be detected below.
    try:
        parameters = inputdict.pop(self.get_linkname('parameters'))
    except KeyError:
        raise InputValidationError(
            "No parameters specified for this calculation")
    if not isinstance(parameters, ParameterData):
        raise InputValidationError(
            "parameters is not of type ParameterData")
    try:
        structure = inputdict.pop(self.get_linkname('structure'))
    except KeyError:
        raise InputValidationError(
            "No structure specified for this calculation")
    if not isinstance(structure, StructureData):
        raise InputValidationError(
            "structure is not of type StructureData")
    # kpoints are only required for calculation types that use them.
    if self._use_kpoints:
        try:
            kpoints = inputdict.pop(self.get_linkname('kpoints'))
        except KeyError:
            raise InputValidationError(
                "No kpoints specified for this calculation")
        if not isinstance(kpoints, KpointsData):
            raise InputValidationError(
                "kpoints is not of type KpointsData")
    else:
        kpoints = None

    # Settings can be undefined, and defaults to an empty dictionary
    settings = inputdict.pop(self.get_linkname('settings'), None)
    if settings is None:
        settings_dict = {}
    else:
        if not isinstance(settings, ParameterData):
            raise InputValidationError("settings, if specified, must be of "
                                       "type ParameterData")
        # Settings converted to uppercase
        settings_dict = _uppercase_dict(settings.get_dict(),
                                        dict_name='settings')

    pseudos = {}
    # I create here a dictionary that associates each kind name to a pseudo
    # The link name encodes the kind name(s); one pseudo may serve several
    # kinds (underscore-separated in the link name).
    for link in inputdict.keys():
        if link.startswith(self._get_linkname_pseudo_prefix()):
            kindstring = link[len(self._get_linkname_pseudo_prefix()):]
            kinds = kindstring.split('_')
            the_pseudo = inputdict.pop(link)
            if not isinstance(the_pseudo, UpfData):
                raise InputValidationError("Pseudo for kind(s) {} is not of "
                                           "type UpfData".format(",".join(kinds)))
            for kind in kinds:
                if kind in pseudos:
                    raise InputValidationError("Pseudo for kind {} passed "
                                               "more than one time".format(kind))
                pseudos[kind] = the_pseudo

    # Optional restart folder from a previous calculation.
    parent_calc_folder = inputdict.pop(self.get_linkname('parent_folder'),
                                       None)
    if parent_calc_folder is not None:
        if not isinstance(parent_calc_folder, RemoteData):
            raise InputValidationError("parent_calc_folder, if specified, "
                                       "must be of type RemoteData")

    # Optional Van der Waals table file.
    vdw_table = inputdict.pop(self.get_linkname('vdw_table'), None)
    if vdw_table is not None:
        if not isinstance(vdw_table, SinglefileData):
            raise InputValidationError("vdw_table, if specified, "
                                       "must be of type SinglefileData")

    # Optional Hubbard parameters file.
    hubbard_file = inputdict.pop(self.get_linkname('hubbard_file'), None)
    if hubbard_file is not None:
        if not isinstance(hubbard_file, SinglefileData):
            raise InputValidationError(
                'hubbard_file, if specified, must be of type SinglefileData'
            )

    try:
        code = inputdict.pop(self.get_linkname('code'))
    except KeyError:
        raise InputValidationError(
            "No code specified for this calculation")

    # Here, there should be no more parameters...
    if inputdict:
        raise InputValidationError("The following input data nodes are "
                                   "unrecognized: {}".format(
                                       inputdict.keys()))

    # Check structure, get species, check peudos
    # Every kind in the structure must have exactly one pseudo, and no extra
    # pseudos may be supplied.
    kindnames = [k.name for k in structure.kinds]
    if set(kindnames) != set(pseudos.keys()):
        err_msg = ("Mismatch between the defined pseudos and the list of "
                   "kinds of the structure. Pseudos: {}; kinds: {}".format(
                       ",".join(pseudos.keys()), ",".join(list(kindnames))))
        raise InputValidationError(err_msg)

    ##############################
    # END OF INITIAL INPUT CHECK #
    ##############################

    # I create the subfolder that will contain the pseudopotentials
    tempfolder.get_subfolder(self._PSEUDO_SUBFOLDER, create=True)
    # I create the subfolder with the output data (sometimes Quantum
    # Espresso codes crash if an empty folder is not already there
    tempfolder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

    # If present, add also the Van der Waals table to the pseudo dir
    # Note that the name of the table is not checked but should be the
    # one expected by QE.
    if vdw_table:
        src_path = vdw_table.get_file_abs_path()
        dst_path = os.path.join(
            self._PSEUDO_SUBFOLDER,
            os.path.split(vdw_table.get_file_abs_path())[1])
        local_copy_list.append((src_path, dst_path))

    if hubbard_file:
        src_path = hubbard_file.get_file_abs_path()
        dst_path = self.input_file_name_hubbard_file
        local_copy_list.append((src_path, dst_path))

    # Generate the main input file content; this also returns the pseudo
    # files that must be copied into the pseudo subfolder.
    input_filecontent, local_copy_pseudo_list = self._generate_PWCPinputdata(
        parameters, settings_dict, pseudos, structure, kpoints)
    local_copy_list += local_copy_pseudo_list

    input_filename = tempfolder.get_abs_path(self._INPUT_FILE_NAME)
    with open(input_filename, 'w') as infile:
        infile.write(input_filecontent)

    # operations for restart
    # Either symlink the parent ./out folder (cheap) or copy it (safe),
    # depending on the PARENT_FOLDER_SYMLINK setting.
    symlink = settings_dict.pop('PARENT_FOLDER_SYMLINK',
                                self._default_symlink_usage)  # a boolean
    if symlink:
        if parent_calc_folder is not None:
            # I put the symlink to the old parent ./out folder
            remote_symlink_list.append(
                (parent_calc_folder.get_computer().uuid,
                 os.path.join(parent_calc_folder.get_remote_path(),
                              self._restart_copy_from),
                 self._restart_copy_to))
    else:
        # copy remote output dir, if specified
        if parent_calc_folder is not None:
            remote_copy_list.append(
                (parent_calc_folder.get_computer().uuid,
                 os.path.join(parent_calc_folder.get_remote_path(),
                              self._restart_copy_from),
                 self._restart_copy_to))

    # here we may create an aiida.EXIT file
    # (makes QE stop right after initialization, used for dry runs)
    create_exit_file = settings_dict.pop('ONLY_INITIALIZATION', False)
    if create_exit_file:
        exit_filename = tempfolder.get_abs_path('{}.EXIT'.format(
            self._PREFIX))
        with open(exit_filename, 'w') as f:
            f.write('\n')

    # Check if specific inputs for the ENVIRON module where specified
    environ_namelist = settings_dict.pop('ENVIRON', None)
    if environ_namelist is not None:
        if not isinstance(environ_namelist, dict):
            raise InputValidationError(
                "ENVIRON namelist should be specified as a dictionary")
        # We first add the environ flag to the command-line options (if not already present)
        try:
            if '-environ' not in settings_dict['CMDLINE']:
                settings_dict['CMDLINE'].append('-environ')
        except KeyError:
            settings_dict['CMDLINE'] = ['-environ']
        # To create a mapping from the species to an incremental fortran 1-based index
        # we use the alphabetical order as in the inputdata generation
        mapping_species = {
            sp_name: (idx + 1)
            for idx, sp_name in enumerate(
                sorted([kind.name for kind in structure.kinds]))
        }
        environ_input_filename = tempfolder.get_abs_path(
            self._ENVIRON_INPUT_FILE_NAME)
        with open(environ_input_filename, 'w') as environ_infile:
            environ_infile.write("&ENVIRON\n")
            for k, v in sorted(environ_namelist.iteritems()):
                environ_infile.write(
                    convert_input_to_namelist_entry(
                        k, v, mapping=mapping_species))
            environ_infile.write("/\n")

    # Check for the deprecated 'ALSO_BANDS' setting and if present fire a deprecation log message
    also_bands = settings_dict.pop('ALSO_BANDS', None)
    if also_bands:
        import logging
        from aiida.common.log import get_dblogger_extra

        logger = logging.LoggerAdapter(logger=self.logger,
                                       extra=get_dblogger_extra(self))
        logger.warning(
            "The '{}' setting is deprecated as bands are now parsed by default. "
            "If you do not want the bands to be parsed set the '{}' to True {}. "
            "Note that the eigenvalue.xml files are also no longer stored in the repository"
            .format('also_bands', 'no_bands', type(self)))

    calcinfo = CalcInfo()

    calcinfo.uuid = self.uuid
    # Empty command line by default
    cmdline_params = settings_dict.pop('CMDLINE', [])
    # we commented calcinfo.stin_name and added it here in cmdline_params
    # in this way the mpirun ... pw.x ... < aiida.in
    # is replaced by mpirun ... pw.x ... -in aiida.in
    # in the scheduler, _get_run_line, if cmdline_params is empty, it
    # simply uses < calcinfo.stin_name
    calcinfo.cmdline_params = (list(cmdline_params) +
                               ["-in", self._INPUT_FILE_NAME])

    codeinfo = CodeInfo()
    codeinfo.cmdline_params = (list(cmdline_params) +
                               ["-in", self._INPUT_FILE_NAME])
    codeinfo.stdout_name = self._OUTPUT_FILE_NAME
    codeinfo.code_uuid = code.uuid
    calcinfo.codes_info = [codeinfo]

    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list
    calcinfo.remote_symlink_list = remote_symlink_list

    # Retrieve by default the output file and the xml file
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append(self._OUTPUT_FILE_NAME)
    calcinfo.retrieve_list.append(self._DATAFILE_XML)
    calcinfo.retrieve_list += settings_dict.pop('ADDITIONAL_RETRIEVE_LIST',
                                                [])
    calcinfo.retrieve_list += self._internal_retrieve_list

    # Retrieve the k-point directories with the xml files to the temporary folder
    # to parse the band eigenvalues and occupations but not to have to save the raw files
    # if and only if the 'no_bands' key was not set to true in the settings
    no_bands = settings_dict.pop('NO_BANDS', False)
    if no_bands is False:
        xmlpaths = os.path.join(self._OUTPUT_SUBFOLDER,
                                self._PREFIX + '.save', 'K*[0-9]',
                                'eigenval*.xml')
        calcinfo.retrieve_temporary_list = [[xmlpaths, '.', 2]]

    # Pop the parser-options key (if the parser declares one) so that it
    # does not trigger the unknown-keys error below.
    try:
        Parserclass = self.get_parserclass()
        parser = Parserclass(self)
        parser_opts = parser.get_parser_settings_key().upper()
        settings_dict.pop(parser_opts)
    except (KeyError, AttributeError):
        # the key parser_opts isn't inside the dictionary
        pass

    # Any key still left in settings_dict was not understood by anyone.
    if settings_dict:
        raise InputValidationError(
            "The following keys have been found in "
            "the settings input node, but were not understood: {}".format(
                ",".join(settings_dict.keys())))

    return calcinfo
def _prepare_for_submission(self, tempfolder, inputdict):
    """
    This is the routine to be called when you want to create
    the input files and related stuff with a plugin.

    :param tempfolder: a aiida.common.folders.Folder subclass where
                       the plugin should put all its files.
    :param inputdict: a dictionary with the input nodes, as they would
            be returned by get_inputdata_dict (without the Code!)
    :return: a CalcInfo instance for a namelists-only calculation.
    :raises InputValidationError: on missing/mistyped inputs, blocked
            keywords specified explicitly, unknown namelists, or
            unrecognized settings keys.
    """
    local_copy_list = []
    remote_copy_list = []

    # --- Pop and type-check the input nodes.
    try:
        code = inputdict.pop(self.get_linkname('code'))
    except KeyError:
        raise InputValidationError(
            "No code specified for this calculation")

    try:
        parameters = inputdict.pop(self.get_linkname('parameters'))
    except KeyError:
        raise InputValidationError(
            "No parameters specified for this calculation")
    if not isinstance(parameters, ParameterData):
        raise InputValidationError(
            "parameters is not of type ParameterData")

    # Settings can be undefined, and defaults to an empty dictionary
    settings = inputdict.pop(self.get_linkname('settings'), None)
    if settings is None:
        settings_dict = {}
    else:
        if not isinstance(settings, ParameterData):
            raise InputValidationError("settings, if specified, must be of "
                                       "type ParameterData")
        # Settings converted to uppercase
        settings_dict = _uppercase_dict(settings.get_dict(),
                                        dict_name='settings')

    # The parent folder may be any of the types listed in
    # self._parent_folder_type (a single class or a tuple of classes).
    parent_calc_folder = inputdict.pop(self.get_linkname('parent_folder'),
                                       None)
    if parent_calc_folder is not None:
        if not isinstance(parent_calc_folder, self._parent_folder_type):
            if not isinstance(self._parent_folder_type, tuple):
                possible_types = [self._parent_folder_type.__name__]
            else:
                possible_types = [
                    t.__name__ for t in self._parent_folder_type
                ]
            raise InputValidationError("parent_calc_folder, if specified,"
                                       "must be of type {}".format(
                                           " or ".join(possible_types)))

    # Subclass hook: free-form text appended after the namelists.
    following_text = self._get_following_text(inputdict, settings)

    # Here, there should be no more parameters...
    if inputdict:
        raise InputValidationError("The following input data nodes are "
                                   "unrecognized: {}".format(
                                       inputdict.keys()))

    ##############################
    # END OF INITIAL INPUT CHECK #
    ##############################

    # I put the first-level keys as uppercase (i.e., namelist and card names)
    # and the second-level keys as lowercase
    # (deeper levels are unchanged)
    input_params = _uppercase_dict(parameters.get_dict(),
                                   dict_name='parameters')
    input_params = {
        k: _lowercase_dict(v, dict_name=k)
        for k, v in input_params.iteritems()
    }

    # set default values. NOTE: this is different from PW/CP
    for blocked in self._blocked_keywords:
        namelist = blocked[0].upper()
        key = blocked[1].lower()
        value = blocked[2]
        if namelist in input_params:
            if key in input_params[namelist]:
                raise InputValidationError(
                    "You cannot specify explicitly the '{}' key in the '{}' "
                    "namelist.".format(key, namelist))
        # set to a default
        # NOTE(review): if `namelist` is not already a key of input_params
        # this lookup raises KeyError; this looks like it was meant to be
        # `if namelist not in input_params` — confirm against git history.
        if not input_params[namelist]:
            input_params[namelist] = {}
        input_params[namelist][key] = value

    # =================== NAMELISTS AND CARDS ========================
    try:
        namelists_toprint = settings_dict.pop('NAMELISTS')
        if not isinstance(namelists_toprint, list):
            raise InputValidationError(
                "The 'NAMELISTS' value, if specified in the settings input "
                "node, must be a list of strings")
    except KeyError:
        # list of namelists not specified; do automatic detection
        namelists_toprint = self._default_namelists

    input_filename = tempfolder.get_abs_path(self._INPUT_FILE_NAME)

    with open(input_filename, 'w') as infile:
        for namelist_name in namelists_toprint:
            infile.write("&{0}\n".format(namelist_name))
            # namelist content; set to {} if not present, so that we leave an
            # empty namelist
            namelist = input_params.pop(namelist_name, {})
            for k, v in sorted(namelist.iteritems()):
                infile.write(convert_input_to_namelist_entry(k, v))
            infile.write("/\n")

        # Write remaning text now, if any
        infile.write(following_text)

    # Check for specified namelists that are not expected
    if input_params:
        raise InputValidationError(
            "The following namelists are specified in input_params, but are "
            "not valid namelists for the current type of calculation: "
            "{}".format(",".join(input_params.keys())))

    # copy remote output dir, if specified
    # The copy strategy depends on the concrete type of the parent folder.
    if parent_calc_folder is not None:
        if isinstance(parent_calc_folder, RemoteData):
            parent_calc_out_subfolder = settings_dict.pop(
                'PARENT_CALC_OUT_SUBFOLDER', self._INPUT_SUBFOLDER)
            remote_copy_list.append(
                (parent_calc_folder.get_computer().uuid,
                 os.path.join(parent_calc_folder.get_remote_path(),
                              parent_calc_out_subfolder),
                 self._OUTPUT_SUBFOLDER))
        elif isinstance(parent_calc_folder, FolderData):
            local_copy_list.append(
                (parent_calc_folder.get_abs_path(self._INPUT_SUBFOLDER),
                 self._OUTPUT_SUBFOLDER))
        elif isinstance(parent_calc_folder, SinglefileData):
            filename = parent_calc_folder.get_file_abs_path()
            local_copy_list.append((filename, os.path.basename(filename)))

    calcinfo = CalcInfo()

    calcinfo.uuid = self.uuid
    # Empty command line by default
    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list

    codeinfo = CodeInfo()
    codeinfo.cmdline_params = settings_dict.pop('CMDLINE', [])
    # Unlike PW/CP, the input is passed on stdin here.
    codeinfo.stdin_name = self._INPUT_FILE_NAME
    codeinfo.stdout_name = self._OUTPUT_FILE_NAME
    codeinfo.code_uuid = code.uuid
    calcinfo.codes_info = [codeinfo]

    # Retrieve by default the output file and the xml file
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append(self._OUTPUT_FILE_NAME)
    settings_retrieve_list = settings_dict.pop('ADDITIONAL_RETRIEVE_LIST',
                                               [])
    calcinfo.retrieve_list += settings_retrieve_list
    calcinfo.retrieve_list += self._internal_retrieve_list

    calcinfo.retrieve_singlefile_list = self._retrieve_singlefile_list

    # If settings keys are left over, try to pop the parser-options key;
    # failure to resolve it means the remaining keys were not understood.
    # NOTE(review): if the pop succeeds, any *other* leftover keys are
    # silently accepted — possibly intentional, but stricter validation
    # (as in the PW version above) may have been intended; confirm.
    if settings_dict:
        try:
            Parserclass = self.get_parserclass()
            parser = Parserclass(self)
            parser_opts = parser.get_parser_settings_key()
            settings_dict.pop(parser_opts)
        except (
                KeyError, AttributeError
        ):  # the key parser_opts isn't inside the dictionary, or it is set to None
            raise InputValidationError(
                "The following keys have been found in "
                "the settings input node, but were not understood: {}".
                format(",".join(settings_dict.keys())))

    return calcinfo
def _generate_PWCPinputdata(self, parameters, settings_dict, pseudos,
                            structure, kpoints=None):
    """
    Create the content of a pw.x / cp.x input file.

    :param parameters: ParameterData with the namelist parameters
        (first-level keys = namelist names, second-level = flags).
    :param settings_dict: uppercase settings dictionary; consumed keys
        (FIXED_COORDS, ATOMIC_FORCES, ATOMIC_VELOCITIES, GAMMA_ONLY,
        FORCE_KPOINTS_LIST, NAMELISTS) are popped from it.
    :param pseudos: dict mapping kind name -> UpfData pseudo node.
    :param structure: StructureData for the cards.
    :param kpoints: KpointsData, required only if self._use_kpoints.
    :return: tuple (input file content as a string, list of
        (src, dst) pseudo files to copy locally).
    :raises InputValidationError: on blocked keywords, invalid kinds,
        malformed settings entries or unknown namelists.

    BUGFIX with respect to the previous revision: the gamma-only check
    used ``tuple(0., 0., 0.)``, which raises ``TypeError`` (``tuple()``
    accepts at most one iterable argument); it now compares against the
    tuple literal ``(0., 0., 0.)``.
    """
    from aiida.common.utils import get_unique_filename, get_suggestion
    import re

    local_copy_list_to_append = []

    # I put the first-level keys as uppercase (i.e., namelist and card names)
    # and the second-level keys as lowercase
    # (deeper levels are unchanged)
    input_params = _uppercase_dict(parameters.get_dict(),
                                   dict_name='parameters')
    input_params = {
        k: _lowercase_dict(v, dict_name=k)
        for k, v in input_params.iteritems()
    }

    # I remove unwanted elements (for the moment, instead, I stop; to change when
    # we setup a reasonable logging)
    for blocked in self._blocked_keywords:
        nl = blocked[0].upper()
        flag = blocked[1].lower()
        defaultvalue = None
        if len(blocked) >= 3:
            defaultvalue = blocked[2]
        if nl in input_params:
            # The following lines is meant to avoid putting in input the
            # parameters like celldm(*)
            stripped_inparams = [
                re.sub("[(0-9)]", "", _) for _ in input_params[nl].keys()
            ]
            if flag in stripped_inparams:
                raise InputValidationError(
                    "You cannot specify explicitly the '{}' flag in the '{}' "
                    "namelist or card.".format(flag, nl))
        if defaultvalue is not None:
            if nl not in input_params:
                input_params[nl] = {}
            input_params[nl][flag] = defaultvalue

    # Set some variables (look out at the case! NAMELISTS should be uppercase,
    # internal flag names must be lowercase)
    if 'CONTROL' not in input_params:
        input_params['CONTROL'] = {}
    input_params['CONTROL']['pseudo_dir'] = self._PSEUDO_SUBFOLDER
    input_params['CONTROL']['outdir'] = self._OUTPUT_SUBFOLDER
    input_params['CONTROL']['prefix'] = self._PREFIX
    input_params['CONTROL']['verbosity'] = input_params['CONTROL'].get(
        'verbosity', self._default_verbosity)  # Set to high if not specified

    # ============ I prepare the input site data =============
    # ------------ CELL_PARAMETERS -----------
    cell_parameters_card = "CELL_PARAMETERS angstrom\n"
    for vector in structure.cell:
        cell_parameters_card += ("{0:18.10f} {1:18.10f} {2:18.10f}"
                                 "\n".format(*vector))

    # ------------- ATOMIC_SPECIES ------------
    atomic_species_card_list = []

    # Keep track of the filenames to avoid to overwrite files
    # I use a dictionary where the key is the pseudo PK and the value
    # is the filename I used. In this way, I also use the same filename
    # if more than one kind uses the same pseudo.
    pseudo_filenames = {}

    # I keep track of the order of species
    kind_names = []
    # I add the pseudopotential files to the list of files to be copied
    for kind in structure.kinds:
        # This should not give errors, I already checked before that
        # the list of keys of pseudos and kinds coincides
        ps = pseudos[kind.name]
        if kind.is_alloy() or kind.has_vacancies():
            raise InputValidationError(
                "Kind '{}' is an alloy or has "
                "vacancies. This is not allowed for pw.x input structures."
                "".format(kind.name))

        try:
            # It it is the same pseudopotential file, use the same filename
            filename = pseudo_filenames[ps.pk]
        except KeyError:
            # The pseudo was not encountered yet; use a new name and
            # also add it to the local copy list
            filename = get_unique_filename(ps.filename,
                                           pseudo_filenames.values())
            pseudo_filenames[ps.pk] = filename
            # I add this pseudo file to the list of files to copy
            local_copy_list_to_append.append(
                (ps.get_file_abs_path(),
                 os.path.join(self._PSEUDO_SUBFOLDER, filename)))
        kind_names.append(kind.name)
        atomic_species_card_list.append("{} {} {}\n".format(
            kind.name.ljust(6), kind.mass, filename))

    # I join the lines, but I resort them using the alphabetical order of
    # species, given by the kind_names list. I also store the mapping_species
    # list, with the order of species used in the file
    mapping_species, sorted_atomic_species_card_list = zip(
        *sorted(zip(kind_names, atomic_species_card_list)))
    # The format of mapping_species required later is a dictionary, whose
    # values are the indices, so I convert to this format
    # Note the (idx+1) to convert to fortran 1-based lists
    mapping_species = {
        sp_name: (idx + 1)
        for idx, sp_name in enumerate(mapping_species)
    }
    # I add the first line
    sorted_atomic_species_card_list = (
        ["ATOMIC_SPECIES\n"] + list(sorted_atomic_species_card_list))
    atomic_species_card = "".join(sorted_atomic_species_card_list)
    # Free memory
    del sorted_atomic_species_card_list
    del atomic_species_card_list

    # ------------ ATOMIC_POSITIONS -----------
    atomic_positions_card_list = ["ATOMIC_POSITIONS angstrom\n"]

    # Check on validity of FIXED_COORDS
    fixed_coords_strings = []
    fixed_coords = settings_dict.pop('FIXED_COORDS', None)
    if fixed_coords is None:
        # No fixed_coords specified: I store a list of empty strings
        fixed_coords_strings = [""] * len(structure.sites)
    else:
        if len(fixed_coords) != len(structure.sites):
            raise InputValidationError(
                "Input structure contains {:d} sites, but "
                "fixed_coords has length {:d}".format(
                    len(structure.sites), len(fixed_coords)))

        for i, this_atom_fix in enumerate(fixed_coords):
            if len(this_atom_fix) != 3:
                raise InputValidationError(
                    "fixed_coords({:d}) has not length three"
                    "".format(i + 1))
            for fixed_c in this_atom_fix:
                if not isinstance(fixed_c, bool):
                    raise InputValidationError(
                        "fixed_coords({:d}) has non-boolean "
                        "elements".format(i + 1))

            # if_pos = 0 freezes the coordinate, 1 leaves it free
            if_pos_values = [self._if_pos(_) for _ in this_atom_fix]
            fixed_coords_strings.append(
                " {:d} {:d} {:d}".format(*if_pos_values))

    for site, fixed_coords_string in zip(structure.sites,
                                         fixed_coords_strings):
        atomic_positions_card_list.append(
            "{0} {1:18.10f} {2:18.10f} {3:18.10f} {4}\n".format(
                site.kind_name.ljust(6), site.position[0], site.position[1],
                site.position[2], fixed_coords_string))
    atomic_positions_card = "".join(atomic_positions_card_list)
    del atomic_positions_card_list

    # Optional ATOMIC_FORCES card
    atomic_forces = settings_dict.pop('ATOMIC_FORCES', None)
    if atomic_forces is not None:

        # Checking that there are as many forces defined as there are sites in the structure
        if len(atomic_forces) != len(structure.sites):
            raise InputValidationError(
                'Input structure contains {:d} sites, but atomic forces has length {:d}'
                .format(len(structure.sites), len(atomic_forces)))

        lines = ['ATOMIC_FORCES\n']
        for site, vector in zip(structure.sites, atomic_forces):

            # Checking that all 3 dimensions are specified:
            if len(vector) != 3:
                raise InputValidationError(
                    'Forces({}) for {} has not length three'.format(
                        vector, site))

            lines.append('{0} {1:18.10f} {2:18.10f} {3:18.10f}\n'.format(
                site.kind_name.ljust(6), *vector))

        # Append to atomic_positions_card so that this card will be printed directly after
        atomic_positions_card += ''.join(lines)
        del lines

    # Optional ATOMIC_VELOCITIES card
    atomic_velocities = settings_dict.pop('ATOMIC_VELOCITIES', None)
    if atomic_velocities is not None:

        # Checking that there are as many velocities defined as there are sites in the structure
        if len(atomic_velocities) != len(structure.sites):
            raise InputValidationError(
                'Input structure contains {:d} sites, but atomic velocities has length {:d}'
                .format(len(structure.sites), len(atomic_velocities)))

        lines = ['ATOMIC_VELOCITIES\n']
        for site, vector in zip(structure.sites, atomic_velocities):

            # Checking that all 3 dimensions are specified:
            if len(vector) != 3:
                raise InputValidationError(
                    'Velocities({}) for {} has not length three'.format(
                        vector, site))

            lines.append('{0} {1:18.10f} {2:18.10f} {3:18.10f}\n'.format(
                site.kind_name.ljust(6), *vector))

        # Append to atomic_positions_card so that this card will be printed directly after
        atomic_positions_card += ''.join(lines)
        del lines

    # I set the variables that must be specified, related to the system
    # Set some variables (look out at the case! NAMELISTS should be
    # uppercase, internal flag names must be lowercase)
    if 'SYSTEM' not in input_params:
        input_params['SYSTEM'] = {}
    input_params['SYSTEM']['ibrav'] = 0
    input_params['SYSTEM']['nat'] = len(structure.sites)
    input_params['SYSTEM']['ntyp'] = len(structure.kinds)

    # ============ I prepare the k-points =============
    if self._use_kpoints:
        try:
            mesh, offset = kpoints.get_kpoints_mesh()
            has_mesh = True
            force_kpoints_list = settings_dict.pop('FORCE_KPOINTS_LIST',
                                                   False)
            if force_kpoints_list:
                kpoints_list = kpoints.get_kpoints_mesh(print_list=True)
                num_kpoints = len(kpoints_list)
                has_mesh = False
                weights = [1.] * num_kpoints
        except AttributeError:
            # Not a mesh: fall back on an explicit list of k-points
            try:
                kpoints_list = kpoints.get_kpoints()
                num_kpoints = len(kpoints_list)
                has_mesh = False
                if num_kpoints == 0:
                    raise InputValidationError(
                        "At least one k point must be "
                        "provided for non-gamma calculations")
            except AttributeError:
                raise InputValidationError(
                    "No valid kpoints have been found")

            try:
                _, weights = kpoints.get_kpoints(also_weights=True)
            except AttributeError:
                weights = [1.] * num_kpoints

        gamma_only = settings_dict.pop("GAMMA_ONLY", False)

        if gamma_only:
            if has_mesh:
                if tuple(mesh) != (1, 1, 1) or tuple(offset) != (0., 0., 0.):
                    raise InputValidationError(
                        "If a gamma_only calculation is requested, the "
                        "kpoint mesh must be (1,1,1),offset=(0.,0.,0.)")
            else:
                # FIXED: was `tuple(0., 0., 0.)`, a TypeError at runtime
                if (len(kpoints_list) != 1
                        or tuple(kpoints_list[0]) != (0., 0., 0.)):
                    raise InputValidationError(
                        "If a gamma_only calculation is requested, the "
                        "kpoints coordinates must only be (0.,0.,0.)")

            kpoints_type = "gamma"

        elif has_mesh:
            kpoints_type = "automatic"

        else:
            kpoints_type = "crystal"

        kpoints_card_list = ["K_POINTS {}\n".format(kpoints_type)]

        if kpoints_type == "automatic":
            if any([(i != 0. and i != 0.5) for i in offset]):
                raise InputValidationError("offset list must only be made "
                                           "of 0 or 0.5 floats")
            the_offset = [0 if i == 0. else 1 for i in offset]
            the_6_integers = list(mesh) + the_offset
            kpoints_card_list.append("{:d} {:d} {:d} {:d} {:d} {:d}\n"
                                     "".format(*the_6_integers))

        elif kpoints_type == "gamma":
            # nothing to be written in this case
            pass
        else:
            kpoints_card_list.append("{:d}\n".format(num_kpoints))
            for kpoint, weight in zip(kpoints_list, weights):
                kpoints_card_list.append(
                    " {:18.10f} {:18.10f} {:18.10f} {:18.10f}"
                    "\n".format(kpoint[0], kpoint[1], kpoint[2], weight))

        kpoints_card = "".join(kpoints_card_list)
        del kpoints_card_list

    # =================== NAMELISTS AND CARDS ========================
    try:
        namelists_toprint = settings_dict.pop('NAMELISTS')
        if not isinstance(namelists_toprint, list):
            raise InputValidationError(
                "The 'NAMELISTS' value, if specified in the settings input "
                "node, must be a list of strings")
    except KeyError:
        # list of namelists not specified; do automatic detection
        try:
            control_nl = input_params['CONTROL']
            calculation_type = control_nl['calculation']
        except KeyError:
            raise InputValidationError(
                "No 'calculation' in CONTROL namelist."
                "It is required for automatic detection of the valid list "
                "of namelists. Otherwise, specify the list of namelists "
                "using the NAMELISTS key inside the 'settings' input node")

        try:
            namelists_toprint = self._automatic_namelists[calculation_type]
        except KeyError:
            sugg_string = get_suggestion(calculation_type,
                                         self._automatic_namelists.keys())
            raise InputValidationError(
                "Unknown 'calculation' value in "
                "CONTROL namelist {}. Otherwise, specify the list of "
                "namelists using the NAMELISTS inside the 'settings' input "
                "node".format(sugg_string))

    inputfile = ""
    for namelist_name in namelists_toprint:
        inputfile += "&{0}\n".format(namelist_name)
        # namelist content; set to {} if not present, so that we leave an
        # empty namelist
        namelist = input_params.pop(namelist_name, {})
        for k, v in sorted(namelist.iteritems()):
            inputfile += convert_input_to_namelist_entry(
                k, v, mapping=mapping_species)
        inputfile += "/\n"

    # Write cards now
    inputfile += atomic_species_card
    inputfile += atomic_positions_card
    if self._use_kpoints:
        inputfile += kpoints_card
    inputfile += cell_parameters_card
    #TODO: write CONSTRAINTS
    #TODO: write OCCUPATIONS

    if input_params:
        raise InputValidationError(
            "The following namelists are specified in input_params, but are "
            "not valid namelists for the current type of calculation: "
            "{}".format(",".join(input_params.keys())))

    return inputfile, local_copy_list_to_append
def prepare_for_submission(self, folder):
    """Create the input files from the input nodes passed to this instance of the `CalcJob`.

    :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk
    :return: `aiida.common.datastructures.CalcInfo` instance
    :raises ~aiida.common.exceptions.InputValidationError: if pseudos and
        structure kinds mismatch, ENVIRON settings are malformed, or
        unknown settings keys remain.

    BUGFIX with respect to the previous revision: in the `hubbard_file`
    branch the first element of the `local_copy_list` triple (which must
    be the node UUID, as in the `vdw_table` branch) was mistakenly set to
    `self.inputs.hubbard_file.filename`; it is now the node's `uuid`.
    """
    if 'settings' in self.inputs:
        settings = _uppercase_dict(self.inputs.settings.get_dict(), dict_name='settings')
    else:
        settings = {}

    # Check that a pseudo potential was specified for each kind present in the `StructureData`
    kinds = [kind.name for kind in self.inputs.structure.kinds]
    if set(kinds) != set(self.inputs.pseudos.keys()):
        raise exceptions.InputValidationError(
            'Mismatch between the defined pseudos and the list of kinds of the structure.\n'
            'Pseudos: {};\nKinds: {}'.format(', '.join(list(self.inputs.pseudos.keys())), ', '.join(list(kinds))))

    local_copy_list = []
    remote_copy_list = []
    remote_symlink_list = []

    # Create the subfolder that will contain the pseudopotentials
    folder.get_subfolder(self._PSEUDO_SUBFOLDER, create=True)
    # Create the subfolder for the output data (sometimes Quantum ESPRESSO codes crash if the folder does not exist)
    folder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

    # If present, add also the Van der Waals table to the pseudo dir. Note that the name of the table is not checked
    # but should be the one expected by Quantum ESPRESSO.
    if 'vdw_table' in self.inputs:
        uuid = self.inputs.vdw_table.uuid
        src_path = self.inputs.vdw_table.filename
        dst_path = os.path.join(self._PSEUDO_SUBFOLDER, self.inputs.vdw_table.filename)
        local_copy_list.append((uuid, src_path, dst_path))

    if 'hubbard_file' in self.inputs:
        # local_copy_list entries are (node_uuid, source_filename, target):
        # the first element must be the node UUID, not the filename.
        uuid = self.inputs.hubbard_file.uuid
        src_path = self.inputs.hubbard_file.filename
        dst_path = self.input_file_name_hubbard_file
        local_copy_list.append((uuid, src_path, dst_path))

    # Build the argument list for the input generator; kpoints are only
    # appended for calculation types that use them.
    arguments = [
        self.inputs.parameters,
        settings,
        self.inputs.pseudos,
        self.inputs.structure,
    ]
    if self._use_kpoints:
        arguments.append(self.inputs.kpoints)
    input_filecontent, local_copy_pseudo_list = self._generate_PWCPinputdata(*arguments)
    local_copy_list += local_copy_pseudo_list

    with folder.open(self.metadata.options.input_filename, 'w') as handle:
        handle.write(input_filecontent)

    # operations for restart
    symlink = settings.pop('PARENT_FOLDER_SYMLINK', self._default_symlink_usage)  # a boolean
    if symlink:
        if 'parent_folder' in self.inputs:
            # I put the symlink to the old parent ./out folder
            remote_symlink_list.append((
                self.inputs.parent_folder.computer.uuid,
                os.path.join(self.inputs.parent_folder.get_remote_path(), self._restart_copy_from),
                self._restart_copy_to
            ))
    else:
        # copy remote output dir, if specified
        if 'parent_folder' in self.inputs:
            remote_copy_list.append((
                self.inputs.parent_folder.computer.uuid,
                os.path.join(self.inputs.parent_folder.get_remote_path(), self._restart_copy_from),
                self._restart_copy_to
            ))

    # Create an `.EXIT` file if `only_initialization` flag in `settings` is set to `True`
    if settings.pop('ONLY_INITIALIZATION', False):
        with folder.open('{}.EXIT'.format(self._PREFIX), 'w') as handle:
            handle.write('\n')

    # Check if specific inputs for the ENVIRON module where specified
    environ_namelist = settings.pop('ENVIRON', None)
    if environ_namelist is not None:
        if not isinstance(environ_namelist, dict):
            raise exceptions.InputValidationError('ENVIRON namelist should be specified as a dictionary')
        # We first add the environ flag to the command-line options (if not already present)
        try:
            if '-environ' not in settings['CMDLINE']:
                settings['CMDLINE'].append('-environ')
        except KeyError:
            settings['CMDLINE'] = ['-environ']
        # To create a mapping from the species to an incremental fortran 1-based index
        # we use the alphabetical order as in the inputdata generation
        kind_names = sorted([kind.name for kind in self.inputs.structure.kinds])
        mapping_species = {kind_name: (index + 1) for index, kind_name in enumerate(kind_names)}

        with folder.open(self._ENVIRON_INPUT_FILE_NAME, 'w') as handle:
            handle.write('&ENVIRON\n')
            for k, v in sorted(six.iteritems(environ_namelist)):
                handle.write(convert_input_to_namelist_entry(k, v, mapping=mapping_species))
            handle.write('/\n')

    # Check for the deprecated 'ALSO_BANDS' setting and if present fire a deprecation log message
    also_bands = settings.pop('ALSO_BANDS', None)
    if also_bands:
        self.node.logger.warning(
            "The '{}' setting is deprecated as bands are now parsed by default. "
            "If you do not want the bands to be parsed set the '{}' to True {}. "
            'Note that the eigenvalue.xml files are also no longer stored in the repository'
            .format('also_bands', 'no_bands', type(self))
        )

    calcinfo = datastructures.CalcInfo()

    calcinfo.uuid = str(self.uuid)
    # Empty command line by default
    cmdline_params = settings.pop('CMDLINE', [])
    # we commented calcinfo.stin_name and added it here in cmdline_params
    # in this way the mpirun ... pw.x ... < aiida.in
    # is replaced by mpirun ... pw.x ... -in aiida.in
    # in the scheduler, _get_run_line, if cmdline_params is empty, it
    # simply uses < calcinfo.stin_name
    calcinfo.cmdline_params = (list(cmdline_params) + ['-in', self.metadata.options.input_filename])

    codeinfo = datastructures.CodeInfo()
    codeinfo.cmdline_params = (list(cmdline_params) + ['-in', self.metadata.options.input_filename])
    codeinfo.stdout_name = self.metadata.options.output_filename
    codeinfo.code_uuid = self.inputs.code.uuid
    calcinfo.codes_info = [codeinfo]

    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list
    calcinfo.remote_symlink_list = remote_symlink_list

    # Retrieve by default the output file and the xml file
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append(self.metadata.options.output_filename)
    calcinfo.retrieve_list.extend(self.xml_filepaths)
    calcinfo.retrieve_list += settings.pop('ADDITIONAL_RETRIEVE_LIST', [])
    calcinfo.retrieve_list += self._internal_retrieve_list

    # Retrieve the k-point directories with the xml files to the temporary folder
    # to parse the band eigenvalues and occupations but not to have to save the raw files
    # if and only if the 'no_bands' key was not set to true in the settings
    no_bands = settings.pop('NO_BANDS', False)
    if no_bands is False:
        xmlpaths = os.path.join(self._OUTPUT_SUBFOLDER, self._PREFIX + '.save', 'K*[0-9]', 'eigenval*.xml')
        calcinfo.retrieve_temporary_list = [[xmlpaths, '.', 2]]

    # We might still have parser options in the settings dictionary: pop them.
    _pop_parser_options(self, settings)

    if settings:
        unknown_keys = ', '.join(list(settings.keys()))
        raise exceptions.InputValidationError('`settings` contained unexpected keys: {}'.format(unknown_keys))

    return calcinfo
def _prepare_for_submission(self, tempfolder, inputdict):
    """
    This is the routine to be called when you want to create
    the input files and related stuff with a plugin.

    Validates the input nodes (code, parameters, qpoints, optional settings
    and the parent calculation folder), writes the ph.x input file
    (namelists plus an optional explicit q-point list), prepares the
    copy/symlink lists for the parent scratch directory and returns the
    populated CalcInfo.

    :param tempfolder: a aiida.common.folders.Folder subclass where
                       the plugin should put all its files.
    :param inputdict: a dictionary with the input nodes, as they would
            be returned by get_inputdata_dict (without the Code!)
    :raises InputValidationError: if a required input node is missing or of
        the wrong type, or if unrecognized inputs/settings keys remain.
    :raises UniquenessError: if the parent RemoteData does not descend from
        exactly one calculation.
    """
    try:
        code = inputdict.pop(self.get_linkname('code'))
    except KeyError:
        raise InputValidationError(
            "No code specified for this calculation")

    local_copy_list = []
    remote_copy_list = []
    remote_symlink_list = []

    try:
        parameters = inputdict.pop(self.get_linkname('parameters'))
    except KeyError:
        raise InputValidationError(
            "No parameters specified for this calculation")
    if not isinstance(parameters, ParameterData):
        raise InputValidationError(
            "parameters is not of type ParameterData")

    try:
        qpoints = inputdict.pop(self.get_linkname('qpoints'))
    except KeyError:
        raise InputValidationError(
            "No qpoints specified for this calculation")
    if not isinstance(qpoints, KpointsData):
        raise InputValidationError("qpoints is not of type KpointsData")

    # Settings can be undefined, and defaults to an empty dictionary.
    # They will be used for any input that doesn't fit elsewhere.
    settings = inputdict.pop(self.get_linkname('settings'), None)
    if settings is None:
        settings_dict = {}
    else:
        if not isinstance(settings, ParameterData):
            raise InputValidationError(
                "settings, if specified, must be of "
                "type ParameterData")
        # Settings converted to uppercase
        settings_dict = _uppercase_dict(settings.get_dict(),
                                        dict_name='settings')

    parent_calc_folder = inputdict.pop(self.get_linkname('parent_folder'),
                                       None)
    if parent_calc_folder is None:
        raise InputValidationError(
            "No parent calculation found, needed to "
            "compute phonons")
    # TODO: to be a PwCalculation is not sufficient: it could also be a nscf
    # calculation that is invalid for phonons
    if not isinstance(parent_calc_folder, RemoteData):
        # NOTE(review): the concatenated message below lacks a space between
        # "specified," and "must" -- worth fixing in a separate change.
        raise InputValidationError("parent_calc_folder, if specified,"
                                   "must be of type RemoteData")

    restart_flag = False
    # extract parent calculation
    parent_calcs = parent_calc_folder.get_inputs(node_type=JobCalculation)
    n_parents = len(parent_calcs)
    if n_parents != 1:
        raise UniquenessError("Input RemoteData is child of {} "
                              "calculation{}, while it should have "
                              "a single parent".format(
                                  n_parents,
                                  "" if n_parents == 0 else "s"))
    parent_calc = parent_calcs[0]
    # check that it is a valid parent
    self._check_valid_parent(parent_calc)

    # A parent that is not a PwCalculation triggers the restart logic
    # further below (the dynamical matrices get copied/symlinked over).
    if not isinstance(parent_calc, PwCalculation):
        restart_flag = True

    # Also, the parent calculation must be on the same computer
    new_comp = self.get_computer()
    old_comp = parent_calc.get_computer()
    if (not new_comp.uuid == old_comp.uuid):
        raise InputValidationError(
            "PhCalculation must be launched on the same computer"
            " of the parent: {}".format(old_comp.get_name()))

    # put by default, default_parent_output_folder = ./out
    try:
        default_parent_output_folder = parent_calc._OUTPUT_SUBFOLDER
    except AttributeError:
        try:
            default_parent_output_folder = parent_calc._get_output_folder()
        except AttributeError:
            raise InputValidationError("Parent of PhCalculation does not "
                                       "have a default output subfolder")
    #os.path.join(
    #    parent_calc.OUTPUT_SUBFOLDER,
    #    '{}.save'.format(parent_calc.PREFIX))
    parent_calc_out_subfolder = settings_dict.pop(
        'PARENT_CALC_OUT_SUBFOLDER', default_parent_output_folder)

    # Here, there should be no other inputs
    if inputdict:
        raise InputValidationError("The following input data nodes are "
                                   "unrecognized: {}".format(
                                       inputdict.keys()))

    ##############################
    # END OF INITIAL INPUT CHECK #
    ##############################

    # I put the first-level keys as uppercase (i.e., namelist and card names)
    # and the second-level keys as lowercase
    # (deeper levels are unchanged)
    input_params = _uppercase_dict(parameters.get_dict(),
                                   dict_name='parameters')
    input_params = {
        k: _lowercase_dict(v, dict_name=k)
        for k, v in input_params.iteritems()
    }

    # For the neb input there is no blocked keyword

    # When preparing for a d3 calculation, the corresponding flags are set
    # automatically below, so the user must not provide them explicitly:
    # add them to the blocked keywords before the check.
    prepare_for_d3 = settings_dict.pop('PREPARE_FOR_D3', False)
    if prepare_for_d3:
        self._blocked_keywords += [('INPUTPH', 'fildrho'),
                                   ('INPUTPH', 'drho_star%open'),
                                   ('INPUTPH', 'drho_star%ext'),
                                   ('INPUTPH', 'drho_star%dir')]

    # I remove unwanted elements (for the moment, instead, I stop; to change when
    # we setup a reasonable logging)
    for nl, flag in self._blocked_keywords:
        if nl in input_params:
            if flag in input_params[nl]:
                raise InputValidationError(
                    "You cannot specify explicitly the '{}' flag in the '{}' "
                    "namelist or card.".format(flag, nl))

    # Set some variables (look out at the case! NAMELISTS should be uppercase,
    # internal flag names must be lowercase)
    if 'INPUTPH' not in input_params:
        raise InputValidationError(
            "No namelist INPUTPH found in input"
        )  # I cannot decide what to do in the calculation
    input_params['INPUTPH']['outdir'] = self._OUTPUT_SUBFOLDER
    input_params['INPUTPH']['iverbosity'] = 1  # in human language 1=high
    input_params['INPUTPH']['prefix'] = self._PREFIX
    input_params['INPUTPH'][
        'fildyn'] = self._OUTPUT_DYNAMICAL_MATRIX_PREFIX
    if prepare_for_d3:
        input_params['INPUTPH']['fildrho'] = self._DRHO_PREFIX
        input_params['INPUTPH']['drho_star%open'] = True
        input_params['INPUTPH']['drho_star%ext'] = self._DRHO_STAR_EXT
        input_params['INPUTPH']['drho_star%dir'] = self._FOLDER_DRHO

    # qpoints part
    try:
        # EAFP: a KpointsData holding a mesh -> use ldisp mode with nq1/2/3
        mesh, offset = qpoints.get_kpoints_mesh()
        if any([i != 0. for i in offset]):
            raise NotImplementedError(
                "Computation of phonons on a mesh with"
                " non zero offset is not implemented, at the level of ph.x"
            )
        input_params["INPUTPH"]["ldisp"] = True
        input_params["INPUTPH"]["nq1"] = mesh[0]
        input_params["INPUTPH"]["nq2"] = mesh[1]
        input_params["INPUTPH"]["nq3"] = mesh[2]
        postpend_text = None
    except AttributeError:
        # this is the case where no mesh was set. Maybe it's a list
        try:
            list_of_points = qpoints.get_kpoints(cartesian=True)
        except AttributeError as e:
            # In this case, there are no info on the qpoints at all
            raise InputValidationError(
                "Neither a qpoints mesh or a valid "
                "list of qpoints was found in input", e.message)
        # change to 2pi/a coordinates
        lattice_parameter = numpy.linalg.norm(qpoints.cell[0])
        list_of_points *= lattice_parameter / (2. * numpy.pi)
        # add here the list of point coordinates
        if len(list_of_points) > 1:
            input_params["INPUTPH"]["qplot"] = True
            input_params["INPUTPH"]["ldisp"] = True
            postpend_text = "{}\n".format(len(list_of_points))
            for points in list_of_points:
                postpend_text += "{} {} {} 1\n".format(*points)
            # Note: the weight is fixed to 1, because ph.x calls these
            # things weights but they are not such. If they are going to
            # exist with the meaning of weights, they will be supported
        else:
            input_params["INPUTPH"]["ldisp"] = False
            postpend_text = ""
            for points in list_of_points:
                postpend_text += "{} {} {}\n".format(*points)

    # =================== NAMELISTS ========================
    # customized namelists, otherwise not present in the distributed ph code
    try:
        namelists_toprint = settings_dict.pop('NAMELISTS')
        if not isinstance(namelists_toprint, list):
            raise InputValidationError(
                "The 'NAMELISTS' value, if specified in the settings input "
                "node, must be a list of strings")
    except KeyError:
        # list of namelists not specified in the settings; do automatic detection
        namelists_toprint = self._compulsory_namelists

    input_filename = tempfolder.get_abs_path(self._INPUT_FILE_NAME)

    # create a folder for the dynamical matrices
    if not restart_flag:  # if it is a restart, it will be copied over
        tempfolder.get_subfolder(self._FOLDER_DYNAMICAL_MATRIX,
                                 create=True)

    with open(input_filename, 'w') as infile:
        # ph.x treats the first line of its input as a title line
        infile.write('AiiDA calculation\n')
        for namelist_name in namelists_toprint:
            infile.write("&{0}\n".format(namelist_name))
            # namelist content; set to {} if not present, so that we leave an
            # empty namelist
            namelist = input_params.pop(namelist_name, {})
            for k, v in sorted(namelist.iteritems()):
                infile.write(convert_input_to_namelist_entry(k, v))
            infile.write("/\n")

        # add list of qpoints if required
        if postpend_text is not None:
            infile.write(postpend_text)

        #TODO: write nat_todo

    if input_params:
        raise InputValidationError(
            "The following namelists are specified in input_params, but are "
            "not valid namelists for the current type of calculation: "
            "{}".format(",".join(input_params.keys())))

    # copy the parent scratch
    symlink = settings_dict.pop('PARENT_FOLDER_SYMLINK',
                                _default_symlink_usage)  # a boolean
    if symlink:
        # I create a symlink to each file/folder in the parent ./out
        tempfolder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

        remote_symlink_list.append(
            (parent_calc_folder.get_computer().uuid,
             os.path.join(parent_calc_folder.get_remote_path(),
                          parent_calc_out_subfolder, "*"),
             self._OUTPUT_SUBFOLDER))

        # I also create a symlink for the ./pseudo folder
        # TODO: suppress this when the recover option of QE will be fixed
        # (bug when trying to find pseudo file)
        remote_symlink_list.append(
            (parent_calc_folder.get_computer().uuid,
             os.path.join(parent_calc_folder.get_remote_path(),
                          self._get_pseudo_folder()),
             self._get_pseudo_folder()))
        #pass
    else:
        # here I copy the whole folder ./out
        remote_copy_list.append(
            (parent_calc_folder.get_computer().uuid,
             os.path.join(parent_calc_folder.get_remote_path(),
                          parent_calc_out_subfolder),
             self._OUTPUT_SUBFOLDER))
        # I also copy the ./pseudo folder
        # TODO: suppress this when the recover option of QE will be fixed
        # (bug when trying to find pseudo file)
        remote_copy_list.append(
            (parent_calc_folder.get_computer().uuid,
             os.path.join(parent_calc_folder.get_remote_path(),
                          self._get_pseudo_folder()),
             self._get_pseudo_folder()))

    if restart_flag:  # in this case, copy in addition also the dynamical matrices
        if symlink:
            remote_symlink_list.append(
                (parent_calc_folder.get_computer().uuid,
                 os.path.join(parent_calc_folder.get_remote_path(),
                              self._FOLDER_DYNAMICAL_MATRIX),
                 self._FOLDER_DYNAMICAL_MATRIX))
        else:
            # copy the dynamical matrices
            remote_copy_list.append(
                (parent_calc_folder.get_computer().uuid,
                 os.path.join(parent_calc_folder.get_remote_path(),
                              self._FOLDER_DYNAMICAL_MATRIX), '.'))
            # no need to copy the _ph0, since I copied already the whole ./out folder

    # here we may create an aiida.EXIT file
    create_exit_file = settings_dict.pop('ONLY_INITIALIZATION', False)
    if create_exit_file:
        exit_filename = tempfolder.get_abs_path('{}.EXIT'.format(
            self._PREFIX))
        with open(exit_filename, 'w') as f:
            f.write('\n')

    calcinfo = CalcInfo()
    calcinfo.uuid = self.uuid
    # Empty command line by default
    cmdline_params = settings_dict.pop('CMDLINE', [])

    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list
    calcinfo.remote_symlink_list = remote_symlink_list

    codeinfo = CodeInfo()
    codeinfo.cmdline_params = (list(cmdline_params) +
                               ["-in", self._INPUT_FILE_NAME])
    codeinfo.stdout_name = self._OUTPUT_FILE_NAME
    codeinfo.code_uuid = code.uuid
    calcinfo.codes_info = [codeinfo]

    # Retrieve by default the output file and the xml file
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append(self._OUTPUT_FILE_NAME)
    calcinfo.retrieve_list.append(self._FOLDER_DYNAMICAL_MATRIX)
    calcinfo.retrieve_list.append(
        os.path.join(self._OUTPUT_SUBFOLDER, '_ph0',
                     '{}.phsave'.format(self._PREFIX),
                     self._OUTPUT_XML_TENSOR_FILE_NAME))

    extra_retrieved = settings_dict.pop('ADDITIONAL_RETRIEVE_LIST', [])
    for extra in extra_retrieved:
        calcinfo.retrieve_list.append(extra)

    # All settings keys should have been consumed by the pops above
    if settings_dict:
        raise InputValidationError(
            "The following keys have been found in "
            "the settings input node, but were not understood: {}".format(
                ",".join(settings_dict.keys())))

    return calcinfo
def _generate_PWCPinputdata(cls, parameters, settings, pseudos, structure, kpoints=None, use_fractional=False):
    # pylint: disable=invalid-name
    """Create the input file in string format for a pw.x or cp.x calculation for the given inputs.

    :param parameters: input parameters node; first-level keys are namelist names
        (uppercased here), second-level keys the flags inside each namelist (lowercased).
    :param settings: plain dict of uppercase settings keys; consumed keys are popped
        so the caller can detect leftovers.
    :param pseudos: mapping from kind name to pseudopotential data node.
    :param structure: structure node providing cell, kinds and sites.
    :param kpoints: optional k-points node (mesh or explicit list); only used when
        ``cls._use_kpoints`` is True.
    :param use_fractional: if True, write atomic positions in crystal (fractional)
        coordinates instead of angstrom.
    :return: tuple ``(inputfile, local_copy_list_to_append)`` where the second item
        contains ``(uuid, source filename, target path)`` tuples for the pseudos.
    """
    # pylint: disable=too-many-branches,too-many-statements
    from aiida.common.utils import get_unique_filename
    import re
    local_copy_list_to_append = []

    # I put the first-level keys as uppercase (i.e., namelist and card names)
    # and the second-level keys as lowercase
    # (deeper levels are unchanged)
    input_params = _uppercase_dict(parameters.get_dict(), dict_name='parameters')
    input_params = {k: _lowercase_dict(v, dict_name=k) for k, v in input_params.items()}

    # I remove unwanted elements (for the moment, instead, I stop; to change when we setup a reasonable logging)
    for blocked in cls._blocked_keywords:
        namelist = blocked[0].upper()
        flag = blocked[1].lower()
        defaultvalue = None
        if len(blocked) >= 3:
            defaultvalue = blocked[2]
        if namelist in input_params:
            # The following lines is meant to avoid putting in input the
            # parameters like celldm(*)
            stripped_inparams = [re.sub('[(0-9)]', '', _) for _ in input_params[namelist].keys()]
            if flag in stripped_inparams:
                raise exceptions.InputValidationError(
                    "You cannot specify explicitly the '{}' flag in the '{}' "
                    'namelist or card.'.format(flag, namelist))
        if defaultvalue is not None:
            if namelist not in input_params:
                input_params[namelist] = {}
            input_params[namelist][flag] = defaultvalue

    # Set some variables (look out at the case! NAMELISTS should be uppercase,
    # internal flag names must be lowercase)
    input_params.setdefault('CONTROL', {})
    input_params['CONTROL']['pseudo_dir'] = cls._PSEUDO_SUBFOLDER
    input_params['CONTROL']['outdir'] = cls._OUTPUT_SUBFOLDER
    input_params['CONTROL']['prefix'] = cls._PREFIX
    input_params['CONTROL']['verbosity'] = input_params['CONTROL'].get('verbosity', cls._default_verbosity)

    # ============ I prepare the input site data =============
    # ------------ CELL_PARAMETERS -----------
    # Specify cell parameters only if 'ibrav' is zero.
    if input_params.get('SYSTEM', {}).get('ibrav', cls._DEFAULT_IBRAV) == 0:
        cell_parameters_card = 'CELL_PARAMETERS angstrom\n'
        for vector in structure.cell:
            cell_parameters_card += ('{0:18.10f} {1:18.10f} {2:18.10f}'
                                     '\n'.format(*vector))
    else:
        cell_parameters_card = ''

    # ------------- ATOMIC_SPECIES ------------
    atomic_species_card_list = []

    # Keep track of the filenames to avoid to overwrite files
    # I use a dictionary where the key is the pseudo PK and the value
    # is the filename I used. In this way, I also use the same filename
    # if more than one kind uses the same pseudo.
    pseudo_filenames = {}

    # I keep track of the order of species
    kind_names = []
    # I add the pseudopotential files to the list of files to be copied
    for kind in structure.kinds:
        # This should not give errors, I already checked before that
        # the list of keys of pseudos and kinds coincides
        pseudo = pseudos[kind.name]
        if kind.is_alloy or kind.has_vacancies:
            raise exceptions.InputValidationError(
                "Kind '{}' is an alloy or has "
                'vacancies. This is not allowed for pw.x input structures.'
                ''.format(kind.name))

        try:
            # If it is the same pseudopotential file, use the same filename
            filename = pseudo_filenames[pseudo.pk]
        except KeyError:
            # The pseudo was not encountered yet; use a new name and also add it to the local copy list
            filename = get_unique_filename(pseudo.filename, list(pseudo_filenames.values()))
            pseudo_filenames[pseudo.pk] = filename
            local_copy_list_to_append.append(
                (pseudo.uuid, pseudo.filename, os.path.join(cls._PSEUDO_SUBFOLDER, filename)))

        kind_names.append(kind.name)
        # FIX: write the actual pseudopotential filename in the card; it was
        # previously emitting the literal text '(unknown)', which pw.x cannot
        # resolve to a pseudo file (the `filename` computed above was unused).
        atomic_species_card_list.append(f'{kind.name.ljust(6)} {kind.mass} {filename}\n')

    # I join the lines, but I resort them using the alphabetical order of
    # species, given by the kind_names list. I also store the mapping_species
    # list, with the order of species used in the file
    mapping_species, sorted_atomic_species_card_list = list(
        zip(*sorted(zip(kind_names, atomic_species_card_list))))
    # The format of mapping_species required later is a dictionary, whose
    # values are the indices, so I convert to this format
    # Note the (idx+1) to convert to fortran 1-based lists
    mapping_species = {sp_name: (idx + 1) for idx, sp_name in enumerate(mapping_species)}
    # I add the first line
    sorted_atomic_species_card_list = (['ATOMIC_SPECIES\n'] + list(sorted_atomic_species_card_list))
    atomic_species_card = ''.join(sorted_atomic_species_card_list)
    # Free memory
    del sorted_atomic_species_card_list
    del atomic_species_card_list

    # ------------ ATOMIC_POSITIONS -----------
    # Check on validity of FIXED_COORDS
    fixed_coords_strings = []
    fixed_coords = settings.pop('FIXED_COORDS', None)
    if fixed_coords is None:
        # No fixed_coords specified: I store a list of empty strings
        fixed_coords_strings = [''] * len(structure.sites)
    else:
        if len(fixed_coords) != len(structure.sites):
            raise exceptions.InputValidationError(
                'Input structure contains {:d} sites, but '
                'fixed_coords has length {:d}'.format(len(structure.sites), len(fixed_coords)))

        for i, this_atom_fix in enumerate(fixed_coords):
            if len(this_atom_fix) != 3:
                raise exceptions.InputValidationError(f'fixed_coords({i + 1:d}) has not length three')
            for fixed_c in this_atom_fix:
                if not isinstance(fixed_c, bool):
                    raise exceptions.InputValidationError(f'fixed_coords({i + 1:d}) has non-boolean elements')

            if_pos_values = [cls._if_pos(_) for _ in this_atom_fix]
            fixed_coords_strings.append(' {:d} {:d} {:d}'.format(*if_pos_values))

    abs_pos = [_.position for _ in structure.sites]
    if use_fractional:
        import numpy as np
        atomic_positions_card_list = ['ATOMIC_POSITIONS crystal\n']
        coordinates = np.dot(np.array(abs_pos), np.linalg.inv(np.array(structure.cell)))
    else:
        atomic_positions_card_list = ['ATOMIC_POSITIONS angstrom\n']
        coordinates = abs_pos

    for site, site_coords, fixed_coords_string in zip(structure.sites, coordinates, fixed_coords_strings):
        atomic_positions_card_list.append(
            '{0} {1:18.10f} {2:18.10f} {3:18.10f} {4}\n'.format(
                site.kind_name.ljust(6), site_coords[0], site_coords[1], site_coords[2], fixed_coords_string))

    atomic_positions_card = ''.join(atomic_positions_card_list)
    del atomic_positions_card_list

    # Optional ATOMIC_FORCES card
    atomic_forces = settings.pop('ATOMIC_FORCES', None)
    if atomic_forces is not None:
        # Checking that there are as many forces defined as there are sites in the structure
        if len(atomic_forces) != len(structure.sites):
            raise exceptions.InputValidationError(
                'Input structure contains {:d} sites, but atomic forces has length {:d}'
                .format(len(structure.sites), len(atomic_forces)))

        lines = ['ATOMIC_FORCES\n']
        for site, vector in zip(structure.sites, atomic_forces):
            # Checking that all 3 dimensions are specified:
            if len(vector) != 3:
                raise exceptions.InputValidationError(f'Forces({vector}) for {site} has not length three')
            lines.append('{0} {1:18.10f} {2:18.10f} {3:18.10f}\n'.format(site.kind_name.ljust(6), *vector))

        # Append to atomic_positions_card so that this card will be printed directly after
        atomic_positions_card += ''.join(lines)
        del lines

    # Optional ATOMIC_VELOCITIES card
    atomic_velocities = settings.pop('ATOMIC_VELOCITIES', None)
    if atomic_velocities is not None:
        # Checking that there are as many velocities defined as there are sites in the structure
        if len(atomic_velocities) != len(structure.sites):
            raise exceptions.InputValidationError(
                'Input structure contains {:d} sites, but atomic velocities has length {:d}'
                .format(len(structure.sites), len(atomic_velocities)))

        lines = ['ATOMIC_VELOCITIES\n']
        for site, vector in zip(structure.sites, atomic_velocities):
            # Checking that all 3 dimensions are specified:
            if len(vector) != 3:
                raise exceptions.InputValidationError(f'Velocities({vector}) for {site} has not length three')
            lines.append('{0} {1:18.10f} {2:18.10f} {3:18.10f}\n'.format(site.kind_name.ljust(6), *vector))

        # Append to atomic_positions_card so that this card will be printed directly after
        atomic_positions_card += ''.join(lines)
        del lines

    # I set the variables that must be specified, related to the system
    # Set some variables (look out at the case! NAMELISTS should be
    # uppercase, internal flag names must be lowercase)
    input_params.setdefault('SYSTEM', {})
    input_params['SYSTEM'].setdefault('ibrav', cls._DEFAULT_IBRAV)
    ibrav = input_params['SYSTEM']['ibrav']
    if ibrav != 0:
        try:
            structure_parameters = get_parameters_from_cell(
                ibrav=ibrav,
                cell=structure.get_attribute('cell'),
                tolerance=settings.pop('IBRAV_CELL_TOLERANCE', 1e-6))
        except ValueError as exc:
            raise QEInputValidationError(f'Cannot get structure parameters from cell: {exc}') from exc
        input_params['SYSTEM'].update(structure_parameters)
    input_params['SYSTEM']['nat'] = len(structure.sites)
    input_params['SYSTEM']['ntyp'] = len(structure.kinds)

    # ============ I prepare the k-points =============
    if cls._use_kpoints:
        try:
            mesh, offset = kpoints.get_kpoints_mesh()
            has_mesh = True
            force_kpoints_list = settings.pop('FORCE_KPOINTS_LIST', False)
            if force_kpoints_list:
                kpoints_list = kpoints.get_kpoints_mesh(print_list=True)
                num_kpoints = len(kpoints_list)
                has_mesh = False
                weights = [1.] * num_kpoints
        except AttributeError as exception:
            try:
                kpoints_list = kpoints.get_kpoints()
                num_kpoints = len(kpoints_list)
                has_mesh = False
                if num_kpoints == 0:
                    raise exceptions.InputValidationError(
                        'At least one k point must be provided for non-gamma calculations') from exception
            except AttributeError:
                raise exceptions.InputValidationError('No valid kpoints have been found') from exception

            try:
                _, weights = kpoints.get_kpoints(also_weights=True)
            except AttributeError:
                weights = [1.] * num_kpoints

        gamma_only = settings.pop('GAMMA_ONLY', False)

        if gamma_only:
            if has_mesh:
                if tuple(mesh) != (1, 1, 1) or tuple(offset) != (0., 0., 0.):
                    raise exceptions.InputValidationError(
                        'If a gamma_only calculation is requested, the '
                        'kpoint mesh must be (1,1,1),offset=(0.,0.,0.)')
            else:
                # FIX: `tuple(0., 0., 0.)` raised TypeError at runtime
                # (tuple() accepts at most one argument); the intended value
                # is the literal tuple (0., 0., 0.).
                if (len(kpoints_list) != 1 or tuple(kpoints_list[0]) != (0., 0., 0.)):
                    raise exceptions.InputValidationError(
                        'If a gamma_only calculation is requested, the '
                        'kpoints coordinates must only be (0.,0.,0.)')

            kpoints_type = 'gamma'
        elif has_mesh:
            kpoints_type = 'automatic'
        else:
            kpoints_type = 'crystal'

        kpoints_card_list = [f'K_POINTS {kpoints_type}\n']

        if kpoints_type == 'automatic':
            if any([i not in [0, 0.5] for i in offset]):
                raise exceptions.InputValidationError('offset list must only be made of 0 or 0.5 floats')
            the_offset = [0 if i == 0. else 1 for i in offset]
            the_6_integers = list(mesh) + the_offset
            kpoints_card_list.append('{:d} {:d} {:d} {:d} {:d} {:d}\n'
                                     ''.format(*the_6_integers))
        elif kpoints_type == 'gamma':
            # nothing to be written in this case
            pass
        else:
            kpoints_card_list.append(f'{num_kpoints:d}\n')
            for kpoint, weight in zip(kpoints_list, weights):
                kpoints_card_list.append(
                    f' {kpoint[0]:18.10f} {kpoint[1]:18.10f} {kpoint[2]:18.10f} {weight:18.10f}\n')

        kpoints_card = ''.join(kpoints_card_list)
        del kpoints_card_list

    # =================== NAMELISTS AND CARDS ========================
    try:
        namelists_toprint = settings.pop('NAMELISTS')
        if not isinstance(namelists_toprint, list):
            raise exceptions.InputValidationError(
                "The 'NAMELISTS' value, if specified in the settings input "
                'node, must be a list of strings')
    except KeyError:
        # list of namelists not specified; do automatic detection
        try:
            control_nl = input_params['CONTROL']
            calculation_type = control_nl['calculation']
        except KeyError as exception:
            # FIX: added the missing space after "namelist." (the adjacent
            # string literals previously concatenated to "namelist.It is").
            raise exceptions.InputValidationError(
                "No 'calculation' in CONTROL namelist. "
                'It is required for automatic detection of the valid list '
                'of namelists. Otherwise, specify the list of namelists '
                "using the NAMELISTS key inside the 'settings' input node.") from exception

        try:
            namelists_toprint = cls._automatic_namelists[calculation_type]
        except KeyError as exception:
            raise exceptions.InputValidationError(
                "Unknown 'calculation' value in "
                'CONTROL namelist {}. Otherwise, specify the list of '
                "namelists using the NAMELISTS inside the 'settings' input "
                'node'.format(calculation_type)) from exception

    inputfile = ''
    for namelist_name in namelists_toprint:
        inputfile += f'&{namelist_name}\n'
        # namelist content; set to {} if not present, so that we leave an empty namelist
        namelist = input_params.pop(namelist_name, {})
        for key, value in sorted(namelist.items()):
            inputfile += convert_input_to_namelist_entry(key, value, mapping=mapping_species)
        inputfile += '/\n'

    # Write cards now
    inputfile += atomic_species_card
    inputfile += atomic_positions_card
    if cls._use_kpoints:
        inputfile += kpoints_card
    inputfile += cell_parameters_card

    # Generate additional cards based on input parameters and settings that are subclass specific
    tail = cls._generate_PWCP_input_tail(input_params=input_params, settings=settings)
    if tail:
        inputfile += f'\n{tail}'

    if input_params:
        raise exceptions.InputValidationError(
            'The following namelists are specified in input_params, but are '
            'not valid namelists for the current type of calculation: '
            '{}'.format(','.join(list(input_params.keys()))))

    return inputfile, local_copy_list_to_append
def prepare_for_submission(self, folder):
    """Create the input files from the input nodes passed to this instance of the `CalcJob`.

    Writes the namelist-based input file (optionally followed by free text
    from `_get_following_text`), prepares copy lists from an optional
    parent folder and returns the `CalcInfo` for the engine.

    :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk
    :return: `aiida.common.datastructures.CalcInfo` instance
    :raises ~aiida.common.exceptions.InputValidationError: if a blocked
        keyword is given explicitly, an unexpected namelist is provided,
        or unknown settings keys remain after processing.
    """
    # `settings` is optional; compare its keys case-insensitively (uppercased)
    if 'settings' in self.inputs:
        settings = _uppercase_dict(self.inputs.settings.get_dict(),
                                   dict_name='settings')
    else:
        settings = {}

    # Free-form text appended after the namelists (defined by the subclass)
    following_text = self._get_following_text()

    # Put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
    if 'parameters' in self.inputs:
        parameters = _uppercase_dict(self.inputs.parameters.get_dict(),
                                     dict_name='parameters')
        parameters = {
            k: _lowercase_dict(v, dict_name=k)
            for k, v in six.iteritems(parameters)
        }
    else:
        parameters = {}

    # Force default values for blocked keywords. NOTE: this is different from PW/CP
    for blocked in self._blocked_keywords:
        namelist = blocked[0].upper()
        key = blocked[1].lower()
        value = blocked[2]
        if namelist in parameters:
            if key in parameters[namelist]:
                raise exceptions.InputValidationError(
                    "You cannot specify explicitly the '{}' key in the '{}' "
                    'namelist.'.format(key, namelist))
        else:
            parameters[namelist] = {}
        parameters[namelist][key] = value

    # =================== NAMELISTS AND CARDS ========================
    try:
        namelists_toprint = settings.pop('NAMELISTS')
        if not isinstance(namelists_toprint, list):
            raise exceptions.InputValidationError(
                "The 'NAMELISTS' value, if specified in the settings input node, must be a list of strings"
            )
    except KeyError:
        # list of namelists not specified; do automatic detection
        namelists_toprint = self._default_namelists

    input_filename = self.inputs.metadata.options.input_filename

    with folder.open(input_filename, 'w') as infile:
        for namelist_name in namelists_toprint:
            infile.write(u'&{0}\n'.format(namelist_name))
            # namelist content; set to {} if not present, so that we leave an empty namelist
            namelist = parameters.pop(namelist_name, {})
            for key, value in sorted(six.iteritems(namelist)):
                infile.write(convert_input_to_namelist_entry(key, value))
            infile.write(u'/\n')

        # Write remaining text now, if any
        infile.write(following_text)

    # Check for specified namelists that are not expected
    if parameters:
        raise exceptions.InputValidationError(
            'The following namelists are specified in parameters, but are '
            'not valid namelists for the current type of calculation: '
            '{}'.format(','.join(list(parameters.keys()))))

    remote_copy_list = []
    local_copy_list = []

    # copy remote output dir, if specified
    parent_calc_folder = self.inputs.get('parent_folder', None)
    if parent_calc_folder is not None:
        if isinstance(parent_calc_folder, RemoteData):
            parent_calc_out_subfolder = settings.pop(
                'PARENT_CALC_OUT_SUBFOLDER', self._INPUT_SUBFOLDER)
            remote_copy_list.append(
                (parent_calc_folder.computer.uuid,
                 os.path.join(parent_calc_folder.get_remote_path(),
                              parent_calc_out_subfolder),
                 self._OUTPUT_SUBFOLDER))
        elif isinstance(parent_calc_folder, FolderData):
            # TODO: test me, especially with deep relative paths.
            for filename in parent_calc_folder.list_object_names():
                local_copy_list.append(
                    (parent_calc_folder.uuid, filename,
                     os.path.join(self._OUTPUT_SUBFOLDER, filename)))
        elif isinstance(parent_calc_folder, SinglefileData):
            # TODO: test me
            single_file = parent_calc_folder
            local_copy_list.append((single_file.uuid, single_file.filename,
                                    single_file.filename))

    codeinfo = datastructures.CodeInfo()
    codeinfo.cmdline_params = settings.pop('CMDLINE', [])
    codeinfo.stdin_name = self.inputs.metadata.options.input_filename
    codeinfo.stdout_name = self.inputs.metadata.options.output_filename
    codeinfo.code_uuid = self.inputs.code.uuid

    calcinfo = datastructures.CalcInfo()
    calcinfo.uuid = str(self.uuid)
    calcinfo.codes_info = [codeinfo]
    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list

    # Retrieve by default the output file and the xml file
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append(
        self.inputs.metadata.options.output_filename)
    settings_retrieve_list = settings.pop('ADDITIONAL_RETRIEVE_LIST', [])
    calcinfo.retrieve_list += settings_retrieve_list
    calcinfo.retrieve_list += self._internal_retrieve_list

    calcinfo.retrieve_singlefile_list = self._retrieve_singlefile_list

    # We might still have parser options in the settings dictionary: pop them.
    _pop_parser_options(self, settings)

    # Any leftover settings keys are unexpected: fail loudly
    if settings:
        unknown_keys = ', '.join(list(settings.keys()))
        raise exceptions.InputValidationError(
            '`settings` contained unexpected keys: {}'.format(
                unknown_keys))

    return calcinfo