def _validate_input_output_names(self): """ This function validates the input and output file names given in the settings Dict. """ # Let's check that the user-specified input filename ends with .win if not self.inputs.metadata.options.input_filename.endswith( self._REQUIRED_INPUT_SUFFIX): raise exc.InputValidationError( "The input filename for Wannier90 (specified in the metadata.options.input_filename) " "must end with .win, you specified instead '{}'".format( self.inputs.metadata.options.input_filename)) # The output filename is defined by Wannier90 based on the seedname. # In AiiDA, the output_filename needs to be specified as a metadata.option to allow for # `verdi calcjob outputcat` to work correctly. Here we check that, if the users manually changed # the input_filename, they also changed the output_filename accordingly expected_output_filename = self._SEEDNAME + ".wout" if self.inputs.metadata.options.output_filename != expected_output_filename: raise exc.InputValidationError( "The output filename specified is wrong. You probably changed the metadata.options.input_filename " "but you forgot to adapt the metadata.options.output_filename accordingly! Currently, you have: " "input_filename: '{}', output_filename: '{}', while I would expect '{}'" .format(self.inputs.metadata.options.input_filename, self.inputs.metadata.options.output_filename, expected_output_filename))
def validate_resources(resources, ctx):
    """Validate the resources.

    :param resources: the resources dictionary, or ``plumpy.UNSPECIFIED`` if not provided
    :param ctx: the port context (unused, required by the validator signature)
    :raises InputValidationError: if `num_machines` is not specified or is not an integer.
    """
    if resources is not plumpy.UNSPECIFIED:

        if 'num_machines' not in resources:
            raise exceptions.InputValidationError('the `resources` input has to at least include `num_machines`.')

        # NOTE: `bool` is a subclass of `int`, so `num_machines=True` would pass
        # this check; callers are expected to pass a genuine integer count.
        if not isinstance(resources['num_machines'], int):
            # BUGFIX: corrected the typo "shoud" in the error message.
            raise exceptions.InputValidationError('the input `resources.num_machines` should be an integer.')
def _add_parallelization_flags_to_cmdline_params(self, cmdline_params):
    """Get the command line parameters with added parallelization flags.

    Adds the parallelization flags to the given `cmdline_params` and
    returns the updated list.

    Raises an `InputValidationError` if multiple aliases to the same
    flag are given in `cmdline_params`, or the same flag is given
    both in `cmdline_params` and the explicit `parallelization` input.

    :param cmdline_params: list of command line parameter strings (may contain
        multiple space-separated tokens per entry)
    :return: a new list with the parallelization flags appended
    """
    # Deep-copy so the caller's list is never mutated.
    cmdline_params_res = copy.deepcopy(cmdline_params)
    # The `cmdline_params_normalized` are used only here to check
    # for existing parallelization flags.
    cmdline_params_normalized = []
    for param in cmdline_params:
        cmdline_params_normalized.extend(param.split())

    if 'parallelization' in self.inputs:
        parallelization_dict = self.inputs.parallelization.get_dict()
    else:
        parallelization_dict = {}

    # To make the order of flags consistent and "nice", we use the
    # ordering from the flag definition.
    for flag_name in self._ENABLED_PARALLELIZATION_FLAGS:
        all_aliases = list(
            self._PARALLELIZATION_FLAG_ALIASES[flag_name]) + [flag_name]
        aliases_in_cmdline = [
            alias for alias in all_aliases
            if f'-{alias}' in cmdline_params_normalized
        ]
        if aliases_in_cmdline:
            # The same flag must not be given twice under different aliases.
            if len(aliases_in_cmdline) > 1:
                raise exceptions.InputValidationError(
                    f'Conflicting parallelization flags {aliases_in_cmdline} '
                    "in settings['CMDLINE']")
            if flag_name in parallelization_dict:
                raise exceptions.InputValidationError(
                    f"Parallelization flag '{aliases_in_cmdline[0]}' specified in settings['CMDLINE'] conflicts "
                    f"with '{flag_name}' in the 'parallelization' input.")
            else:
                # Flag only in CMDLINE: keep it as-is (deprecated path) and
                # skip appending it again from the `parallelization` input.
                warnings.warn(
                    "Specifying the parallelization flags through settings['CMDLINE'] is "
                    "deprecated, use the 'parallelization' input instead.",
                    AiidaDeprecationWarning)
                continue
        if flag_name in parallelization_dict:
            flag_value = parallelization_dict[flag_name]
            cmdline_params_res += [f'-{flag_name}', str(flag_value)]
    return cmdline_params_res
def get(cls, pk=None, label=None, machinename=None): """ Get a Computer object with given identifier string, that can either be the numeric ID (pk), or the label (and computername) (if unique). :param pk: the numeric ID (pk) for code :param label: the code label identifying the code to load :param machinename: the machine name where code is setup :raise aiida.common.NotExistent: if no code identified by the given string is found :raise aiida.common.MultipleObjectsError: if the string cannot identify uniquely a code :raise aiida.common.InputValidationError: if neither a pk nor a label was passed in """ # pylint: disable=arguments-differ from aiida.orm.utils import load_code # first check if code pk is provided if pk: code_int = int(pk) try: return load_code(pk=code_int) except exceptions.NotExistent: raise ValueError('{} is not valid code pk'.format(pk)) except exceptions.MultipleObjectsError: raise exceptions.MultipleObjectsError( "More than one code in the DB with pk='{}'!".format(pk)) # check if label (and machinename) is provided elif label is not None: return cls.get_code_helper(label, machinename) else: raise exceptions.InputValidationError( 'Pass either pk or code label (and machinename)')
def _generate_PWCP_input_tail(*args, **kwargs): """Parse CP specific input parameters.""" settings = kwargs['settings'] # AUTOPILOT autopilot = settings.pop('AUTOPILOT', []) if not autopilot: return '' autopilot_card = 'AUTOPILOT\n' try: for event in autopilot: if isinstance(event['newvalue'], str): autopilot_card += f"ON_STEP = {event['onstep']} : '{event['what']}' = {event['newvalue']}\n" else: autopilot_card += f"ON_STEP = {event['onstep']} : {event['what']} = {event['newvalue']}\n" except KeyError as exception: raise exceptions.InputValidationError( f"""AUTOPILOT input: you must specify a list of dictionaries like the following: [ {{'onstep' : 10, 'what' : 'dt', 'newvalue' : 5.0 }}, {{'onstep' : 20, 'what' : 'whatever', 'newvalue' : 'pippo'}} ] You specified {autopilot} """) from exception autopilot_card += 'ENDRULES\n' return autopilot_card
def calculate_invariant_with_parities(dimensionality: orm.Int, scf_out_params: orm.Dict,
                                      par_data: orm.ArrayData) -> orm.Dict:
    """Calculate the z2 invariant from the parities using the output of a BandsxCalculation.

    :param dimensionality: number of periodic dimensions; only 2 is implemented
    :param scf_out_params: scf output parameters containing `number_of_electrons`
    :param par_data: array data with a 'par' array of parity eigenvalues per TRIM point
    :return: a `Dict` with key 'nu' (0 or 1; -1 flags an invalid parity product)
    :raises NotImplementedError: if dimensionality is 3
    :raises InputValidationError: for any other dimensionality
    """
    dim = dimensionality.value
    parities = par_data.get_array('par')
    n_el = int(scf_out_params.get_dict()['number_of_electrons'])
    if dim == 2:
        x = 1
        for p in parities:
            delta = 1
            # Product over every second band (the occupied Kramers pairs).
            for i in range(0, n_el, 2):
                delta *= p[i]
            x *= delta
        if x == 1:
            res = {'nu': 0}
        elif x == -1:
            res = {'nu': 1}
        else:
            # The parity products should multiply to +-1; anything else
            # signals inconsistent input, flagged with nu = -1.
            res = {'nu': -1}
            # raise exceptions.OutputParsingError(
            #    'Invalid result for z2 using parities')
    elif dim == 3:
        # BUGFIX: was `raise NotImplemented(...)`, which raises a TypeError
        # because `NotImplemented` is a sentinel, not an exception class.
        raise NotImplementedError('dimensionality = 3 not implemented.')
    else:
        raise exceptions.InputValidationError(
            'dimensionality must be either 2 or 3')
    return orm.Dict(dict=res)
def _check_reserved_keywords(self, parameters): provided_reserved_kws = [] for kw in parameters: if kw in _RESERVED_KEYWORDS: provided_reserved_kws.append(kw) if provided_reserved_kws: msg = f'The reserved keywords {", ".join(provided_reserved_kws)} were specified but should not be provided.' raise exceptions.InputValidationError(msg)
def label(self, value):
    """Set the label.

    :param value: the new value to set
    """
    # The '@' character separates code label and computer name in the full
    # label, so it must not appear inside the label itself.
    if '@' in str(value):
        raise exceptions.InputValidationError("Code labels must not contain the '@' symbol")

    super(Code, self.__class__).label.fset(self, value)  # pylint: disable=no-member
def _validate_parameters(self): """Validate the 'parameters' input `Dict` node. Check that no blocked keywords are present. """ keyword_intersection = set(self.inputs.parameters.keys()) & set(self._BLOCKED_KEYWORDS) if len(keyword_intersection) > 0: raise exceptions.InputValidationError( f"Some blocked input keywords were provided: {', '.join(list(keyword_intersection))}" )
def _validate_pseudos(self): """Validate the 'pseudos' input namespace. Check that each 'kind' in the input `StructureData` has a corresponding pseudopotential. """ kinds = [kind.name for kind in self.inputs.structure.kinds] if set(kinds) != set(self.inputs.pseudos.keys()): raise exceptions.InputValidationError( 'Mismatch between the defined pseudos and the list of kinds of the structure.\n' 'Pseudos: {};\nKinds:{}'.format(', '.join(list(self.inputs.pseudos.keys())), ', '.join(list(kinds))) )
def validate_parser(parser_name, ctx):
    """Validate the parser.

    :param parser_name: entry point name of the parser, or ``plumpy.UNSPECIFIED``
    :param ctx: the port context (unused, required by the validator signature)
    :raises InputValidationError: if the parser name does not correspond to a loadable `Parser` class.
    """
    from aiida.plugins import ParserFactory

    # Nothing to validate when no parser was specified.
    if parser_name is plumpy.UNSPECIFIED:
        return

    try:
        ParserFactory(parser_name)
    except exceptions.EntryPointError as exception:
        raise exceptions.InputValidationError('invalid parser specified: {}'.format(exception))
def validate_calc_job(inputs, ctx):
    """Validate the entire set of inputs passed to the `CalcJob` constructor.

    Reasons that will cause this validation to raise an `InputValidationError`:

     * No `Computer` has been specified, neither directly in `metadata.computer` nor indirectly through the `Code` input
     * The specified computer is not stored
     * The `Computer` specified in `metadata.computer` is not the same as that of the specified `Code`

    :raises `~aiida.common.exceptions.InputValidationError`: if inputs are invalid
    """
    try:
        ctx.get_port('code')
        ctx.get_port('metadata.computer')
    except ValueError:
        # If the namespace no longer contains the `code` or `metadata.computer` ports we skip validation
        return

    code = inputs.get('code', None)
    # BUGFIX: `code` may be absent from the inputs, in which case accessing
    # `.computer` raised an `AttributeError` instead of the intended
    # `InputValidationError` below.
    computer_from_code = code.computer if code is not None else None
    computer_from_metadata = inputs.get('metadata', {}).get('computer', None)

    if not computer_from_code and not computer_from_metadata:
        raise exceptions.InputValidationError(
            'no computer has been specified in `metadata.computer` nor via `code`.')

    if computer_from_code and not computer_from_code.is_stored:
        raise exceptions.InputValidationError(
            'the Computer<{}> is not stored'.format(computer_from_code))

    if computer_from_metadata and not computer_from_metadata.is_stored:
        raise exceptions.InputValidationError(
            'the Computer<{}> is not stored'.format(computer_from_metadata))

    if computer_from_code and computer_from_metadata and computer_from_code.uuid != computer_from_metadata.uuid:
        raise exceptions.InputValidationError(
            'Computer<{}> explicitly defined in `metadata.computer` is different from '
            'Computer<{}> which is the computer of Code<{}> defined as the `code` input.'
            .format(computer_from_metadata, computer_from_code, code))
def _validate_inputs(self):
    """Validate interdependent inputs of the DOS calculation.

    Checks that `dos_method` is one of the supported values, that a Gaussian
    broadening is provided when the Gaussian method is selected, and that all
    inputs required for a projected DOS are present when `dos_type` is 'pdos'.

    :raises InputValidationError: if any of the checks fails
    """
    # NOTE(review): the membership test uses the input node directly while the
    # error message uses `.value` — presumably these are AiiDA `Str` nodes that
    # compare equal to plain strings; confirm against the spec definition.
    if self.inputs.dos_method not in ['tetrahedron', 'gaussian']:
        raise exceptions.InputValidationError(
            f'`dos_method` should be `tetrahedron` or `gaussian`, not {self.inputs.dos_method.value}'
        )
    if self.inputs.dos_method == 'gaussian':
        # Gaussian smearing requires an explicit broadening width.
        if 'gaussian_broadening' not in self.inputs:
            raise exceptions.InputValidationError(
                '`gaussian_broadening` must be provided if `dos_method` is `gaussian`'
            )
    if self.inputs.dos_type == 'pdos':
        # A projected DOS additionally needs the atom indices to project on,
        # the OpenMX input structure and the orbital configurations.
        required_inputs_pdos = [
            'pdos_atom_indices', 'openmx_input_structure',
            'openmx_orbital_configurations'
        ]
        for required_input in required_inputs_pdos:
            if required_input not in self.inputs:
                raise exceptions.InputValidationError(
                    f'`{required_input}` must be provided if `dos_type` is `pdos`'
                )
def _case_transform_dict(d, dict_name, func_name, transform): from collections import Counter if not isinstance(d, dict): raise TypeError('{} accepts only dictionaries as argument, while you gave {}'.format(func_name, type(d))) new_dict = dict((transform(str(k)), v) for k, v in six.iteritems(d)) if len(new_dict) != len(d): num_items = Counter(transform(str(k)) for k in d.keys()) double_keys = ','.join([k for k, v in num_items if v > 1]) raise exceptions.InputValidationError( "Inside the dictionary '{}' there are the following keys that " 'are repeated more than once when compared case-insensitively: {}.' 'This is not allowed.'.format(dict_name, double_keys)) return new_dict
def _case_transform_dict(dictionary, dict_name, func_name, transform): """Transform the keys of a dictionary and check for transformation-insensitive duplicates.""" if not isinstance(dictionary, dict): raise TypeError( f'{func_name} accepts only dictionaries as argument, got {type(dictionary)}' ) new_dict = dict((transform(str(k)), v) for k, v in dictionary.items()) if len(new_dict) != len(dictionary): num_items = Counter(transform(str(k)) for k in dictionary.keys()) double_keys = ','.join([k for k, v in num_items if v > 1]) raise exceptions.InputValidationError( "Inside the dictionary '{}' there are the following keys that " 'are repeated more than once when compared case-insensitively: {}.' 'This is not allowed.'.format(dict_name, double_keys)) return new_dict
def get_column(self, colname, alias):  # pylint: disable=no-self-use
    """
    Return the column for a given projection.

    :param colname: name of the column (attribute) to fetch from the aliased class
    :param alias: the SQLAlchemy aliased class to read the column from
    :return: the mapped column attribute ``alias.colname``
    :raises InputValidationError: if `colname` is not a column of `alias`
    """
    try:
        return getattr(alias, colname)
    except AttributeError:
        # Build a helpful message listing the valid column names of the mapper.
        raise exceptions.InputValidationError(
            '{} is not a column of {}\n'
            'Valid columns are:\n'
            '{}'.format(
                colname,
                alias,
                '\n'.join(alias._sa_class_manager.mapper.c.keys())  # pylint: disable=protected-access
            ))
def _validate_lowercase(dictionary): """ This function gets a dictionary and checks that all keys are lower-case. :param dict_node: a dictionary :raises InputValidationError: if any of the keys is not lower-case :return: ``None`` if validation passes """ non_lowercase = [] for key in dictionary: if key != key.lower(): non_lowercase.append(key) if non_lowercase: raise exc.InputValidationError( "input keys to the Wannier90 plugin must be all lower-case, but the following aren't : {}" .format(", ".join(non_lowercase)))
def set_blocked_keywords(cls, parameters):
    """Force default values for blocked keywords. NOTE: this is different from PW/CP.

    :param parameters: dict of namelists; modified in place and returned
    :return: the `parameters` dict with all blocked keywords set to their defaults
    :raises InputValidationError: if a blocked keyword was set explicitly
    """
    for blocked in cls._blocked_keywords:
        namelist = blocked[0].upper()
        key = blocked[1].lower()
        value = blocked[2]
        # Create the namelist on demand; otherwise the user must not have
        # set the blocked key themselves.
        if namelist not in parameters:
            parameters[namelist] = {}
        elif key in parameters[namelist]:
            raise exceptions.InputValidationError(
                "You cannot specify explicitly the '{}' key in the '{}' "
                'namelist.'.format(key, namelist))
        parameters[namelist][key] = value
    return parameters
def _get_local_input_file_lists(self, input_file_specs, optional_file_globs): """ Generate the lists of input files for the case of a local input folder. """ # It is possible that the same file is matched multiple times, # for example if a globbing pattern matches an explicit filename. # To avoid copying the same file twice (which could be costly), # we keep track of which local folder content is already in the # list of files to copy. # To avoid false errors, the explicit files (and thus the required # ones) are checked first, before doing pattern-matching. local_folder_content = set( self.inputs.local_input_folder.list_object_names()) def _get_local_file_info(filename): return (self.inputs.local_input_folder.uuid, filename, filename) local_copy_list = [] not_found = [] for file_spec in input_file_specs: filename = self._SEEDNAME + file_spec.suffix if filename in local_folder_content: local_copy_list.append(_get_local_file_info(filename)) local_folder_content.remove(filename) elif file_spec.required: not_found.append(filename) if not_found: raise exc.InputValidationError( "{} necessary input files were not found: {} (NOTE: if you " "wanted to run a preprocess step, remember to pass " "postproc_setup=True in the input settings node)".format( len(not_found), ', '.join(str(nf) for nf in not_found))) for pattern in optional_file_globs: matched_files = fnmatch.filter(local_folder_content, pattern) local_folder_content -= set(matched_files) local_copy_list.extend( _get_local_file_info(filename) for filename in matched_files) return _InputFileLists(local_copy_list=local_copy_list, remote_copy_list=[], remote_symlink_list=[])
def _validate_input_parameters(self, parameters): """ This function gets a dictionary with the content of the parameters Dict passed by the user and performs some validation. In particular, it checks that there are no blocked parameters keys passed. :param dict_node: a dictionary :raises InputValidationError: if any of the validation fails :return: ``None`` if validation passes """ existing_blocked_keys = [] for key in self._BLOCKED_PARAMETER_KEYS: if key in parameters: existing_blocked_keys.append(key) if existing_blocked_keys: raise exc.InputValidationError( 'The following blocked keys were found in the parameters: {}'. format(", ".join(existing_blocked_keys)))
def case_transform_dict_keys(dictionary, dict_name, func_name, transform):
    """Transform the keys of a dictionary and check for transformation-insensitive duplicates.

    :param dictionary: dict to transform
    :param dict_name: name of the dictionary, used in the error message
    :param func_name: name of the transformation function used for error messages
    :param transform: transformation function
    :returns: dictionary where keys have been converted to strings and transformed
    :raises TypeError: if `dictionary` is not a dict
    :raises: InputValidationError if case-insensitive comparison leads to duplicate keys
    """
    if not isinstance(dictionary, dict):
        raise TypeError(
            f'{func_name} accepts only dictionaries as argument, got {type(dictionary)}'
        )
    new_dict = dict((transform(str(k)), v) for k, v in dictionary.items())

    if len(new_dict) != len(dictionary):
        num_items = Counter(transform(str(k)) for k in dictionary.keys())
        # BUGFIX: iterate `.items()` — iterating a Counter directly yields only
        # its keys, so the previous `for k, v in num_items` unpacking raised a
        # ValueError instead of producing the intended error message.
        double_keys = ','.join([k for k, v in num_items.items() if v > 1])
        raise exceptions.InputValidationError(
            "Inside the dictionary '{}' there are the following keys that "
            'are repeated more than once when compared case-insensitively: {}.'
            'This is not allowed.'.format(dict_name, double_keys))

    return new_dict
def generate_trim(structure: orm.StructureData, dimensionality: orm.Int) -> orm.KpointsData:
    """Generate the TRIM point KpointsData for the given structure and dimensionality.

    :param structure: structure whose cell is attached to the kpoints
    :param dimensionality: 2 or 3; number of periodic dimensions
    :return: a `KpointsData` holding the TRIM points
    :raises InputValidationError: if the dimensionality is neither 2 nor 3
    """
    from itertools import product

    dim = dimensionality.value
    # Each fractional coordinate of a TRIM point is either 0 or 1/2.
    half_axis = [0.0, 0.5]

    if dim == 2:
        grid = np.array(list(product(half_axis, half_axis, [0.0])))
    elif dim == 3:
        grid = np.array(list(product(half_axis, half_axis, half_axis)))
    else:
        raise exceptions.InputValidationError(
            'Invalid dimensionality {}'.format(dim))

    res = orm.KpointsData()
    res.set_cell_from_structure(structure)
    res.set_kpoints(grid)
    return res
def filter_namelists(parameters, namelists_toprint, check_remaining=True):
    """Select only the given namelists from a parameter dict.

    :param parameters: 'dict' containing the fortran namelists and parameters to be used.
        NOTE: the selected namelists are popped, so the dict is modified in place.
    :param namelists_toprint: 'list' containing the namelists to be selected from
        'parameters'. If a given namelist is not present, an empty dict is used
    :param check_remaining: 'bool', if True, raise an exception if more namelists
        other than the ones given in 'namelist_toprint' are present in 'parameters'.
    :return: 'dict' of namelists.
    :raise: InputValidationError
    """
    # Pop each requested namelist so leftovers can be detected below; a
    # missing namelist becomes {} (printed as an empty namelist).
    filtered = {name: parameters.pop(name, {}) for name in namelists_toprint}

    if parameters and check_remaining:
        raise exceptions.InputValidationError(
            'The following namelists are specified in parameters, but are '
            'not valid namelists for the current type of calculation: '
            '{}'.format(','.join(list(parameters.keys()))))

    return filtered
def prepare_for_submission(self, folder):  #pylint: disable=too-many-locals, too-many-statements # noqa: disable=MC0001
    """
    Routine which creates the input file of Wannier90

    :param folder: a aiida.common.folders.Folder subclass where the plugin should put all its files.
    :return: an `aiida.common.datastructures.CalcInfo` instance
    :raises InputValidationError: for inconsistent filenames, parameters,
        settings, or input-folder combinations
    """
    self._validate_input_output_names()

    param_dict = self.inputs.parameters.get_dict()
    self._validate_lowercase(param_dict)
    self._validate_input_parameters(param_dict)

    if 'settings' in self.inputs:
        settings_dict = self.inputs.settings.get_dict()
    else:
        settings_dict = {}
    self._validate_lowercase(settings_dict)

    pp_setup = settings_dict.pop('postproc_setup', False)
    if pp_setup:
        param_dict.update({'postproc_setup': True})

    has_local_input = 'local_input_folder' in self.inputs
    has_remote_input = 'remote_input_folder' in self.inputs
    if pp_setup:
        # BUGFIX: this previously tested `has_local_input or has_local_input`,
        # so a 'remote_input_folder' combined with 'postproc_setup' was
        # silently accepted instead of being rejected.
        if has_local_input or has_remote_input:
            raise exc.InputValidationError(
                "Can not set 'local_input_folder' or 'remote_input_folder' "
                "with the 'postproc_setup' option.")
    else:
        if has_local_input and has_remote_input:
            raise exc.InputValidationError(
                "Both the 'local_input_folder' and 'remote_input_folder' "
                "inputs are set, but they are exclusive. Exactly one of "
                "the two must be given.")
        if not (has_local_input or has_remote_input):
            raise exc.InputValidationError(
                "None of the 'local_input_folder' and 'remote_input_folder' "
                "inputs is set. Exactly one of the two must be given.")

    ############################################################
    # End basic check on inputs
    ############################################################
    random_projections = settings_dict.pop('random_projections', False)

    # Write the .win input file for the seedname.
    write_win(
        filename=folder.get_abs_path('{}.win'.format(self._SEEDNAME)),
        parameters=param_dict,
        structure=self.inputs.structure,
        kpoints=self.inputs.kpoints,
        kpoint_path=getattr(self.inputs, 'kpoint_path', None),
        projections=getattr(self.inputs, 'projections', None),
        random_projections=random_projections,
    )

    input_file_lists = self._get_input_file_lists(pp_setup=pp_setup)

    #######################################################################

    calcinfo = datastructures.CalcInfo()
    calcinfo.uuid = self.uuid
    calcinfo.local_copy_list = input_file_lists.local_copy_list + settings_dict.pop(
        "additional_local_copy_list", [])
    calcinfo.remote_copy_list = input_file_lists.remote_copy_list + settings_dict.pop(
        "additional_remote_copy_list", [])
    calcinfo.remote_symlink_list = input_file_lists.remote_symlink_list + settings_dict.pop(
        "additional_remote_symlink_list", [])

    codeinfo = datastructures.CodeInfo()
    codeinfo.code_uuid = self.inputs.code.uuid
    # Wannier90 takes the seedname as its only command line argument.
    codeinfo.cmdline_params = [self._SEEDNAME]

    calcinfo.codes_info = [codeinfo]
    calcinfo.codes_run_mode = datastructures.CodeRunMode.SERIAL

    retrieve_list = [
        self._SEEDNAME + suffix for suffix in self._DEFAULT_RETRIEVE_SUFFIXES
    ]
    exclude_retrieve_list = settings_dict.pop("exclude_retrieve_list", [])
    retrieve_list = [
        filename for filename in retrieve_list if not any(
            fnmatch.fnmatch(filename, pattern)
            for pattern in exclude_retrieve_list)
    ]
    calcinfo.retrieve_list = retrieve_list
    calcinfo.retrieve_temporary_list = []
    if pp_setup:
        # The parser will then put this in a SinglefileData (if present)
        calcinfo.retrieve_temporary_list.append('{}.nnkp'.format(
            self._SEEDNAME))

    # Retrieves bands automatically, if they are calculated
    calcinfo.retrieve_list += settings_dict.pop("additional_retrieve_list", [])

    # pop input keys not used here
    settings_dict.pop('seedname', None)
    if settings_dict:
        raise exc.InputValidationError(
            "The following keys in settings are unrecognized: {}".format(
                list(settings_dict.keys())))

    return calcinfo
def prepare_for_submission(self, folder):
    """Create the input files from the input nodes passed to this instance of the `CalcJob`.

    :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk
    :return: `aiida.common.datastructures.CalcInfo` instance
    :raises InputValidationError: for invalid parameters, settings or parent folders
    :raises NotExistent: if `parent_folder` has no parent calculation
    :raises UniquenessError: if `parent_folder` has multiple parent calculations
    """
    local_copy_list = []
    remote_copy_list = []
    remote_symlink_list = []

    if 'settings' in self.inputs:
        settings = _uppercase_dict(self.inputs.settings.get_dict(),
                                   dict_name='settings')
    else:
        settings = {}

    # The parent folder must come from exactly one parent calculation.
    parent_folder = self.inputs.parent_folder
    parent_calcs = parent_folder.get_incoming(
        node_class=orm.CalcJobNode).all()

    if not parent_calcs:
        raise exceptions.NotExistent(
            'parent_folder<{}> has no parent calculation'.format(
                parent_folder.pk))
    elif len(parent_calcs) > 1:
        raise exceptions.UniquenessError(
            'parent_folder<{}> has multiple parent calculations'.format(
                parent_folder.pk))

    parent_calc = parent_calcs[0].node

    # If the parent calculation is a `PhCalculation` we are restarting
    restart_flag = parent_calc.process_type == 'aiida.calculations:quantumespresso.ph'

    # Also, the parent calculation must be on the same computer
    if not self.node.computer.uuid == parent_calc.computer.uuid:
        raise exceptions.InputValidationError(
            'Calculation has to be launched on the same computer as that of the parent: {}'
            .format(parent_calc.computer.get_name()))

    # put by default, default_parent_output_folder = ./out
    try:
        default_parent_output_folder = parent_calc.process_class._OUTPUT_SUBFOLDER
    except AttributeError:
        try:
            default_parent_output_folder = parent_calc._get_output_folder()
        except AttributeError:
            raise exceptions.InputValidationError(
                'parent calculation does not have a default output subfolder'
            )
    parent_calc_out_subfolder = settings.pop('PARENT_CALC_OUT_SUBFOLDER',
                                             default_parent_output_folder)

    # I put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
    parameters = _uppercase_dict(self.inputs.parameters.get_dict(),
                                 dict_name='parameters')
    parameters = {
        k: _lowercase_dict(v, dict_name=k)
        for k, v in six.iteritems(parameters)
    }

    # Preparing a drho calculation for d3 requires blocking extra keywords.
    prepare_for_d3 = settings.pop('PREPARE_FOR_D3', False)
    if prepare_for_d3:
        self._blocked_keywords += [('INPUTPH', 'fildrho'),
                                   ('INPUTPH', 'drho_star%open'),
                                   ('INPUTPH', 'drho_star%ext'),
                                   ('INPUTPH', 'drho_star%dir')]

    # Blocked keywords are set by the plugin, never by the user.
    for namelist, flag in self._blocked_keywords:
        if namelist in parameters:
            if flag in parameters[namelist]:
                raise exceptions.InputValidationError(
                    "Cannot specify explicitly the '{}' flag in the '{}' namelist or card."
                    .format(flag, namelist))

    if 'INPUTPH' not in parameters:
        raise exceptions.InputValidationError(
            'required namelist INPUTPH not specified')

    parameters['INPUTPH']['outdir'] = self._OUTPUT_SUBFOLDER
    parameters['INPUTPH']['iverbosity'] = 1
    parameters['INPUTPH']['prefix'] = self._PREFIX
    parameters['INPUTPH']['fildyn'] = self._OUTPUT_DYNAMICAL_MATRIX_PREFIX

    if prepare_for_d3:
        parameters['INPUTPH']['fildrho'] = self._DRHO_PREFIX
        parameters['INPUTPH']['drho_star%open'] = True
        parameters['INPUTPH']['drho_star%ext'] = self._DRHO_STAR_EXT
        parameters['INPUTPH']['drho_star%dir'] = self._FOLDER_DRHO

    # The qpoints may be given either as a mesh or as an explicit list.
    try:
        mesh, offset = self.inputs.qpoints.get_kpoints_mesh()

        if any([i != 0. for i in offset]):
            raise NotImplementedError(
                'Computation of phonons on a mesh with non zero offset is not implemented, at the level of ph.x'
            )

        parameters['INPUTPH']['ldisp'] = True
        parameters['INPUTPH']['nq1'] = mesh[0]
        parameters['INPUTPH']['nq2'] = mesh[1]
        parameters['INPUTPH']['nq3'] = mesh[2]

        postpend_text = None
    except AttributeError:
        # this is the case where no mesh was set. Maybe it's a list
        try:
            list_of_points = self.inputs.qpoints.get_kpoints(
                cartesian=True)
        except AttributeError:
            # In this case, there are no info on the qpoints at all
            raise exceptions.InputValidationError(
                'Input `qpoints` contains neither a mesh nor a list of points'
            )

        # change to 2pi/a coordinates
        lattice_parameter = numpy.linalg.norm(self.inputs.qpoints.cell[0])
        list_of_points *= lattice_parameter / (2. * numpy.pi)

        # add here the list of point coordinates
        if len(list_of_points) > 1:
            parameters['INPUTPH']['qplot'] = True
            parameters['INPUTPH']['ldisp'] = True
            postpend_text = u'{}\n'.format(len(list_of_points))
            for points in list_of_points:
                postpend_text += u'{0:18.10f} {1:18.10f} {2:18.10f} 1\n'.format(
                    *points)
            # Note: the weight is fixed to 1, because ph.x calls these
            # things weights but they are not such. If they are going to
            # exist with the meaning of weights, they will be supported
        else:
            parameters['INPUTPH']['ldisp'] = False
            postpend_text = u''
            for points in list_of_points:
                postpend_text += u'{0:18.10f} {1:18.10f} {2:18.10f}\n'.format(
                    *points)

    # customized namelists, otherwise not present in the distributed ph code
    try:
        namelists_toprint = settings.pop('NAMELISTS')
        if not isinstance(namelists_toprint, list):
            raise exceptions.InputValidationError(
                "The 'NAMELISTS' value, if specified in the settings input "
                'node, must be a list of strings')
    except KeyError:  # list of namelists not specified in the settings; do automatic detection
        namelists_toprint = self._compulsory_namelists

    # create a folder for the dynamical matrices
    if not restart_flag:  # if it is a restart, it will be copied over
        folder.get_subfolder(self._FOLDER_DYNAMICAL_MATRIX, create=True)

    # Write the namelists (and the optional qpoint list) to the input file.
    with folder.open(self.metadata.options.input_filename,
                     'w') as infile:
        for namelist_name in namelists_toprint:
            infile.write(u'&{0}\n'.format(namelist_name))
            # namelist content; set to {} if not present, so that we leave an empty namelist
            namelist = parameters.pop(namelist_name, {})
            for key, value in sorted(six.iteritems(namelist)):
                infile.write(convert_input_to_namelist_entry(key, value))
            infile.write(u'/\n')

        # add list of qpoints if required
        if postpend_text is not None:
            infile.write(postpend_text)

    # All namelists were popped above; anything left over is invalid input.
    if parameters:
        raise exceptions.InputValidationError(
            'The following namelists are specified in parameters, but are '
            'not valid namelists for the current type of calculation: '
            '{}'.format(','.join(list(parameters.keys()))))

    # copy the parent scratch
    symlink = settings.pop('PARENT_FOLDER_SYMLINK',
                           self._default_symlink_usage)  # a boolean
    if symlink:
        # I create a symlink to each file/folder in the parent ./out
        folder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

        remote_symlink_list.append(
            (parent_folder.computer.uuid,
             os.path.join(parent_folder.get_remote_path(),
                          parent_calc_out_subfolder,
                          '*'), self._OUTPUT_SUBFOLDER))

        # I also create a symlink for the ./pseudo folder
        # TODO: suppress this when the recover option of QE will be fixed
        # (bug when trying to find pseudo file)
        remote_symlink_list.append(
            (parent_folder.computer.uuid,
             os.path.join(parent_folder.get_remote_path(),
                          self._get_pseudo_folder()),
             self._get_pseudo_folder()))
    else:
        # here I copy the whole folder ./out
        remote_copy_list.append(
            (parent_folder.computer.uuid,
             os.path.join(parent_folder.get_remote_path(),
                          parent_calc_out_subfolder),
             self._OUTPUT_SUBFOLDER))
        # I also copy the ./pseudo folder
        # TODO: suppress this when the recover option of QE will be fixed
        # (bug when trying to find pseudo file)
        remote_copy_list.append(
            (parent_folder.computer.uuid,
             os.path.join(parent_folder.get_remote_path(),
                          self._get_pseudo_folder()),
             self._get_pseudo_folder()))

    if restart_flag:  # in this case, copy in addition also the dynamical matrices
        if symlink:
            remote_symlink_list.append(
                (parent_folder.computer.uuid,
                 os.path.join(parent_folder.get_remote_path(),
                              self._FOLDER_DYNAMICAL_MATRIX),
                 self._FOLDER_DYNAMICAL_MATRIX))
        else:
            # copy the dynamical matrices
            # no need to copy the _ph0, since I copied already the whole ./out folder
            remote_copy_list.append(
                (parent_folder.computer.uuid,
                 os.path.join(parent_folder.get_remote_path(),
                              self._FOLDER_DYNAMICAL_MATRIX), '.'))

    # Create an `.EXIT` file if `only_initialization` flag in `settings` is set to `True`
    if settings.pop('ONLY_INITIALIZATION', False):
        with folder.open('{}.EXIT'.format(self._PREFIX), 'w') as handle:
            handle.write('\n')

    codeinfo = datastructures.CodeInfo()
    codeinfo.cmdline_params = (
        list(settings.pop('CMDLINE', [])) +
        ['-in', self.metadata.options.input_filename])
    codeinfo.stdout_name = self.metadata.options.output_filename
    codeinfo.code_uuid = self.inputs.code.uuid

    calcinfo = datastructures.CalcInfo()
    calcinfo.uuid = str(self.uuid)
    calcinfo.codes_info = [codeinfo]
    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list
    calcinfo.remote_symlink_list = remote_symlink_list

    # Retrieve by default the output file and the xml file
    filepath_xml_tensor = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0',
                                       '{}.phsave'.format(self._PREFIX))
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append(self.metadata.options.output_filename)
    calcinfo.retrieve_list.append(self._FOLDER_DYNAMICAL_MATRIX)
    calcinfo.retrieve_list.append(
        os.path.join(filepath_xml_tensor,
                     self._OUTPUT_XML_TENSOR_FILE_NAME))
    calcinfo.retrieve_list += settings.pop('ADDITIONAL_RETRIEVE_LIST', [])

    # Any remaining keys in `settings` were not consumed and are invalid.
    if settings:
        unknown_keys = ', '.join(list(settings.keys()))
        raise exceptions.InputValidationError(
            '`settings` contained unexpected keys: {}'.format(
                unknown_keys))

    return calcinfo
def prepare_for_submission(self, folder):  # pylint: disable=too-many-statements,too-many-branches
    """Prepare the calculation job for submission by transforming input nodes into input files.

    In addition to the input files being written to the sandbox folder, a `CalcInfo` instance will be returned that
    contains lists of files that need to be copied to the remote machine before job submission, as well as file
    lists that are to be retrieved after job completion.

    :param folder: a sandbox folder to temporarily write files on disk.
    :return: :py:`~aiida.common.datastructures.CalcInfo` instance.
    :raises ~aiida.common.exceptions.NotExistent: if a parent remote folder has no creator calculation.
    :raises ~aiida.common.exceptions.InputValidationError: if the inputs are inconsistent (wrong computer,
        missing INPUTEPW namelist, invalid k/q-point grids, unknown `settings` keys, ...).
    """

    def test_offset(offset):
        """Raise if the k/q-point mesh has a non-zero offset (not supported by epw.x)."""
        if any(i != 0. for i in offset):
            raise NotImplementedError(
                'Computation of electron-phonon on a mesh with non zero offset is not implemented, '
                'at the level of epw.x')

    local_copy_list = []
    remote_copy_list = []
    remote_symlink_list = []

    if 'settings' in self.inputs:
        settings = _uppercase_dict(self.inputs.settings.get_dict(), dict_name='settings')
    else:
        settings = {}

    # The nscf parent folder provides the electronic wave functions; it must have been produced by a calculation.
    parent_folder_nscf = self.inputs.parent_folder_nscf
    parent_calc_nscf = parent_folder_nscf.creator

    if parent_calc_nscf is None:
        raise exceptions.NotExistent('parent_folder<{}> has no parent calculation'.format(parent_folder_nscf.pk))

    # The parent calculation must be on the same computer, since its scratch is copied/symlinked remotely.
    if not self.node.computer.uuid == parent_calc_nscf.computer.uuid:
        raise exceptions.InputValidationError(
            'Calculation has to be launched on the same computer as that of the parent: {}'.format(
                parent_calc_nscf.computer.get_name()))

    # By default the parent output folder is ./out
    parent_calc_out_subfolder_nscf = parent_calc_nscf.process_class._OUTPUT_SUBFOLDER  # pylint: disable=protected-access

    # The phonon parent folder provides the dynamical matrices and dvscf files.
    parent_folder_ph = self.inputs.parent_folder_ph
    parent_calc_ph = parent_folder_ph.creator

    # Mirror the checks done for the nscf parent: a creator must exist and live on the same computer.
    if parent_calc_ph is None:
        raise exceptions.NotExistent('parent_folder<{}> has no parent calculation'.format(parent_folder_ph.pk))

    if not self.node.computer.uuid == parent_calc_ph.computer.uuid:
        raise exceptions.InputValidationError(
            'Calculation has to be launched on the same computer as that of the parent: {}'.format(
                parent_calc_ph.computer.get_name()))

    # I put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
    parameters = _uppercase_dict(self.inputs.parameters.get_dict(), dict_name='parameters')
    parameters = {k: _lowercase_dict(v, dict_name=k) for k, v in parameters.items()}

    if 'INPUTEPW' not in parameters:
        raise exceptions.InputValidationError('required namelist INPUTEPW not specified')

    # These keys are managed by the plugin and may not be set by the user (blocked upstream).
    parameters['INPUTEPW']['outdir'] = self._OUTPUT_SUBFOLDER
    parameters['INPUTEPW']['iverbosity'] = 1
    parameters['INPUTEPW']['prefix'] = self._PREFIX

    # Coarse q-point grid: must be a regular mesh without offset.
    try:
        mesh, offset = self.inputs.qpoints.get_kpoints_mesh()
        test_offset(offset)
        parameters['INPUTEPW']['nq1'] = mesh[0]
        parameters['INPUTEPW']['nq2'] = mesh[1]
        parameters['INPUTEPW']['nq3'] = mesh[2]
        postpend_text = None
    except NotImplementedError as exception:
        raise exceptions.InputValidationError('Cannot get the coarse q-point grid') from exception

    # Coarse k-point grid: must be a regular mesh without offset.
    try:
        mesh, offset = self.inputs.kpoints.get_kpoints_mesh()
        test_offset(offset)
        parameters['INPUTEPW']['nk1'] = mesh[0]
        parameters['INPUTEPW']['nk2'] = mesh[1]
        parameters['INPUTEPW']['nk3'] = mesh[2]
        postpend_text = None
    except NotImplementedError as exception:
        raise exceptions.InputValidationError('Cannot get the coarse k-point grid') from exception

    # Fine q-point grid: must be a regular mesh without offset.
    try:
        mesh, offset = self.inputs.qfpoints.get_kpoints_mesh()
        test_offset(offset)
        parameters['INPUTEPW']['nqf1'] = mesh[0]
        parameters['INPUTEPW']['nqf2'] = mesh[1]
        parameters['INPUTEPW']['nqf3'] = mesh[2]
        postpend_text = None
    except NotImplementedError as exception:
        raise exceptions.InputValidationError('Cannot get the fine q-point grid') from exception

    # Fine k-point grid: must be a regular mesh without offset.
    try:
        mesh, offset = self.inputs.kfpoints.get_kpoints_mesh()
        test_offset(offset)
        parameters['INPUTEPW']['nkf1'] = mesh[0]
        parameters['INPUTEPW']['nkf2'] = mesh[1]
        parameters['INPUTEPW']['nkf3'] = mesh[2]
        postpend_text = None
    except NotImplementedError as exception:
        raise exceptions.InputValidationError('Cannot get the fine k-point grid') from exception

    # Customized namelists, otherwise not present in the distributed epw code
    try:
        namelists_toprint = settings.pop('NAMELISTS')
        if not isinstance(namelists_toprint, list):
            raise exceptions.InputValidationError(
                "The 'NAMELISTS' value, if specified in the settings input "
                'node, must be a list of strings')
    except KeyError:
        # list of namelists not specified in the settings; do automatic detection
        namelists_toprint = self._compulsory_namelists

    # Create the save folder with dvscf and dyn files.
    folder.get_subfolder(self._FOLDER_SAVE, create=True)

    # List of IBZ q-points to be appended below the EPW input. To be removed when removed from EPW.
    qibz_ar = []
    for key, value in sorted(parent_calc_ph.outputs.output_parameters.get_dict().items()):
        if key.startswith('dynamical_matrix_'):
            qibz_ar.append(value['q_point'])

    qibz_node = orm.ArrayData()
    qibz_node.set_array('qibz', np.array(qibz_ar))

    list_of_points = qibz_node.get_array('qibz')

    # Number of q-points in the irreducible Brillouin Zone (one row per q-point).
    # NOTE: this used to read `len(list_of_points[0, :])`, which is the number of coordinate
    # components of the *first* q-point (always 3), not the number of q-points.
    nqpt = len(list_of_points)

    # Add here the list of point coordinates
    if len(list_of_points) > 1:
        postpend_text = '{} cartesian\n'.format(len(list_of_points))
        for points in list_of_points:
            postpend_text += '{0:18.10f} {1:18.10f} {2:18.10f} \n'.format(*points)

    with folder.open(self.metadata.options.input_filename, 'w') as infile:
        for namelist_name in namelists_toprint:
            infile.write('&{0}\n'.format(namelist_name))
            # namelist content; set to {} if not present, so that we leave an empty namelist
            namelist = parameters.pop(namelist_name, {})
            for key, value in sorted(namelist.items()):
                infile.write(convert_input_to_namelist_entry(key, value))
            infile.write('/\n')

        # add list of qpoints if required
        if postpend_text is not None:
            infile.write(postpend_text)

    if parameters:
        raise exceptions.InputValidationError(
            'The following namelists are specified in parameters, but are '
            'not valid namelists for the current type of calculation: '
            '{}'.format(','.join(list(parameters.keys()))))

    # Copy (or symlink) the parent scratch
    symlink = settings.pop('PARENT_FOLDER_SYMLINK', self._default_symlink_usage)  # a boolean
    if symlink:
        # I create a symlink to each file/folder in the parent ./out
        folder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

        remote_symlink_list.append((
            parent_folder_nscf.computer.uuid,
            os.path.join(parent_folder_nscf.get_remote_path(), parent_calc_out_subfolder_nscf, '*'),
            self._OUTPUT_SUBFOLDER
        ))
    else:
        # here I copy the whole folder ./out
        remote_copy_list.append((
            parent_folder_nscf.computer.uuid,
            os.path.join(parent_folder_nscf.get_remote_path(), parent_calc_out_subfolder_nscf),
            self._OUTPUT_SUBFOLDER
        ))

    prefix = self._PREFIX

    # For each q-point, copy the dynamical matrices and dvscf files into the ./save folder,
    # renamed to the `<prefix>.dyn_q<i>` / `<prefix>.dvscf_q<i>` convention expected by epw.x.
    for iqpt in range(1, nqpt + 1):
        label = str(iqpt)
        # The q=0 dynamical matrix is copied once per iteration alongside the current one.
        tmp_path = os.path.join(self._FOLDER_DYNAMICAL_MATRIX, 'dynamical-matrix-0')
        remote_copy_list.append((
            parent_folder_ph.computer.uuid,
            os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
            'save/' + prefix + '.dyn_q0'))
        tmp_path = os.path.join(self._FOLDER_DYNAMICAL_MATRIX, 'dynamical-matrix-' + label)
        remote_copy_list.append((
            parent_folder_ph.computer.uuid,
            os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
            'save/' + prefix + '.dyn_q' + label))

        if iqpt == 1:
            # ph.x stores the first q-point directly in _ph0, together with the .phsave folder.
            tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/' + prefix + '.dvscf*')
            remote_copy_list.append((
                parent_folder_ph.computer.uuid,
                os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                'save/' + prefix + '.dvscf_q' + label))
            tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/' + prefix + '.phsave')
            remote_copy_list.append((
                parent_folder_ph.computer.uuid,
                os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                'save/'))
        else:
            # Subsequent q-points live in per-q subfolders `<prefix>.q_<i>`.
            tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/' + prefix + '.q_' + label + '/' + prefix + '.dvscf*')
            remote_copy_list.append((
                parent_folder_ph.computer.uuid,
                os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                'save/' + prefix + '.dvscf_q' + label))

    codeinfo = datastructures.CodeInfo()
    codeinfo.cmdline_params = (list(settings.pop('CMDLINE', [])) + ['-in', self.metadata.options.input_filename])
    codeinfo.stdout_name = self.metadata.options.output_filename
    codeinfo.code_uuid = self.inputs.code.uuid

    calcinfo = datastructures.CalcInfo()
    calcinfo.codes_info = [codeinfo]
    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list
    calcinfo.remote_symlink_list = remote_symlink_list

    # Retrieve by default the output file
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append(self.metadata.options.output_filename)
    calcinfo.retrieve_list += settings.pop('ADDITIONAL_RETRIEVE_LIST', [])

    # All recognized `settings` keys have been popped above; anything left is a user error.
    if settings:
        unknown_keys = ', '.join(list(settings.keys()))
        raise exceptions.InputValidationError('`settings` contained unexpected keys: {}'.format(unknown_keys))

    return calcinfo
def prepare_for_submission(self, folder):
    """Create the input files for the template-replacer calculation.

    The `template` input dictionary drives everything: an input file is rendered from
    `input_file_template` with the `parameters` dictionary via `str.format`, files referenced in
    `files_to_copy` are scheduled for local/remote copy, and command-line parameters are rendered
    from `cmdline_params`.

    :param folder: a aiida.common.folders.Folder subclass where
                       the plugin should put all its files.
    :return: :py:`~aiida.common.datastructures.CalcInfo` instance.
    :raises ~aiida.common.exceptions.InputValidationError: for unknown template keys, malformed
        `files_to_copy`, missing file inputs, or inconsistent input-file options.
    """
    # pylint: disable=too-many-locals,too-many-statements,too-many-branches
    from aiida.common.utils import validate_list_of_string_tuples
    from aiida.common.exceptions import ValidationError

    code = self.inputs.code
    template = self.inputs.template.get_dict()

    try:
        parameters = self.inputs.parameters.get_dict()
    except AttributeError:
        # `parameters` is an optional input; fall back to an empty substitution dict.
        parameters = {}

    # Pop every known key; whatever remains afterwards is unknown and rejected below.
    input_file_template = template.pop('input_file_template', '')
    input_file_name = template.pop('input_file_name', None)
    output_file_name = template.pop('output_file_name', None)
    cmdline_params_tmpl = template.pop('cmdline_params', [])
    input_through_stdin = template.pop('input_through_stdin', False)
    files_to_copy = template.pop('files_to_copy', [])
    retrieve_temporary_files = template.pop('retrieve_temporary_files', [])

    if template:
        raise exceptions.InputValidationError(
            'The following keys could not be used in the template node: {}'.format(template.keys()))

    try:
        validate_list_of_string_tuples(files_to_copy, tuple_length=2)
    except ValidationError as exc:
        # Chain the original validation error so the root cause stays visible.
        raise exceptions.InputValidationError('invalid file_to_copy format: {}'.format(exc)) from exc

    local_copy_list = []
    remote_copy_list = []

    for link_name, dest_rel_path in files_to_copy:
        try:
            fileobj = self.inputs.files[link_name]
        except (AttributeError, KeyError) as exception:
            # AttributeError: no `files` input namespace at all; KeyError: the namespace exists
            # but does not contain this link.
            raise exceptions.InputValidationError(
                'You are asking to copy a file link {}, '
                'but there is no input link with such a name'.format(link_name)) from exception
        if isinstance(fileobj, orm.SinglefileData):
            local_copy_list.append((fileobj.uuid, fileobj.filename, dest_rel_path))
        elif isinstance(fileobj, orm.RemoteData):  # can be a folder
            remote_copy_list.append((fileobj.computer.uuid, fileobj.get_remote_path(), dest_rel_path))
        else:
            raise exceptions.InputValidationError(
                'If you ask to copy a file link {}, '
                'it must be either a SinglefileData or a RemoteData; it is instead of type {}'.format(
                    link_name, fileobj.__class__.__name__))

    if input_file_name is not None and not input_file_template:
        raise exceptions.InputValidationError(
            'If you give an input_file_name, you must also specify a input_file_template')

    if input_through_stdin and input_file_name is None:
        raise exceptions.InputValidationError(
            'If you ask for input_through_stdin you have to specify a input_file_name')

    # Render the input file content by substituting the parameters into the template.
    input_content = input_file_template.format(**parameters)
    if input_file_name:
        folder.create_file_from_filelike(io.StringIO(input_content), input_file_name, 'w', encoding='utf8')
    else:
        if input_file_template:
            self.logger.warning('No input file name passed, but a input file template is present')

    # Command-line parameters are themselves templates rendered with the same parameters.
    cmdline_params = [i.format(**parameters) for i in cmdline_params_tmpl]

    calcinfo = CalcInfo()
    calcinfo.retrieve_list = []
    calcinfo.retrieve_temporary_list = []
    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list

    codeinfo = CodeInfo()
    codeinfo.cmdline_params = cmdline_params

    if input_through_stdin:
        codeinfo.stdin_name = input_file_name

    if output_file_name:
        codeinfo.stdout_name = output_file_name
        calcinfo.retrieve_list.append(output_file_name)

    if retrieve_temporary_files:
        calcinfo.retrieve_temporary_list = retrieve_temporary_files

    codeinfo.code_uuid = code.uuid
    calcinfo.codes_info = [codeinfo]

    return calcinfo
def prepare_for_submission(self, folder):  # pylint: disable=too-many-branches,too-many-statements
    """Prepare the calculation job for submission by transforming input nodes into input files.

    In addition to the input files being written to the sandbox folder, a `CalcInfo` instance will be returned that
    contains lists of files that need to be copied to the remote machine before job submission, as well as file
    lists that are to be retrieved after job completion.

    :param folder: a sandbox folder to temporarily write files on disk.
    :return: :py:`~aiida.common.datastructures.CalcInfo` instance.
    """
    # Put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
    parameters = _uppercase_dict(self.inputs.parameters.get_dict(), dict_name='parameters')
    parameters = {k: _lowercase_dict(v, dict_name=k) for k, v in parameters.items()}

    # Same for settings.
    if 'settings' in self.inputs:
        settings = _uppercase_dict(self.inputs.settings.get_dict(), dict_name='settings')
    else:
        settings = {}

    # Set default values. NOTE: this is different from PW/CP: here the plugin-managed (blocked)
    # keywords may never be set explicitly by the user, and are always overwritten with `value`.
    for blocked in self._blocked_keywords:
        namelist = blocked[0].upper()
        key = blocked[1].lower()
        value = blocked[2]

        if namelist in parameters:
            if key in parameters[namelist]:
                raise exceptions.InputValidationError(
                    "You cannot specify explicitly the '{}' key in the '{}' "
                    'namelist.'.format(key, namelist))
        else:
            parameters[namelist] = {}
        parameters[namelist][key] = value

    # Restrict the plot output to the file types that we want to be able to parse
    dimension_to_output_format = {
        0: 0,  # Spherical integration -> Gnuplot, 1D
        1: 0,  # 1D -> Gnuplot, 1D
        2: 7,  # 2D -> Gnuplot, 2D
        3: 6,  # 3D -> Gaussian cube
        4: 0,  # Polar on a sphere -> # Gnuplot, 1D
    }
    # NOTE(review): this assumes `parameters['PLOT']['iflag']` is present and one of 0-4; a missing
    # namelist/key or other value raises a bare KeyError here — presumably validated upstream; confirm.
    parameters['PLOT']['output_format'] = dimension_to_output_format[parameters['PLOT']['iflag']]

    namelists_toprint = self._default_namelists

    input_filename = self.inputs.metadata.options.input_filename

    # Write each expected namelist, popping it from `parameters`; an absent namelist is emitted empty.
    with folder.open(input_filename, 'w') as infile:
        for namelist_name in namelists_toprint:
            infile.write('&{0}\n'.format(namelist_name))
            # namelist content; set to {} if not present, so that we leave an empty namelist
            namelist = parameters.pop(namelist_name, {})
            for key, value in sorted(namelist.items()):
                infile.write(convert_input_to_namelist_entry(key, value))
            infile.write('/\n')

    # Check for specified namelists that are not expected (anything not popped above is invalid)
    if parameters:
        raise exceptions.InputValidationError(
            'The following namelists are specified in parameters, but are '
            'not valid namelists for the current type of calculation: '
            '{}'.format(','.join(list(parameters.keys()))))

    remote_copy_list = []
    local_copy_list = []

    # Copy remote output dir of the parent calculation, if one was provided.
    parent_calc_folder = self.inputs.get('parent_folder', None)
    if isinstance(parent_calc_folder, orm.RemoteData):
        remote_copy_list.append((
            parent_calc_folder.computer.uuid,
            os.path.join(parent_calc_folder.get_remote_path(), self._INPUT_SUBFOLDER),
            self._OUTPUT_SUBFOLDER
        ))
        remote_copy_list.append((
            parent_calc_folder.computer.uuid,
            os.path.join(parent_calc_folder.get_remote_path(), self._PSEUDO_SUBFOLDER),
            self._PSEUDO_SUBFOLDER
        ))
    elif isinstance(parent_calc_folder, orm.FolderData):
        # Each file is copied into BOTH the output and pseudo subfolders, since from a flat
        # FolderData we cannot tell which subfolder a given file belongs to.
        for filename in parent_calc_folder.list_object_names():
            local_copy_list.append((
                parent_calc_folder.uuid,
                filename,
                os.path.join(self._OUTPUT_SUBFOLDER, filename)
            ))
            local_copy_list.append((
                parent_calc_folder.uuid,
                filename,
                os.path.join(self._PSEUDO_SUBFOLDER, filename)
            ))

    # Note: the input file is fed through stdin (stdin_name), not via `-in`.
    codeinfo = datastructures.CodeInfo()
    codeinfo.cmdline_params = settings.pop('CMDLINE', [])
    codeinfo.stdin_name = self.inputs.metadata.options.input_filename
    codeinfo.stdout_name = self.inputs.metadata.options.output_filename
    codeinfo.code_uuid = self.inputs.code.uuid

    calcinfo = datastructures.CalcInfo()
    calcinfo.codes_info = [codeinfo]
    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list

    # Retrieve by default the output file
    calcinfo.retrieve_list = [self.inputs.metadata.options.output_filename]
    calcinfo.retrieve_temporary_list = []

    # Depending on the `plot_num` and the corresponding parameters, more than one pair of `filplot` + `fileout`
    # files may be written. In that case, the data files will have `filplot` as a prefix with some suffix to
    # distinguish them from one another. The `fileout` filename will be the full data filename with the `fileout`
    # value as a suffix.
    retrieve_tuples = [self._FILEOUT, ('{}_*{}'.format(self._FILPLOT, self._FILEOUT), '.', 0)]

    # Keep the plot files permanently, or only long enough for parsing, depending on the option.
    if self.inputs.metadata.options.keep_plot_file:
        calcinfo.retrieve_list.extend(retrieve_tuples)
    else:
        calcinfo.retrieve_temporary_list.extend(retrieve_tuples)

    return calcinfo
def _generate_PWCPinputdata(cls, parameters, settings, pseudos, structure, kpoints=None, use_fractional=False):
    """Create the input file in string format for a pw.x or cp.x calculation for the given inputs.

    :param parameters: a `Dict` node with the namelist parameters (first-level keys are namelist
        names, second-level keys are flag names).
    :param settings: a plain (already uppercased) settings dictionary; recognized keys are popped.
    :param pseudos: mapping of kind name -> pseudopotential node; assumed validated by the caller.
    :param structure: the input `StructureData`.
    :param kpoints: the `KpointsData` node; required only when ``cls._use_kpoints`` is True.
    :param use_fractional: if True, write ATOMIC_POSITIONS in crystal (fractional) coordinates.
    :return: tuple of (input file content as string, local copy list for the pseudo files).
    :raises ~aiida.common.exceptions.InputValidationError: for blocked flags, invalid structures,
        malformed FIXED_COORDS/ATOMIC_FORCES/ATOMIC_VELOCITIES settings or invalid k-points.
    """
    from aiida.common.utils import get_unique_filename
    import re
    local_copy_list_to_append = []

    # I put the first-level keys as uppercase (i.e., namelist and card names)
    # and the second-level keys as lowercase
    # (deeper levels are unchanged)
    input_params = _uppercase_dict(parameters.get_dict(), dict_name='parameters')
    input_params = {k: _lowercase_dict(v, dict_name=k) for k, v in input_params.items()}

    # I remove unwanted elements (for the moment, instead, I stop; to change when we setup a reasonable logging)
    for blocked in cls._blocked_keywords:
        nl = blocked[0].upper()
        flag = blocked[1].lower()
        defaultvalue = None
        if len(blocked) >= 3:
            defaultvalue = blocked[2]
        if nl in input_params:
            # The following lines is meant to avoid putting in input the
            # parameters like celldm(*)
            stripped_inparams = [re.sub('[(0-9)]', '', _) for _ in input_params[nl].keys()]
            if flag in stripped_inparams:
                raise exceptions.InputValidationError(
                    "You cannot specify explicitly the '{}' flag in the '{}' "
                    'namelist or card.'.format(flag, nl))
        if defaultvalue is not None:
            if nl not in input_params:
                input_params[nl] = {}
            input_params[nl][flag] = defaultvalue

    # Set some variables (look out at the case! NAMELISTS should be uppercase,
    # internal flag names must be lowercase)
    input_params.setdefault('CONTROL', {})
    input_params['CONTROL']['pseudo_dir'] = cls._PSEUDO_SUBFOLDER
    input_params['CONTROL']['outdir'] = cls._OUTPUT_SUBFOLDER
    input_params['CONTROL']['prefix'] = cls._PREFIX
    input_params['CONTROL']['verbosity'] = input_params['CONTROL'].get(
        'verbosity', cls._default_verbosity)  # Set to high if not specified

    # ============ I prepare the input site data =============
    # ------------ CELL_PARAMETERS -----------
    cell_parameters_card = 'CELL_PARAMETERS angstrom\n'
    for vector in structure.cell:
        cell_parameters_card += ('{0:18.10f} {1:18.10f} {2:18.10f}'
                                 '\n'.format(*vector))

    # ------------- ATOMIC_SPECIES ------------
    atomic_species_card_list = []

    # Keep track of the filenames to avoid to overwrite files
    # I use a dictionary where the key is the pseudo PK and the value
    # is the filename I used. In this way, I also use the same filename
    # if more than one kind uses the same pseudo.
    pseudo_filenames = {}

    # I keep track of the order of species
    kind_names = []
    # I add the pseudopotential files to the list of files to be copied
    for kind in structure.kinds:
        # This should not give errors, I already checked before that
        # the list of keys of pseudos and kinds coincides
        ps = pseudos[kind.name]
        if kind.is_alloy or kind.has_vacancies:
            raise exceptions.InputValidationError("Kind '{}' is an alloy or has "
                                                  'vacancies. This is not allowed for pw.x input structures.'
                                                  ''.format(kind.name))

        try:
            # If it is the same pseudopotential file, use the same filename
            filename = pseudo_filenames[ps.pk]
        except KeyError:
            # The pseudo was not encountered yet; use a new name and also add it to the local copy list
            filename = get_unique_filename(ps.filename, list(pseudo_filenames.values()))
            pseudo_filenames[ps.pk] = filename
            local_copy_list_to_append.append((ps.uuid, ps.filename, os.path.join(cls._PSEUDO_SUBFOLDER, filename)))

        kind_names.append(kind.name)
        atomic_species_card_list.append('{} {} {}\n'.format(kind.name.ljust(6), kind.mass, filename))

    # I join the lines, but I resort them using the alphabetical order of
    # species, given by the kind_names list. I also store the mapping_species
    # list, with the order of species used in the file
    mapping_species, sorted_atomic_species_card_list = list(zip(
        *sorted(zip(kind_names, atomic_species_card_list))))
    # The format of mapping_species required later is a dictionary, whose
    # values are the indices, so I convert to this format
    # Note the (idx+1) to convert to fortran 1-based lists
    mapping_species = {sp_name: (idx + 1) for idx, sp_name in enumerate(mapping_species)}
    # I add the first line
    sorted_atomic_species_card_list = (['ATOMIC_SPECIES\n'] + list(sorted_atomic_species_card_list))
    atomic_species_card = ''.join(sorted_atomic_species_card_list)
    # Free memory
    del sorted_atomic_species_card_list
    del atomic_species_card_list

    # ------------ ATOMIC_POSITIONS -----------
    # Check on validity of FIXED_COORDS
    fixed_coords_strings = []
    fixed_coords = settings.pop('FIXED_COORDS', None)
    if fixed_coords is None:
        # No fixed_coords specified: I store a list of empty strings
        fixed_coords_strings = [''] * len(structure.sites)
    else:
        if len(fixed_coords) != len(structure.sites):
            raise exceptions.InputValidationError(
                'Input structure contains {:d} sites, but '
                'fixed_coords has length {:d}'.format(len(structure.sites), len(fixed_coords)))

        for i, this_atom_fix in enumerate(fixed_coords):
            if len(this_atom_fix) != 3:
                raise exceptions.InputValidationError(
                    'fixed_coords({:d}) has not length three'
                    ''.format(i + 1))
            for fixed_c in this_atom_fix:
                if not isinstance(fixed_c, bool):
                    raise exceptions.InputValidationError(
                        'fixed_coords({:d}) has non-boolean '
                        'elements'.format(i + 1))

            if_pos_values = [cls._if_pos(_) for _ in this_atom_fix]
            fixed_coords_strings.append(
                ' {:d} {:d} {:d}'.format(*if_pos_values))

    abs_pos = [_.position for _ in structure.sites]
    if use_fractional:
        import numpy as np
        atomic_positions_card_list = ['ATOMIC_POSITIONS crystal\n']
        # Convert Cartesian positions to fractional coordinates via the inverse cell matrix.
        coordinates = np.dot(np.array(abs_pos), np.linalg.inv(np.array(structure.cell)))
    else:
        atomic_positions_card_list = ['ATOMIC_POSITIONS angstrom\n']
        coordinates = abs_pos

    for site, site_coords, fixed_coords_string in zip(
            structure.sites, coordinates, fixed_coords_strings):
        atomic_positions_card_list.append(
            '{0} {1:18.10f} {2:18.10f} {3:18.10f} {4}\n'.format(
                site.kind_name.ljust(6), site_coords[0], site_coords[1],
                site_coords[2], fixed_coords_string))

    atomic_positions_card = ''.join(atomic_positions_card_list)
    del atomic_positions_card_list

    # Optional ATOMIC_FORCES card
    atomic_forces = settings.pop('ATOMIC_FORCES', None)
    if atomic_forces is not None:
        # Checking that there are as many forces defined as there are sites in the structure
        if len(atomic_forces) != len(structure.sites):
            raise exceptions.InputValidationError(
                'Input structure contains {:d} sites, but atomic forces has length {:d}'.format(
                    len(structure.sites), len(atomic_forces)
                )
            )

        lines = ['ATOMIC_FORCES\n']
        for site, vector in zip(structure.sites, atomic_forces):
            # Checking that all 3 dimensions are specified:
            if len(vector) != 3:
                raise exceptions.InputValidationError('Forces({}) for {} has not length three'.format(vector, site))

            lines.append('{0} {1:18.10f} {2:18.10f} {3:18.10f}\n'.format(site.kind_name.ljust(6), *vector))

        # Append to atomic_positions_card so that this card will be printed directly after
        atomic_positions_card += ''.join(lines)
        del lines

    # Optional ATOMIC_VELOCITIES card
    atomic_velocities = settings.pop('ATOMIC_VELOCITIES', None)
    if atomic_velocities is not None:
        # Checking that there are as many velocities defined as there are sites in the structure
        if len(atomic_velocities) != len(structure.sites):
            raise exceptions.InputValidationError(
                'Input structure contains {:d} sites, but atomic velocities has length {:d}'.format(
                    len(structure.sites), len(atomic_velocities)
                )
            )

        lines = ['ATOMIC_VELOCITIES\n']
        for site, vector in zip(structure.sites, atomic_velocities):
            # Checking that all 3 dimensions are specified:
            if len(vector) != 3:
                raise exceptions.InputValidationError('Velocities({}) for {} has not length three'.format(vector, site))

            lines.append('{0} {1:18.10f} {2:18.10f} {3:18.10f}\n'.format(site.kind_name.ljust(6), *vector))

        # Append to atomic_positions_card so that this card will be printed directly after
        atomic_positions_card += ''.join(lines)
        del lines

    # I set the variables that must be specified, related to the system
    # Set some variables (look out at the case! NAMELISTS should be
    # uppercase, internal flag names must be lowercase)
    input_params.setdefault('SYSTEM', {})
    input_params['SYSTEM']['ibrav'] = 0
    input_params['SYSTEM']['nat'] = len(structure.sites)
    input_params['SYSTEM']['ntyp'] = len(structure.kinds)

    # ============ I prepare the k-points =============
    if cls._use_kpoints:
        try:
            mesh, offset = kpoints.get_kpoints_mesh()
            has_mesh = True
            force_kpoints_list = settings.pop('FORCE_KPOINTS_LIST', False)
            if force_kpoints_list:
                kpoints_list = kpoints.get_kpoints_mesh(print_list=True)
                num_kpoints = len(kpoints_list)
                has_mesh = False
                weights = [1.] * num_kpoints
        except AttributeError:
            # No mesh stored on the node: fall back to an explicit k-point list.
            try:
                kpoints_list = kpoints.get_kpoints()
                num_kpoints = len(kpoints_list)
                has_mesh = False
                if num_kpoints == 0:
                    raise exceptions.InputValidationError(
                        'At least one k point must be '
                        'provided for non-gamma calculations')
            except AttributeError as exception:
                raise exceptions.InputValidationError('No valid kpoints have been found') from exception

            try:
                _, weights = kpoints.get_kpoints(also_weights=True)
            except AttributeError:
                weights = [1.] * num_kpoints

        gamma_only = settings.pop('GAMMA_ONLY', False)

        if gamma_only:
            if has_mesh:
                if tuple(mesh) != (1, 1, 1) or tuple(offset) != (0., 0., 0.):
                    raise exceptions.InputValidationError(
                        'If a gamma_only calculation is requested, the '
                        'kpoint mesh must be (1,1,1),offset=(0.,0.,0.)')
            else:
                # NOTE: this used to compare against `tuple(0., 0., 0.)`, which is a TypeError
                # (`tuple` takes a single iterable); a literal tuple is intended.
                if (len(kpoints_list) != 1 or tuple(kpoints_list[0]) != (0., 0., 0.)):
                    raise exceptions.InputValidationError(
                        'If a gamma_only calculation is requested, the '
                        'kpoints coordinates must only be (0.,0.,0.)')

            kpoints_type = 'gamma'
        elif has_mesh:
            kpoints_type = 'automatic'
        else:
            kpoints_type = 'crystal'

        kpoints_card_list = ['K_POINTS {}\n'.format(kpoints_type)]

        if kpoints_type == 'automatic':
            if any(i not in (0., 0.5) for i in offset):
                raise exceptions.InputValidationError('offset list must only be made '
                                                      'of 0 or 0.5 floats')
            the_offset = [0 if i == 0. else 1 for i in offset]
            the_6_integers = list(mesh) + the_offset
            kpoints_card_list.append('{:d} {:d} {:d} {:d} {:d} {:d}\n'
                                     ''.format(*the_6_integers))
        elif kpoints_type == 'gamma':
            # nothing to be written in this case
            pass
        else:
            kpoints_card_list.append('{:d}\n'.format(num_kpoints))
            for kpoint, weight in zip(kpoints_list, weights):
                kpoints_card_list.append(
                    '  {:18.10f} {:18.10f} {:18.10f} {:18.10f}'
                    '\n'.format(kpoint[0], kpoint[1], kpoint[2], weight))

        kpoints_card = ''.join(kpoints_card_list)
        del kpoints_card_list

    # =================== NAMELISTS AND CARDS ========================
    try:
        namelists_toprint = settings.pop('NAMELISTS')
        if not isinstance(namelists_toprint, list):
            raise exceptions.InputValidationError(
                "The 'NAMELISTS' value, if specified in the settings input "
                'node, must be a list of strings')
    except KeyError:
        # list of namelists not specified; do automatic detection
        try:
            control_nl = input_params['CONTROL']
            calculation_type = control_nl['calculation']
        except KeyError as exception:
            raise exceptions.InputValidationError(
                "No 'calculation' in CONTROL namelist."
                'It is required for automatic detection of the valid list '
                'of namelists. Otherwise, specify the list of namelists '
                "using the NAMELISTS key inside the 'settings' input node.") from exception

        try:
            namelists_toprint = cls._automatic_namelists[calculation_type]
        except KeyError as exception:
            raise exceptions.InputValidationError("Unknown 'calculation' value in "
                                                  'CONTROL namelist {}. Otherwise, specify the list of '
                                                  "namelists using the NAMELISTS inside the 'settings' input "
                                                  'node'.format(calculation_type)) from exception

    inputfile = u''
    for namelist_name in namelists_toprint:
        inputfile += u'&{0}\n'.format(namelist_name)
        # namelist content; set to {} if not present, so that we leave an empty namelist
        namelist = input_params.pop(namelist_name, {})
        for key, value in sorted(namelist.items()):
            inputfile += convert_input_to_namelist_entry(key, value, mapping=mapping_species)
        inputfile += u'/\n'

    # Write cards now
    inputfile += atomic_species_card
    inputfile += atomic_positions_card
    if cls._use_kpoints:
        inputfile += kpoints_card
    inputfile += cell_parameters_card

    if input_params:
        raise exceptions.InputValidationError(
            'The following namelists are specified in input_params, but are '
            'not valid namelists for the current type of calculation: '
            '{}'.format(','.join(list(input_params.keys()))))

    return inputfile, local_copy_list_to_append
def prepare_for_submission(self, folder):
    """Create the input files from the input nodes passed to this instance of the `CalcJob`.

    :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk
    :return: `aiida.common.datastructures.CalcInfo` instance
    :raises ~aiida.common.exceptions.InputValidationError: if the pseudos do not match the structure
        kinds, if the `ENVIRON` setting is not a dictionary, or if `settings` contains unknown keys.
    """
    if 'settings' in self.inputs:
        settings = _uppercase_dict(self.inputs.settings.get_dict(), dict_name='settings')
    else:
        settings = {}

    # Check that a pseudo potential was specified for each kind present in the `StructureData`
    kinds = [kind.name for kind in self.inputs.structure.kinds]
    if set(kinds) != set(self.inputs.pseudos.keys()):
        raise exceptions.InputValidationError(
            'Mismatch between the defined pseudos and the list of kinds of the structure.\n'
            'Pseudos: {};\nKinds: {}'.format(', '.join(list(self.inputs.pseudos.keys())), ', '.join(list(kinds))))

    local_copy_list = []
    remote_copy_list = []
    remote_symlink_list = []

    # Create the subfolder that will contain the pseudopotentials
    folder.get_subfolder(self._PSEUDO_SUBFOLDER, create=True)

    # Create the subfolder for the output data (sometimes Quantum ESPRESSO codes crash if the folder does not exist)
    folder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

    # If present, add also the Van der Waals table to the pseudo dir. Note that the name of the table is not checked
    # but should be the one expected by Quantum ESPRESSO.
    # Each `local_copy_list` entry is a `(node_uuid, source_relpath, destination_relpath)` tuple.
    if 'vdw_table' in self.inputs:
        uuid = self.inputs.vdw_table.uuid
        src_path = self.inputs.vdw_table.filename
        dst_path = os.path.join(self._PSEUDO_SUBFOLDER, self.inputs.vdw_table.filename)
        local_copy_list.append((uuid, src_path, dst_path))

    if 'hubbard_file' in self.inputs:
        # BUGFIX: the first element of the tuple must be the node UUID, not the filename,
        # mirroring the `vdw_table` branch above.
        uuid = self.inputs.hubbard_file.uuid
        src_path = self.inputs.hubbard_file.filename
        dst_path = self.input_file_name_hubbard_file
        local_copy_list.append((uuid, src_path, dst_path))

    arguments = [
        self.inputs.parameters,
        settings,
        self.inputs.pseudos,
        self.inputs.structure,
    ]
    if self._use_kpoints:
        arguments.append(self.inputs.kpoints)

    input_filecontent, local_copy_pseudo_list = self._generate_PWCPinputdata(*arguments)
    local_copy_list += local_copy_pseudo_list

    with folder.open(self.metadata.options.input_filename, 'w') as handle:
        handle.write(input_filecontent)

    # operations for restart: either symlink or copy the parent's output folder, if one was given
    symlink = settings.pop('PARENT_FOLDER_SYMLINK', self._default_symlink_usage)  # a boolean
    if symlink:
        if 'parent_folder' in self.inputs:
            # I put the symlink to the old parent ./out folder
            remote_symlink_list.append((
                self.inputs.parent_folder.computer.uuid,
                os.path.join(self.inputs.parent_folder.get_remote_path(), self._restart_copy_from),
                self._restart_copy_to
            ))
    else:
        # copy remote output dir, if specified
        if 'parent_folder' in self.inputs:
            remote_copy_list.append((
                self.inputs.parent_folder.computer.uuid,
                os.path.join(self.inputs.parent_folder.get_remote_path(), self._restart_copy_from),
                self._restart_copy_to
            ))

    # Create an `.EXIT` file if `only_initialization` flag in `settings` is set to `True`
    if settings.pop('ONLY_INITIALIZATION', False):
        with folder.open('{}.EXIT'.format(self._PREFIX), 'w') as handle:
            handle.write('\n')

    # Check if specific inputs for the ENVIRON module where specified
    environ_namelist = settings.pop('ENVIRON', None)
    if environ_namelist is not None:
        if not isinstance(environ_namelist, dict):
            raise exceptions.InputValidationError('ENVIRON namelist should be specified as a dictionary')
        # We first add the environ flag to the command-line options (if not already present)
        try:
            if '-environ' not in settings['CMDLINE']:
                settings['CMDLINE'].append('-environ')
        except KeyError:
            settings['CMDLINE'] = ['-environ']
        # To create a mapping from the species to an incremental fortran 1-based index
        # we use the alphabetical order as in the inputdata generation
        kind_names = sorted([kind.name for kind in self.inputs.structure.kinds])
        mapping_species = {kind_name: (index + 1) for index, kind_name in enumerate(kind_names)}
        with folder.open(self._ENVIRON_INPUT_FILE_NAME, 'w') as handle:
            handle.write('&ENVIRON\n')
            for k, v in sorted(six.iteritems(environ_namelist)):
                handle.write(convert_input_to_namelist_entry(k, v, mapping=mapping_species))
            handle.write('/\n')

    # Check for the deprecated 'ALSO_BANDS' setting and if present fire a deprecation log message
    also_bands = settings.pop('ALSO_BANDS', None)
    if also_bands:
        self.node.logger.warning(
            "The '{}' setting is deprecated as bands are now parsed by default. "
            "If you do not want the bands to be parsed set the '{}' to True {}. "
            'Note that the eigenvalue.xml files are also no longer stored in the repository'
            .format('also_bands', 'no_bands', type(self))
        )

    calcinfo = datastructures.CalcInfo()
    calcinfo.uuid = str(self.uuid)

    # Empty command line by default
    cmdline_params = settings.pop('CMDLINE', [])

    # we commented calcinfo.stin_name and added it here in cmdline_params
    # in this way the mpirun ... pw.x ... < aiida.in
    # is replaced by mpirun ... pw.x ... -in aiida.in
    # in the scheduler, _get_run_line, if cmdline_params is empty, it
    # simply uses < calcinfo.stin_name
    calcinfo.cmdline_params = (list(cmdline_params) + ['-in', self.metadata.options.input_filename])

    codeinfo = datastructures.CodeInfo()
    codeinfo.cmdline_params = (list(cmdline_params) + ['-in', self.metadata.options.input_filename])
    codeinfo.stdout_name = self.metadata.options.output_filename
    codeinfo.code_uuid = self.inputs.code.uuid
    calcinfo.codes_info = [codeinfo]

    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list
    calcinfo.remote_symlink_list = remote_symlink_list

    # Retrieve by default the output file and the xml file
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append(self.metadata.options.output_filename)
    calcinfo.retrieve_list.extend(self.xml_filepaths)
    calcinfo.retrieve_list += settings.pop('ADDITIONAL_RETRIEVE_LIST', [])
    calcinfo.retrieve_list += self._internal_retrieve_list

    # Retrieve the k-point directories with the xml files to the temporary folder
    # to parse the band eigenvalues and occupations but not to have to save the raw files
    # if and only if the 'no_bands' key was not set to true in the settings
    no_bands = settings.pop('NO_BANDS', False)
    if no_bands is False:
        xmlpaths = os.path.join(self._OUTPUT_SUBFOLDER, self._PREFIX + '.save', 'K*[0-9]', 'eigenval*.xml')
        calcinfo.retrieve_temporary_list = [[xmlpaths, '.', 2]]

    # We might still have parser options in the settings dictionary: pop them.
    _pop_parser_options(self, settings)

    # Any leftover keys are unknown: fail loudly rather than silently ignoring user input.
    if settings:
        unknown_keys = ', '.join(list(settings.keys()))
        raise exceptions.InputValidationError('`settings` contained unexpected keys: {}'.format(unknown_keys))

    return calcinfo