def _get_kpoints(self, nx, use_symmetry=True):
    nx = max(1, nx)

    kpoints = KpointsData()
    if use_symmetry:
        kpoints.set_kpoints_mesh([nx, 1, 1], offset=[0.0, 0.0, 0.0])
    else:
        # list kpoints explicitly
        points = [[r, 0.0, 0.0] for r in np.linspace(0, 0.5, nx)]
        kpoints.set_kpoints(points)

    return kpoints
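
# Usage sketch (illustrative, not part of the plugin): what the two branches of
# _get_kpoints produce. Assumes an AiiDA environment where
# aiida.orm.data.array.kpoints.KpointsData is available.
import numpy as np
from aiida.orm.data.array.kpoints import KpointsData

# symmetry branch: a regular (4, 1, 1) mesh, folded later by the code itself
mesh_kpoints = KpointsData()
mesh_kpoints.set_kpoints_mesh([4, 1, 1], offset=[0.0, 0.0, 0.0])
print mesh_kpoints.get_kpoints_mesh()   # ([4, 1, 1], [0.0, 0.0, 0.0])

# explicit branch: 4 equispaced points from Gamma to the zone boundary along b1
explicit_kpoints = KpointsData()
explicit_kpoints.set_kpoints([[r, 0.0, 0.0] for r in np.linspace(0, 0.5, 4)])
print explicit_kpoints.get_kpoints()    # 4x3 array of fractional coordinates
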
def get_kpointsdata(self):
    """
    Return a KpointsData object based on the data in the input file.

    This uses all of the data in the input file to do the necessary unit
    conversion, etc. and then creates an AiiDA KpointsData object.

    **Note:** If the calculation uses only the gamma k-point (`if
    self.k_points['type'] == 'gamma'`), it is necessary to also attach a
    settings node to the calculation with `gamma_only = True`.

    :return: KpointsData object of the kpoints in the input file
    :rtype: aiida.orm.data.array.kpoints.KpointsData
    :raises NotImplementedError: if the kpoints are in a format not yet supported.
    """
    from aiida.orm.data.array.kpoints import KpointsData

    # Initialize the KpointsData node
    kpointsdata = KpointsData()

    # Get the structure using this class's method.
    structuredata = self.get_structuredata()
    # Set the structure information of the kpoints node.
    kpointsdata.set_cell_from_structure(structuredata)

    # Set the kpoints and weights, doing any necessary units conversion.
    if self.k_points['type'] == 'crystal':  # relative to recip latt vecs
        kpointsdata.set_kpoints(self.k_points['points'],
                                weights=self.k_points['weights'])
    elif self.k_points['type'] == 'tpiba':  # cartesian; units of 2*pi/alat
        alat = np.linalg.norm(structuredata.cell[0])  # alat in Angstrom
        kpointsdata.set_kpoints(
            np.array(self.k_points['points']) * (2. * np.pi / alat),
            weights=self.k_points['weights'],
            cartesian=True)
    elif self.k_points['type'] == 'automatic':
        kpointsdata.set_kpoints_mesh(self.k_points['points'],
                                     offset=self.k_points['offset'])
    elif self.k_points['type'] == 'gamma':
        kpointsdata.set_kpoints_mesh([1, 1, 1])
    else:
        raise NotImplementedError(
            'Support for creating KpointsData from input units of {} is '
            'not yet implemented'.format(self.k_points['type']))

    return kpointsdata
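
# Worked sketch of the 'tpiba' branch above (illustrative only): k-points given in
# cartesian units of 2*pi/alat are converted to absolute 1/Angstrom before being
# stored with cartesian=True. The numbers below are made up for illustration.
import numpy as np

alat = 5.43                                  # |a1| in Angstrom, as computed above
tpiba_points = np.array([[0.0, 0.0, 0.0],
                         [0.5, 0.0, 0.0]])   # K_POINTS tpiba card values
cartesian_points = tpiba_points * (2. * np.pi / alat)   # now in 1/Angstrom
print cartesian_points
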
def _legacy_get_explicit_kpoints_path(structure, **kwargs):
    """
    Call the get_explicit_kpoints_path of the legacy implementation

    :param structure: a StructureData node
    :param float kpoint_distance: parameter controlling the distance between kpoints.
        Distance is given in crystal coordinates, i.e. the distance is computed in the
        space of b1, b2, b3. The distance set will be the closest possible to this
        value, compatible with the requirement of putting equispaced points between
        two special points (since extrema are included).
    :param bool cartesian: if set to true, reads the coordinates eventually passed in
        value as cartesian coordinates
    :param float epsilon_length: threshold on lengths comparison, used to get the
        bravais lattice info
    :param float epsilon_angle: threshold on angles comparison, used to get the
        bravais lattice info
    """
    args_recognized = ['value', 'kpoint_distance', 'cartesian', 'epsilon_length', 'epsilon_angle']

    args_unknown = set(kwargs).difference(args_recognized)
    if args_unknown:
        raise ValueError("unknown arguments {}".format(args_unknown))

    point_coords, path, bravais_info, explicit_kpoints, labels = legacy.get_explicit_kpoints_path(
        cell=structure.cell, pbc=structure.pbc, **kwargs)

    kpoints = KpointsData()
    kpoints.set_cell(structure.cell)
    kpoints.set_kpoints(explicit_kpoints)
    kpoints.labels = labels

    parameters = {
        'bravais_info': bravais_info,
        'point_coords': point_coords,
        'path': path,
    }

    return {
        'parameters': ParameterData(dict=parameters),
        'explicit_kpoints': kpoints
    }
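
# Usage sketch (assumes this module's imports are in scope and an AiiDA profile is
# loaded; the structure below is a toy simple-cubic cell for illustration only).
from aiida.orm.data.structure import StructureData

structure = StructureData(cell=[[4., 0., 0.], [0., 4., 0.], [0., 0., 4.]])
structure.append_atom(position=(0., 0., 0.), symbols='Si')

result = _legacy_get_explicit_kpoints_path(structure, kpoint_distance=0.05)
explicit_kpoints = result['explicit_kpoints']                      # KpointsData with labels set
bravais_info = result['parameters'].get_dict()['bravais_info']
print explicit_kpoints.labels                                      # e.g. [(0, 'G'), ...]
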
def parse_with_retrieved(self, retrieved):
    """
    Receives in input a dictionary of retrieved nodes.
    Does all the logic here.
    """
    from aiida.common.exceptions import InvalidOperation
    import os
    import glob

    successful = True

    # check if I'm not to overwrite anything
    #state = self._calc.get_state()
    #if state != calc_states.PARSING:
    #    raise InvalidOperation("Calculation not in {} state"
    #                           .format(calc_states.PARSING))

    # retrieve the input parameter
    calc_input = self._calc.inp.parameters

    # look for eventual flags of the parser
    try:
        parser_opts = self._calc.inp.settings.get_dict()[self.get_parser_settings_key()]
    except (AttributeError, KeyError):
        parser_opts = {}

    # load the input dictionary
    # TODO: pass this input_dict to the parser. It might need it.
    input_dict = self._calc.inp.parameters.get_dict()

    # Check that the retrieved folder is there
    try:
        out_folder = retrieved[self._calc._get_linkname_retrieved()]
    except KeyError:
        self.logger.error("No retrieved folder found")
        return False, ()

    # check what is inside the folder
    list_of_files = out_folder.get_folder_list()

    # at least the stdout should exist
    if self._calc._OUTPUT_FILE_NAME not in list_of_files:
        self.logger.error("Standard output not found")
        successful = False
        return successful, ()

    # if there is something more, I note it down, so as to call the raw parser
    # with the right options
    # look for xml
    has_xml = False
    if self._calc._DATAFILE_XML_BASENAME in list_of_files:
        has_xml = True

    # look for bands
    has_bands = False
    if glob.glob(os.path.join(out_folder.get_abs_path('.'), 'K*[0-9]')):
        # Note: assuming format of kpoints subfolder is K*[0-9]
        has_bands = True
        # TODO: maybe it can be more general than bands only?

    out_file = os.path.join(out_folder.get_abs_path('.'), self._calc._OUTPUT_FILE_NAME)
    xml_file = os.path.join(out_folder.get_abs_path('.'), self._calc._DATAFILE_XML_BASENAME)
    dir_with_bands = out_folder.get_abs_path('.')

    # call the raw parsing function
    parsing_args = [out_file, input_dict, parser_opts]

    if has_xml:
        parsing_args.append(xml_file)
    if has_bands:
        if not has_xml:
            self.logger.warning("Cannot parse bands if xml file not found")
        else:
            parsing_args.append(dir_with_bands)

    out_dict, trajectory_data, structure_data, raw_successful = parse_raw_output(*parsing_args)

    # if calculation was not considered failed already, use the new value
    successful = raw_successful if successful else successful

    new_nodes_list = []

    # I eventually save the new structure. structure_data is unnecessary after this
    in_struc = self._calc.get_inputs_dict()['structure']
    type_calc = input_dict['CONTROL']['calculation']
    struc = in_struc
    if type_calc in ['relax', 'vc-relax', 'md', 'vc-md']:
        if 'cell' in structure_data.keys():
            struc = convert_qe2aiida_structure(structure_data, input_structure=in_struc)
            new_nodes_list.append((self.get_linkname_outstructure(), struc))

    k_points_list = trajectory_data.pop('k_points', None)
    k_points_weights_list = trajectory_data.pop('k_points_weights', None)

    if k_points_list is not None:

        # build the kpoints object
        if out_dict['k_points_units'] not in ['2 pi / Angstrom']:
            raise QEOutputParsingError('Error in kpoints units (should be cartesian)')

        # converting bands into a BandsData object (including the kpoints)
        kpoints_from_output = KpointsData()
        kpoints_from_output.set_cell_from_structure(struc)
        kpoints_from_output.set_kpoints(k_points_list, cartesian=True,
                                        weights=k_points_weights_list)
        kpoints_from_input = self._calc.inp.kpoints

        try:
            kpoints_from_input.get_kpoints()
        except AttributeError:
            new_nodes_list += [(self.get_linkname_out_kpoints(), kpoints_from_output)]

    # convert the dictionary into an AiiDA object
    output_params = ParameterData(dict=out_dict)
    # return it to the execmanager
    new_nodes_list.append((self.get_linkname_outparams(), output_params))

    if trajectory_data:
        import numpy
        from aiida.orm.data.array.trajectory import TrajectoryData
        from aiida.orm.data.array import ArrayData
        try:
            positions = numpy.array(trajectory_data.pop('atomic_positions_relax'))
            try:
                cells = numpy.array(trajectory_data.pop('lattice_vectors_relax'))
            # if KeyError, the MD was at fixed cell
            except KeyError:
                cells = numpy.array([in_struc.cell] * len(positions))

            symbols = numpy.array([str(i.kind_name) for i in in_struc.sites])
            stepids = numpy.arange(len(positions))  # a growing integer per step
            # I will insert time parsing when they fix their issues about time
            # printing (logic is broken if restart is on)

            traj = TrajectoryData()
            traj.set_trajectory(
                stepids=stepids,
                cells=cells,
                symbols=symbols,
                positions=positions,
            )
            for x in trajectory_data.iteritems():
                traj.set_array(x[0], numpy.array(x[1]))
            # return it to the execmanager
            new_nodes_list.append((self.get_linkname_outtrajectory(), traj))

        except KeyError:
            # forces in scf calculation (when output)
            arraydata = ArrayData()
            for x in trajectory_data.iteritems():
                arraydata.set_array(x[0], numpy.array(x[1]))
            # return it to the execmanager
            new_nodes_list.append((self.get_linkname_outarray(), arraydata))

    return successful, new_nodes_list
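
# Sketch of how the (successful, new_nodes_list) return value is typically consumed
# (illustrative; the real linking is done by AiiDA's execmanager, and `parser` and
# `retrieved` below are placeholders for an instantiated parser and its retrieved nodes).
successful, new_nodes_list = parser.parse_with_retrieved(retrieved)
if not successful:
    print "Parsing reported problems; check the calculation log"
for link_name, node in new_nodes_list:
    # each entry is an unstored output node plus the link name it should be stored under
    print link_name, node.__class__.__name__
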
def parse_with_retrieved(self, retrieved):
    """
    Parse the output nodes for a PwCalculation from a dictionary of retrieved nodes.
    Two nodes that are expected are the default 'retrieved' FolderData node which will
    store the retrieved files permanently in the repository. The second required node
    is the unstored FolderData node with the temporary retrieved files, which should
    be passed under the key 'retrieved_temporary_folder_key' of the Parser class.

    :param retrieved: a dictionary of retrieved nodes
    """
    import os
    import numpy

    successful = True

    # Load the input dictionary
    parameters = self._calc.inp.parameters.get_dict()

    # Look for optional settings input node and potential 'parser_options' dictionary within it
    try:
        settings = self._calc.inp.settings.get_dict()
        parser_opts = settings[self.get_parser_settings_key()]
    except (AttributeError, KeyError):
        settings = {}
        parser_opts = {}

    # Check that the retrieved folder is there
    try:
        out_folder = retrieved[self._calc._get_linkname_retrieved()]
    except KeyError:
        self.logger.error("No retrieved folder found")
        return False, ()

    # Verify that the retrieved_temporary_folder is within the arguments if temporary files were specified
    if self._calc._get_retrieve_temporary_list():
        try:
            temporary_folder = retrieved[self.retrieved_temporary_folder_key]
            dir_with_bands = temporary_folder.get_abs_path('.')
        except KeyError:
            self.logger.error('the {} was not passed as an argument'.format(self.retrieved_temporary_folder_key))
            return False, ()
    else:
        dir_with_bands = None

    list_of_files = out_folder.get_folder_list()

    # The stdout is required for parsing
    if self._calc._OUTPUT_FILE_NAME not in list_of_files:
        self.logger.error("The standard output file '{}' was not found but is required".format(
            self._calc._OUTPUT_FILE_NAME))
        return False, ()

    # The xml file is required for parsing
    if self._calc._DATAFILE_XML_BASENAME not in list_of_files:
        self.logger.error("The xml output file '{}' was not found but is required".format(
            self._calc._DATAFILE_XML_BASENAME))
        successful = False
        xml_file = None
    else:
        xml_file = os.path.join(out_folder.get_abs_path('.'), self._calc._DATAFILE_XML_BASENAME)

    out_file = os.path.join(out_folder.get_abs_path('.'), self._calc._OUTPUT_FILE_NAME)

    # Call the raw parsing function
    parsing_args = [out_file, parameters, parser_opts, xml_file, dir_with_bands]

    out_dict, trajectory_data, structure_data, bands_data, raw_successful = parse_raw_output(*parsing_args)

    # If calculation was not considered failed already, use the new value
    successful = raw_successful if successful else successful

    # The symmetry info has large arrays that occupy most of the database.
    # It turns out most of this is due to 64 matrices that are repeated over and over again.
    # Therefore I map part of the results onto a list of dictionaries written here once and for all.
    # If the parser_opts has the key 'all_symmetries' set to True, I don't reduce it.
    all_symmetries = parser_opts.get('all_symmetries', False)

    if not all_symmetries:
        try:
            if 'symmetries' in out_dict.keys():
                old_symmetries = out_dict['symmetries']
                new_symmetries = []
                for this_sym in old_symmetries:
                    name = this_sym['name']
                    index = None
                    for i, this in enumerate(self._possible_symmetries):
                        if name in this['name']:
                            index = i
                    if index is None:
                        self.logger.error("Symmetry {} not found".format(name))

                    new_dict = {}
                    # note: here I lose the information about equivalent
                    # ions and fractional_translation.
                    # They will be present with all_symmetries=True
                    new_dict['t_rev'] = this_sym['t_rev']
                    new_dict['symmetry_number'] = index
                    new_symmetries.append(new_dict)

                out_dict['symmetries'] = new_symmetries  # and overwrite the old one
        except KeyError:
            # no symmetries were parsed (failed case, likely)
            self.logger.error("No symmetries were found in output")

    new_nodes_list = []

    # I eventually save the new structure. structure_data is unnecessary after this
    in_struc = self._calc.get_inputs_dict()['structure']
    type_calc = parameters['CONTROL']['calculation']
    struc = in_struc
    if type_calc in ['relax', 'vc-relax', 'md', 'vc-md']:
        if 'cell' in structure_data.keys():
            struc = convert_qe2aiida_structure(structure_data, input_structure=in_struc)
            new_nodes_list.append((self.get_linkname_outstructure(), struc))

    k_points_list = trajectory_data.pop('k_points', None)
    k_points_weights_list = trajectory_data.pop('k_points_weights', None)

    if k_points_list is not None:

        # Build the kpoints object
        if out_dict['k_points_units'] not in ['2 pi / Angstrom']:
            raise QEOutputParsingError('Error in kpoints units (should be cartesian)')

        kpoints_from_output = KpointsData()
        kpoints_from_output.set_cell_from_structure(struc)
        kpoints_from_output.set_kpoints(k_points_list, cartesian=True, weights=k_points_weights_list)
        kpoints_from_input = self._calc.inp.kpoints

        if not bands_data:
            try:
                kpoints_from_input.get_kpoints()
            except AttributeError:
                new_nodes_list += [(self.get_linkname_out_kpoints(), kpoints_from_output)]

        # Converting bands into a BandsData object (including the kpoints)
        if bands_data:
            kpoints_for_bands = kpoints_from_output

            try:
                kpoints_from_input.get_kpoints()
                kpoints_for_bands.labels = kpoints_from_input.labels
            except (AttributeError, ValueError, TypeError):
                # AttributeError: no list of kpoints in input
                # ValueError: labels from input do not match the output
                #   list of kpoints (some kpoints are missing)
                # TypeError: labels are not set, so kpoints_from_input.labels=None
                pass

            # Get the bands occupations and correct the occupations of QE:
            # If it computes only one component, it occupies it with half number of electrons
            try:
                bands_data['occupations'][1]
                the_occupations = bands_data['occupations']
            except IndexError:
                the_occupations = 2. * numpy.array(bands_data['occupations'][0])

            try:
                bands_data['bands'][1]
                bands_energies = bands_data['bands']
            except IndexError:
                bands_energies = bands_data['bands'][0]

            the_bands_data = BandsData()
            the_bands_data.set_kpointsdata(kpoints_for_bands)
            the_bands_data.set_bands(bands_energies,
                                     units=bands_data['bands_units'],
                                     occupations=the_occupations)

            new_nodes_list += [('output_band', the_bands_data)]
            out_dict['linknames_band'] = ['output_band']

    # Separate the atomic_occupations dictionary in its own node if it is present
    atomic_occupations = out_dict.get('atomic_occupations', {})
    if atomic_occupations:
        out_dict.pop('atomic_occupations')
        atomic_occupations_node = ParameterData(dict=atomic_occupations)
        new_nodes_list.append(('output_atomic_occupations', atomic_occupations_node))

    output_params = ParameterData(dict=out_dict)
    new_nodes_list.append((self.get_linkname_outparams(), output_params))

    if trajectory_data:
        from aiida.orm.data.array.trajectory import TrajectoryData
        from aiida.orm.data.array import ArrayData
        try:
            positions = numpy.array(trajectory_data.pop('atomic_positions_relax'))
            try:
                cells = numpy.array(trajectory_data.pop('lattice_vectors_relax'))
            # if KeyError, the MD was at fixed cell
            except KeyError:
                cells = numpy.array([in_struc.cell] * len(positions))

            symbols = numpy.array([str(i.kind_name) for i in in_struc.sites])
            stepids = numpy.arange(len(positions))  # a growing integer per step
            # I will insert time parsing when they fix their issues about time
            # printing (logic is broken if restart is on)

            traj = TrajectoryData()
            traj.set_trajectory(
                stepids=stepids,
                cells=cells,
                symbols=symbols,
                positions=positions,
            )
            for x in trajectory_data.iteritems():
                traj.set_array(x[0], numpy.array(x[1]))
            new_nodes_list.append((self.get_linkname_outtrajectory(), traj))

        except KeyError:
            # forces, atomic charges and atomic mag. moments, in scf calculation (when output)
            arraydata = ArrayData()
            for x in trajectory_data.iteritems():
                arraydata.set_array(x[0], numpy.array(x[1]))
            new_nodes_list.append((self.get_linkname_outarray(), arraydata))

    return successful, new_nodes_list
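
# Worked sketch of the occupations correction above (illustrative): for a
# spin-unpolarized run QE writes a single spin component whose occupations sum to
# half the number of electrons, so the parser doubles them; with two components the
# arrays are kept as they are. The numbers below are made up.
import numpy

bands_data = {'occupations': [[1.0, 1.0, 0.0, 0.0]]}    # only one spin component
try:
    bands_data['occupations'][1]
    the_occupations = bands_data['occupations']
except IndexError:
    the_occupations = 2. * numpy.array(bands_data['occupations'][0])
print the_occupations    # [ 2.  2.  0.  0.]
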
def get_explicit_kpoints_path(structure, parameters):
    """
    Return the kpoint path for band structure (in scaled and absolute coordinates),
    given a crystal structure, using the paths proposed in the various publications
    (see description of the 'recipe' input parameter).

    The parameters are the same as get_explicit_k_path in __init__, but here all
    structures are input and returned as AiiDA structures rather than tuples, and
    similarly k-points-related information as an AiiDA KpointsData class.

    :param structure: The AiiDA StructureData for which we want to obtain the
        suggested path.

    :param parameters: A dictionary whose key-value pairs are passed as additional
        kwargs to the ``seekpath.get_explicit_k_path`` function.

    :return: A dictionary with four nodes:

        - ``explicit_kpoints``: a KpointsData with the (explicit) kpoints (with labels set).

        - ``parameters``: a ParameterData, whose content is the same dictionary as
          returned by the ``seekpath.get_explicit_k_path`` function (see `seekpath
          documentation <https://seekpath.readthedocs.io/>`_), except that:

          - ``conv_lattice``, ``conv_positions``, ``conv_types`` are removed and
            replaced by the ``conv_structure`` output node

          - ``primitive_lattice``, ``primitive_positions``, ``primitive_types`` are
            removed and replaced by the ``primitive_structure`` output node

          - ``reciprocal_primitive_lattice``, ``explicit_kpoints_abs``,
            ``explicit_kpoints_rel`` and ``explicit_kpoints_labels`` are removed and
            replaced by the ``explicit_kpoints`` output node

        - ``primitive_structure``: A StructureData with the primitive structure

        - ``conv_structure``: A StructureData with the conventional structure
    """
    check_seekpath_is_installed()
    import seekpath

    structure_tuple, kind_info, kinds = structure_to_spglib_tuple(structure)

    result = {}
    rawdict = seekpath.get_explicit_k_path(structure=structure_tuple, **parameters)

    # Replace primitive structure with AiiDA StructureData
    primitive_lattice = rawdict.pop('primitive_lattice')
    primitive_positions = rawdict.pop('primitive_positions')
    primitive_types = rawdict.pop('primitive_types')
    primitive_tuple = (primitive_lattice, primitive_positions, primitive_types)
    primitive_structure = spglib_tuple_to_structure(primitive_tuple, kind_info, kinds)

    # Replace conv structure with AiiDA StructureData
    conv_lattice = rawdict.pop('conv_lattice')
    conv_positions = rawdict.pop('conv_positions')
    conv_types = rawdict.pop('conv_types')
    conv_tuple = (conv_lattice, conv_positions, conv_types)
    conv_structure = spglib_tuple_to_structure(conv_tuple, kind_info, kinds)

    # Remove reciprocal_primitive_lattice, recalculated by kpoints class
    rawdict.pop('reciprocal_primitive_lattice')
    kpoints_abs = rawdict.pop('explicit_kpoints_abs')
    kpoints_rel = rawdict.pop('explicit_kpoints_rel')
    kpoints_labels = rawdict.pop('explicit_kpoints_labels')

    # set_kpoints expects labels like [[0, 'X'], [34, 'L'], ...], so generate it here,
    # skipping empty labels
    labels = [[idx, label] for idx, label in enumerate(kpoints_labels) if label]
    kpoints = KpointsData()
    kpoints.set_cell_from_structure(primitive_structure)
    kpoints.set_kpoints(kpoints_abs, cartesian=True, labels=labels)

    result['parameters'] = ParameterData(dict=rawdict)
    result['explicit_kpoints'] = kpoints
    result['primitive_structure'] = primitive_structure
    result['conv_structure'] = conv_structure

    return result
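
# Usage sketch (assumes a loaded AiiDA profile and seekpath installed; the structure
# is a toy FCC aluminium cell and the 'reference_distance' value is just an example
# of a kwarg forwarded to seekpath.get_explicit_k_path).
from aiida.orm.data.structure import StructureData

cell = [[0.0, 2.025, 2.025], [2.025, 0.0, 2.025], [2.025, 2.025, 0.0]]
structure = StructureData(cell=cell)
structure.append_atom(position=(0., 0., 0.), symbols='Al')

result = get_explicit_kpoints_path(structure, {'reference_distance': 0.025})
print result['explicit_kpoints'].labels          # e.g. [(0, 'GAMMA'), (30, 'X'), ...]
print result['primitive_structure'].get_formula()
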
def parse_with_retrieved(self, retrieved):
    """
    Parses the datafolder and stores the results.

    The parser for this simple code simply stores in the DB a node representing the
    file of phonon frequencies.
    """
    from aiida.common.exceptions import InvalidOperation

    # suppose at the start that the job is successful
    successful = True
    new_nodes_list = []

    # Check that the retrieved folder is there
    try:
        out_folder = retrieved[self._calc._get_linkname_retrieved()]
    except KeyError:
        self.logger.error("No retrieved folder found")
        return False, ()

    # check what is inside the folder
    list_of_files = out_folder.get_folder_list()
    # at least the stdout should exist
    if self._calc._OUTPUT_FILE_NAME not in list_of_files:
        successful = False
        self.logger.error("Standard output not found")
        return successful, ()

    # check that the file has finished (i.e. JOB DONE is inside the file)
    filpath = out_folder.get_abs_path(self._calc._OUTPUT_FILE_NAME)
    with open(filpath, 'r') as fil:
        lines = fil.read()
    if "JOB DONE" not in lines:
        successful = False
        self.logger.error("Computation did not finish properly")

    # check that the phonon frequencies file is present
    try:
        # define phonon frequencies file name
        phonon_file = out_folder.get_abs_path(self._calc._PHONON_FREQUENCIES_NAME)
    except OSError:
        successful = False
        self.logger.error("File with phonon frequencies not found")
        return successful, new_nodes_list

    # extract the kpoints from the input data and create the kpointsdata for bands
    kpointsdata = self._calc.inp.kpoints
    try:
        kpoints = kpointsdata.get_kpoints()
        kpointsdata_for_bands = kpointsdata.copy()
    except AttributeError:
        kpoints = kpointsdata.get_kpoints_mesh(print_list=True)
        kpointsdata_for_bands = KpointsData()
        kpointsdata_for_bands.set_kpoints(kpoints)
    # find the number of kpoints
    num_kpoints = kpoints.shape[0]

    # call the raw parsing function
    parsed_data = parse_raw_matdyn_phonon_file(phonon_file)

    # extract the number of kpoints read from the file (and take it out of the
    # output dictionary)
    try:
        this_num_kpoints = parsed_data.pop('num_kpoints')
    except KeyError:
        successful = False
        self.logger.error("Wrong number of kpoints")
        # warning message already in parsed_data
        return successful, new_nodes_list

    # check that the number of kpoints from the file is the same as the one
    # in the input kpoints
    if num_kpoints != this_num_kpoints:
        successful = False
        self.logger.error("Number of kpoints different in input and in "
                          "phonon frequencies file")

    # extract phonon bands (and take out from output dictionary)
    phonon_bands = parsed_data.pop('phonon_bands')

    # save phonon branches into BandsData
    output_bands = BandsData()
    output_bands.set_kpointsdata(kpointsdata_for_bands)
    output_bands.set_bands(phonon_bands, units='THz')

    # convert the dictionary into an AiiDA object (here only warnings remain)
    output_params = ParameterData(dict=parsed_data)

    for message in parsed_data['warnings']:
        self.logger.error(message)

    # prepare the list of output nodes to be returned
    new_nodes_list = [(self.get_linkname_outparams(), output_params),
                      (self.get_linkname_outbands(), output_bands)]

    return successful, new_nodes_list
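
# Minimal sketch of the BandsData construction used above (illustrative): attach a
# set of k-points and a (num_kpoints x num_branches) array of frequencies in THz.
# Assumes a loaded AiiDA profile; the numbers are made up.
import numpy as np
from aiida.orm.data.array.kpoints import KpointsData
from aiida.orm.data.array.bands import BandsData

kpts = KpointsData()
kpts.set_kpoints(np.array([[0.0, 0.0, 0.0], [0.1, 0.0, 0.0], [0.2, 0.0, 0.0]]))

frequencies = np.array([[0.0, 0.0, 0.0, 15.3, 15.3, 15.3],
                        [1.2, 1.2, 2.0, 15.1, 15.2, 15.4],
                        [2.4, 2.4, 4.0, 14.9, 15.0, 15.5]])   # THz, 3 q-points x 6 branches

phonon_bands = BandsData()
phonon_bands.set_kpointsdata(kpts)
phonon_bands.set_bands(frequencies, units='THz')
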
def band_parser(band_dat_path, band_kpt_path, special_points, structure):
    """
    Parses the bands output data, along with the special points retrieved from the
    input kpoints, to construct a BandsData object which is then returned. Cannot
    handle discontinuities in the kpath: if two points are assigned to the same spot,
    only one will be passed.

    :param band_dat_path: file path to the aiida_band.dat file
    :param band_kpt_path: file path to the aiida_band.kpt file
    :param special_points: special points to add labels to the bands, a dictionary in
        the form expected in the input as described in the wannier90 documentation
    :param structure: the StructureData whose cell is attached to the kpoints
    :return: BandsData object constructed from the input params
    """
    import numpy as np
    from aiida.orm.data.array.bands import BandsData
    from aiida.orm.data.array.kpoints import KpointsData

    # imports the data
    out_kpt = np.genfromtxt(band_kpt_path, skip_header=1, usecols=(0, 1, 2))
    out_dat = np.genfromtxt(band_dat_path, usecols=1)

    # reshapes the output bands
    out_dat = out_dat.reshape(len(out_kpt), (len(out_dat) / len(out_kpt)), order="F")

    # finds expected points of discontinuity
    kpath = special_points['path']
    cont_break = [(i, (kpath[i - 1][1], kpath[i][0]))
                  for i in range(1, len(kpath))
                  if kpath[i - 1][1] != kpath[i][0]]

    # finds the special points
    special_points_dict = special_points['point_coords']
    labels = [(i, k) for k in special_points_dict for i in range(len(out_kpt))
              if all(np.isclose(special_points_dict[k], out_kpt[i]))]
    labels.sort()

    # Checks and appends labels if discontinuity
    appends = []
    for x in cont_break:
        # two cases: the break is before or the break is after
        # if the break is before
        if labels[x[0]][1] != x[1][0]:
            # checks to see if the discontinuity was already there
            if labels[x[0] - 1] == x[1][0]:
                continue
            else:
                insert_point = x[0]
                new_label = x[1][0]
                kpoint = labels[x[0]][0] - 1
                appends += [[insert_point, new_label, kpoint]]
        # if the break is after
        if labels[x[0]][1] != x[1][1]:
            # checks to see if the discontinuity was already there
            if labels[x[0] + 1] == x[1][1]:
                continue
            else:
                insert_point = x[0] + 1
                new_label = x[1][1]
                kpoint = labels[x[0]][0] + 1
                appends += [[insert_point, new_label, kpoint]]

    appends.sort()
    for i in range(len(appends)):
        append = appends[i]
        labels.insert(append[0] + i, (append[2], unicode(append[1])))

    bands = BandsData()
    k = KpointsData()
    k.set_cell_from_structure(structure)
    k.set_kpoints(out_kpt, cartesian=False)
    bands.set_kpointsdata(k)
    bands.set_bands(out_dat, units='eV')
    bands.labels = labels

    return bands
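
# Usage sketch (illustrative): the file names and the special_points layout follow the
# wannier90 conventions described in the docstring; the paths and coordinates below are
# placeholders, and `structure` stands for an existing StructureData node of the
# calculation.
special_points = {
    'path': [('G', 'X'), ('X', 'M')],
    'point_coords': {'G': [0.0, 0.0, 0.0],
                     'X': [0.5, 0.0, 0.0],
                     'M': [0.5, 0.5, 0.0]},
}
bands = band_parser('./aiida_band.dat', './aiida_band.kpt', special_points, structure)
print bands.get_bands().shape     # (num_kpoints, num_bands)
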
def parse_with_retrieved(self, retrieved):
    """
    Parse the output nodes for a PwCalculation from a dictionary of retrieved nodes.
    Two nodes that are expected are the default 'retrieved' FolderData node which will
    store the retrieved files permanently in the repository. The second required node
    is the unstored FolderData node with the temporary retrieved files, which should
    be passed under the key 'retrieved_temporary_folder_key' of the Parser class.

    :param retrieved: a dictionary of retrieved nodes
    """
    import os
    import numpy

    successful = True

    # Load the input dictionary
    parameters = self._calc.inp.parameters.get_dict()

    # Look for optional settings input node and potential 'parser_options' dictionary within it
    try:
        settings = self._calc.inp.settings.get_dict()
        parser_opts = settings[self.get_parser_settings_key()]
    except (AttributeError, KeyError):
        settings = {}
        parser_opts = {}

    # Check that the retrieved folder is there
    try:
        out_folder = retrieved[self._calc._get_linkname_retrieved()]
    except KeyError:
        self.logger.error("No retrieved folder found")
        return False, ()

    # Verify that the retrieved_temporary_folder is within the arguments if temporary files were specified
    if self._calc._get_retrieve_temporary_list():
        try:
            temporary_folder = retrieved[self.retrieved_temporary_folder_key]
            dir_with_bands = temporary_folder.get_abs_path('.')
        except KeyError:
            self.logger.error('the {} was not passed as an argument'.format(self.retrieved_temporary_folder_key))
            return False, ()
    else:
        dir_with_bands = None

    list_of_files = out_folder.get_folder_list()

    # The stdout is required for parsing
    if self._calc._OUTPUT_FILE_NAME not in list_of_files:
        self.logger.error("The standard output file '{}' was not found but is required".format(
            self._calc._OUTPUT_FILE_NAME))
        return False, ()

    # The xml file is required for parsing
    if self._calc._DATAFILE_XML_BASENAME not in list_of_files:
        self.logger.error("The xml output file '{}' was not found but is required".format(
            self._calc._DATAFILE_XML_BASENAME))
        successful = False
        xml_file = None
    else:
        xml_file = os.path.join(out_folder.get_abs_path('.'), self._calc._DATAFILE_XML_BASENAME)

    out_file = os.path.join(out_folder.get_abs_path('.'), self._calc._OUTPUT_FILE_NAME)

    # Call the raw parsing function
    parsing_args = [out_file, parameters, parser_opts, xml_file, dir_with_bands]

    out_dict, trajectory_data, structure_data, bands_data, raw_successful = parse_raw_output(*parsing_args)

    # If calculation was not considered failed already, use the new value
    successful = raw_successful if successful else successful

    # If the parser option 'all_symmetries' is not set to True, we reduce the raw parsed symmetries to save space
    all_symmetries = parser_opts.get('all_symmetries', False)

    if not all_symmetries:

        # In the standard output, each symmetry operation prints two rotation matrices:
        #
        #   * S_cryst^T: matrix in crystal coordinates, transposed
        #   * S_cart: matrix in cartesian coordinates
        #
        # The XML files only print one matrix:
        #
        #   * S_cryst: matrix in crystal coordinates
        #
        # The raw parsed symmetry information from the XML is large and will load the database heavily if stored as
        # is for each calculation. Instead, we will map these dictionaries onto a static dictionary of rotation
        # matrices generated by the _get_qe_symmetry_list static method. This dictionary will return the rotation
        # matrices in cartesian coordinates, i.e. S_cart. In order to compare the raw matrices from the XML to these
        # static matrices we have to convert S_cryst into S_cart. We derive here how that is done:
        #
        #   S_cryst * v_cryst = v_cryst'
        #
        # where v_cryst' is the rotated vector v_cryst under S_cryst.
        # We define `cell` where cell vectors are rows. Converting a vector from crystal to cartesian
        # coordinates is defined as:
        #
        #   cell^T * v_cryst = v_cart
        #
        # The inverse of this operation is defined as
        #
        #   v_cryst = cell^Tinv * v_cart
        #
        # Replacing the last equation into the first we find:
        #
        #   S_cryst * cell^Tinv * v_cart = cell^Tinv * v_cart'
        #
        # Multiplying on the left with cell^T gives:
        #
        #   cell^T * S_cryst * cell^Tinv * v_cart = v_cart'
        #
        # which can be rewritten as:
        #
        #   S_cart * v_cart = v_cart'
        #
        # where:
        #
        #   S_cart = cell^T * S_cryst * cell^Tinv
        #
        # We compute here the transpose of the structure cell basis and its inverse, which are needed to transform
        # the parsed rotation matrices, which are in crystal coordinates, to cartesian coordinates, which are the
        # matrices that are returned by the _get_qe_symmetry_list staticmethod
        cell = structure_data['cell']['lattice_vectors']
        cell_T = numpy.transpose(cell)
        cell_Tinv = numpy.linalg.inv(cell_T)

        try:
            if 'symmetries' in out_dict.keys():
                old_symmetries = out_dict['symmetries']
                new_symmetries = []
                for this_sym in old_symmetries:
                    name = this_sym['name'].strip()
                    for i, this in enumerate(self._possible_symmetries):
                        # Since we do an exact comparison we strip the string name from whitespace
                        # and as soon as it is matched, we break to prevent it from matching another
                        if name == this['name'].strip():
                            index = i
                            break
                    else:
                        index = None
                        self.logger.error('Symmetry {} not found'.format(name))

                    new_dict = {}
                    if index is not None:
                        # The raw parsed rotation matrix is in crystal coordinates, whereas the mapped rotation
                        # in self._possible_symmetries is in cartesian coordinates. To allow them to be compared
                        # to make sure we matched the correct rotation symmetry, we first convert the parsed matrix
                        # to cartesian coordinates. For explanation of the method, see comment above.
                        rotation_cryst = this_sym['rotation']
                        rotation_cart_new = self._possible_symmetries[index]['matrix']
                        rotation_cart_old = numpy.dot(cell_T, numpy.dot(rotation_cryst, cell_Tinv))

                        inversion = self._possible_symmetries[index]['inversion']
                        if not are_matrices_equal(rotation_cart_old, rotation_cart_new, swap_sign_matrix_b=inversion):
                            self.logger.error('Mapped rotation matrix {} does not match the original rotation {}'
                                              .format(rotation_cart_new, rotation_cart_old))
                            new_dict['all_symmetries'] = this_sym
                        else:
                            # Note: here I lose the information about equivalent ions and fractional_translation.
                            new_dict['t_rev'] = this_sym['t_rev']
                            new_dict['symmetry_number'] = index
                    else:
                        new_dict['all_symmetries'] = this_sym

                    new_symmetries.append(new_dict)

                out_dict['symmetries'] = new_symmetries  # and overwrite the old one
        except KeyError:
            # no symmetries were parsed (failed case, likely)
            self.logger.error("No symmetries were found in output")

    new_nodes_list = []

    # I eventually save the new structure. structure_data is unnecessary after this
    in_struc = self._calc.get_inputs_dict()['structure']
    type_calc = parameters['CONTROL']['calculation']
    struc = in_struc
    if type_calc in ['relax', 'vc-relax', 'md', 'vc-md']:
        if 'cell' in structure_data.keys():
            struc = convert_qe2aiida_structure(structure_data, input_structure=in_struc)
            new_nodes_list.append((self.get_linkname_outstructure(), struc))

    k_points_list = trajectory_data.pop('k_points', None)
    k_points_weights_list = trajectory_data.pop('k_points_weights', None)

    if k_points_list is not None:

        # Build the kpoints object
        if out_dict['k_points_units'] not in ['2 pi / Angstrom']:
            raise QEOutputParsingError('Error in kpoints units (should be cartesian)')

        kpoints_from_output = KpointsData()
        kpoints_from_output.set_cell_from_structure(struc)
        kpoints_from_output.set_kpoints(k_points_list, cartesian=True, weights=k_points_weights_list)
        kpoints_from_input = self._calc.inp.kpoints

        if not bands_data:
            try:
                kpoints_from_input.get_kpoints()
            except AttributeError:
                new_nodes_list += [(self.get_linkname_out_kpoints(), kpoints_from_output)]

        # Converting bands into a BandsData object (including the kpoints)
        if bands_data:
            kpoints_for_bands = kpoints_from_output

            try:
                kpoints_from_input.get_kpoints()
                kpoints_for_bands.labels = kpoints_from_input.labels
            except (AttributeError, ValueError, TypeError):
                # AttributeError: no list of kpoints in input
                # ValueError: labels from input do not match the output
                #   list of kpoints (some kpoints are missing)
                # TypeError: labels are not set, so kpoints_from_input.labels=None
                pass

            # Get the bands occupations and correct the occupations of QE:
            # If it computes only one component, it occupies it with half number of electrons
            try:
                bands_data['occupations'][1]
                the_occupations = bands_data['occupations']
            except IndexError:
                the_occupations = 2. * numpy.array(bands_data['occupations'][0])

            try:
                bands_data['bands'][1]
                bands_energies = bands_data['bands']
            except IndexError:
                bands_energies = bands_data['bands'][0]

            the_bands_data = BandsData()
            the_bands_data.set_kpointsdata(kpoints_for_bands)
            the_bands_data.set_bands(bands_energies,
                                     units=bands_data['bands_units'],
                                     occupations=the_occupations)

            new_nodes_list += [('output_band', the_bands_data)]
            out_dict['linknames_band'] = ['output_band']

    # Separate the atomic_occupations dictionary in its own node if it is present
    atomic_occupations = out_dict.get('atomic_occupations', {})
    if atomic_occupations:
        out_dict.pop('atomic_occupations')
        atomic_occupations_node = ParameterData(dict=atomic_occupations)
        new_nodes_list.append(('output_atomic_occupations', atomic_occupations_node))

    output_params = ParameterData(dict=out_dict)
    new_nodes_list.append((self.get_linkname_outparams(), output_params))

    if trajectory_data:
        from aiida.orm.data.array.trajectory import TrajectoryData
        from aiida.orm.data.array import ArrayData
        try:
            positions = numpy.array(trajectory_data.pop('atomic_positions_relax'))
            try:
                cells = numpy.array(trajectory_data.pop('lattice_vectors_relax'))
            # if KeyError, the MD was at fixed cell
            except KeyError:
                cells = numpy.array([in_struc.cell] * len(positions))

            symbols = numpy.array([str(i.kind_name) for i in in_struc.sites])
            stepids = numpy.arange(len(positions))  # a growing integer per step
            # I will insert time parsing when they fix their issues about time
            # printing (logic is broken if restart is on)

            traj = TrajectoryData()
            traj.set_trajectory(
                stepids=stepids,
                cells=cells,
                symbols=symbols,
                positions=positions,
            )
            for x in trajectory_data.iteritems():
                traj.set_array(x[0], numpy.array(x[1]))
            new_nodes_list.append((self.get_linkname_outtrajectory(), traj))

        except KeyError:
            # forces, atomic charges and atomic mag. moments, in scf calculation (when output)
            arraydata = ArrayData()
            for x in trajectory_data.iteritems():
                arraydata.set_array(x[0], numpy.array(x[1]))
            new_nodes_list.append((self.get_linkname_outarray(), arraydata))

    return successful, new_nodes_list
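
# Numerical sketch of the crystal-to-cartesian rotation conversion derived in the
# comments of the parser above: S_cart = cell^T * S_cryst * cell^Tinv. The toy cell
# and rotation below are made up for illustration.
import numpy

cell = numpy.array([[0.0, 2.7, 2.7],
                    [2.7, 0.0, 2.7],
                    [2.7, 2.7, 0.0]])      # rows are lattice vectors
cell_T = cell.transpose()
cell_Tinv = numpy.linalg.inv(cell_T)

# a rotation written in crystal coordinates (integer matrix)
s_cryst = numpy.array([[0, 1, 0],
                       [1, 0, 0],
                       [0, 0, 1]])

s_cart = numpy.dot(cell_T, numpy.dot(s_cryst, cell_Tinv))

# rotating a vector in either representation must give the same cartesian result
v_cryst = numpy.array([0.1, 0.2, 0.3])
v_cart = numpy.dot(cell_T, v_cryst)
assert numpy.allclose(numpy.dot(cell_T, numpy.dot(s_cryst, v_cryst)),
                      numpy.dot(s_cart, v_cart))
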
def execute(args):
    """
    The main execution of the script, which will run some preliminary checks on the
    command line arguments before passing them to the workchain and running it
    """
    try:
        code = Code.get_from_string(args.codename)
    except NotExistent as exception:
        print "Execution failed: could not retrieve the code '{}'".format(args.codename)
        print "Exception report: {}".format(exception)
        return

    try:
        vibra_code = Code.get_from_string(args.vibra_codename)
    except NotExistent as exception:
        print "Execution failed: could not retrieve the code '{}'".format(args.vibra_codename)
        print "Exception report: {}".format(exception)
        return

    protocol = Str(args.protocol)

    # Structure. Bulk silicon
    SuperCell_1 = 1
    SuperCell_2 = 1
    SuperCell_3 = 1
    scnumbers = np.array([SuperCell_1, SuperCell_2, SuperCell_3])
    scarray = ArrayData()
    scarray.set_array('sca', scnumbers)

    alat = 5.43  # Angstrom. Not passed to the fdf file (only for internal use)
    cell = [[0., alat / 2, alat / 2],
            [alat / 2, 0., alat / 2],
            [alat / 2, alat / 2, 0.]]

    pf = alat * 0.125
    na = 2
    x0 = [[pf, pf, pf], [-pf, -pf, -pf]]

    s1 = StructureData(cell=cell)
    for i in range(na):
        s1.append_atom(position=(x0[i][0], x0[i][1], x0[i][2]), symbols=['Si'])

    bandskpoints = KpointsData()
    kpp = [(1, 1., 1., 1.), (15, 0., 0.5, 0.5), (25, 0., 0., 0.),
           (20, 0.5, 0.5, 0.5), (20, 0., 0.5, 0.5), (15, 0.25, 0.5, 0.75),
           (20, 0.5, 0.5, 0.5)]
    lpp = [[0, '\Gamma'], [1, 'X'], [2, '\Gamma'], [3, 'L'], [4, 'X'],
           [5, 'W'], [6, 'L']]
    bandskpoints.set_cell(s1.cell, s1.pbc)
    bandskpoints.set_kpoints(kpp, labels=lpp)

    if args.structure > 0:
        structure = load_node(args.structure)
    else:
        structure = s1

    run(SiestaVibraWorkChain,
        code=code,
        vibra_code=vibra_code,
        scarray=scarray,
        structure=structure,
        protocol=protocol,
        bandskpoints=bandskpoints)
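
# Sketch of the command-line wiring that would feed execute() (illustrative; the actual
# argument parser of this script may differ, and the code labels below are placeholders
# for codes configured in your AiiDA database).
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(description='Run the SiestaVibraWorkChain on bulk Si')
    parser.add_argument('--codename', type=str, required=True)        # e.g. 'siesta@localhost'
    parser.add_argument('--vibra_codename', type=str, required=True)  # e.g. 'vibra@localhost'
    parser.add_argument('--protocol', type=str, default='standard')
    parser.add_argument('--structure', type=int, default=0)           # pk of a stored structure, 0 = use built-in Si
    execute(parser.parse_args())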