Example No. 1
    def prepare_for_submission(self, folder):
        """
        Create input files.

        :param folder: an `aiida.common.folders.Folder` where the plugin should temporarily place all files needed by
            the calculation.
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        codeinfo = datastructures.CodeInfo()
        codeinfo.cmdline_params = [self.inputs.script.filename]
        codeinfo.code_uuid = self.inputs.code.uuid
        #codeinfo.stdout_name = self.metadata.options.output_filename
        codeinfo.withmpi = self.inputs.metadata.options.withmpi

        # Prepare a `CalcInfo` to be returned to the engine
        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        local_copy_list = [(self.inputs.script.uuid, self.inputs.script.filename, self.inputs.script.filename)]

        # this is minimalistic - one could add things here like:
        #  * if something is passed to the top-level "structure" input, write the file out in a given format
        #  * if type of file is X, pre-process using procedure Y
        #  * ...
        if 'files' in self.inputs:
            for f in self.inputs.files:
                node = self.inputs.files[f]
                local_copy_list.append((node.uuid, node.filename, node.filename))

        calcinfo.local_copy_list = local_copy_list
        calcinfo.retrieve_list = ['*']

        return calcinfo
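
For reference, below is a minimal sketch of the `define` classmethod that a plugin like Example No. 1 would pair with this `prepare_for_submission`. The port names (`script`, `files`) mirror the inputs read above, but the spec itself is an assumption, not the plugin's actual code.

# Hypothetical spec matching the inputs used in Example No. 1 (names assumed).
from aiida import orm
from aiida.engine import CalcJob


class ScriptCalculation(CalcJob):
    """Run a user-supplied script, copying any extra input files alongside it."""

    @classmethod
    def define(cls, spec):
        super().define(spec)
        # the script whose filename ends up in `codeinfo.cmdline_params`
        spec.input('script', valid_type=orm.SinglefileData)
        # optional extra files, each added to `local_copy_list` under its own filename
        spec.input_namespace('files', valid_type=orm.SinglefileData, required=False, dynamic=True)
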
Example No. 2
    def prepare_for_submission(self, folder):
        """
        Create input files.

        :param folder: an `aiida.common.folders.Folder` where the plugin should temporarily place all files needed by
            the calculation.
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        codeinfo = datastructures.CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        if not self.inputs.code.can_run_on(
                self.inputs.bigdft_data_folder.computer):
            self.report("This post-processing script {}\
                         can't run on  {} where data resides"                                                             ,
                        format(self.inputs.code, self.inputs.bigdft_data_folder.get_computer_name()))
            return self.exit_codes.ERROR_SCRIPT

        codeinfo.withmpi = False
        codeinfo.cmdline_params = [self.inputs.bigdft_data_folder.get_remote_path()]
        # Prepare a `CalcInfo` to be returned to the engine
        calcinfo = datastructures.CalcInfo()

        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = []
        calcinfo.retrieve_list = []
        calcinfo.retrieve_list.extend(self.inputs.retrieved_files)

        return calcinfo
Example No. 3
    def prepare_for_submission(self, folder):
        """
        Create input files.

        :param folder: an `aiida.common.folders.Folder` where the plugin should temporarily place all files needed by
            the calculation.
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        codeinfo = datastructures.CodeInfo()
        codeinfo.cmdline_params = self.inputs.parameters.cmdline_params(
            file1_name=self.inputs.file1.filename,
            file2_name=self.inputs.file2.filename)
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.stdout_name = self.metadata.options.output_filename
        codeinfo.withmpi = self.inputs.metadata.options.withmpi

        # Prepare a `CalcInfo` to be returned to the engine
        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = [
            (self.inputs.file1.uuid, self.inputs.file1.filename, self.inputs.file1.filename),
            (self.inputs.file2.uuid, self.inputs.file2.filename, self.inputs.file2.filename),
        ]
        calcinfo.retrieve_list = [self.metadata.options.output_filename]

        return calcinfo
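
A hypothetical launch script for a two-file calculation such as Example No. 3. The entry point name, code label and file paths are placeholders, and the plugin-specific `parameters` input is left out because its Data type is not shown above; a real run would also need it.

# Hypothetical driver script (entry point, code label and paths are placeholders).
from aiida import engine, orm
from aiida.plugins import CalculationFactory

DiffCalculation = CalculationFactory('diff')                # assumed entry point

builder = DiffCalculation.get_builder()
builder.code = orm.load_code('diff@localhost')              # assumed code label
builder.file1 = orm.SinglefileData(file='/tmp/file1.txt')   # placeholder files
builder.file2 = orm.SinglefileData(file='/tmp/file2.txt')
builder.metadata.options.resources = {'num_machines': 1, 'num_mpiprocs_per_machine': 1}

result = engine.run(builder)
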
Example No. 4
    def prepare_for_submission(self, folder):
        """
        Create input files.

        :param folder: an `aiida.common.folders.Folder` where the plugin should temporarily place all files needed by
            the calculation.
        :return: `aiida.common.datastructures.CalcInfo` instance
        """

        codeinfo = datastructures.CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.withmpi = self.inputs.metadata.options.withmpi
        codeinfo.cmdline_params = ['PreModRun.txt']

        # Prepare a `CalcInfo` to be returned to the engine
        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = [
            (self.inputs.alloy.uuid, self.inputs.alloy.filename, self.inputs.alloy.filename),
            (self.inputs.solver.uuid, self.inputs.solver.filename, self.inputs.solver.filename),
            (self.inputs.temperature.uuid, self.inputs.temperature.filename, self.inputs.temperature.filename),
            (self.inputs.models.uuid, self.inputs.models.filename, self.inputs.models.filename),
            (self.inputs.libphases.uuid, self.inputs.libphases.filename, self.inputs.libphases.filename),
            (self.inputs.libmodel.uuid, self.inputs.libmodel.filename, self.inputs.libmodel.filename),
        ]
        calcinfo.retrieve_list = ['PreModRun', 'PreModRun.log']

        # write input file
        parameter_parser = ParameterParser(data=self.inputs.parameters)
        parameter_parser.write(folder.get_abs_path('PreModRun.txt'))

        return calcinfo
Example No. 5
    def prepare_for_submission(self, folder):
        """
        Create input files.

        :param folder: an `aiida.common.folders.Folder` where the plugin should temporarily place all files needed by
            the calculation.
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        codeinfo = datastructures.CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.stdout_name = self.metadata.options.output_filename
        codeinfo.withmpi = self.inputs.metadata.options.withmpi

        # Prepare a `CalcInfo` to be returned to the engine
        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        local_copy_list = []
        retrieve_list = []
        # Define which datafiles to analyze
        for item in self.inputs.datafiles.values():
            local_copy_list.append((item.uuid, item.filename, item.filename))
            retrieve_list.append(item.filename)
        calcinfo.local_copy_list = local_copy_list
        calcinfo.retrieve_list = retrieve_list

        return calcinfo
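
The files named in `retrieve_list` end up in the calculation's retrieved folder, where a parser can read them back. Below is a minimal sketch assuming aiida-core's `Parser` API; the output port name and the line-counting logic are purely illustrative.

# Hypothetical parser consuming the retrieved datafiles (output port name assumed).
from aiida import orm
from aiida.parsers import Parser


class DatafileParser(Parser):
    """Count the lines of every retrieved file and return the counts as a Dict."""

    def parse(self, **kwargs):
        results = {}
        for filename in self.retrieved.list_object_names():
            content = self.retrieved.get_object_content(filename)
            results[filename] = len(content.splitlines())
        self.out('results', orm.Dict(dict=results))
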
Example No. 6
    def prepare_for_submission(self, folder):
        """
        Create input files.
        sirius.json,
        input.yml

        :param folder: an `aiida.common.folders.Folder` where the plugin should temporarily place all files needed by
            the calculation.
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        codeinfo = datastructures.CodeInfo()
        output_filename = self.metadata.options.output_filename
        codeinfo.cmdline_params = ['--input=input.yml']
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.stdout_name = self.metadata.options.output_filename
        codeinfo.withmpi = self.inputs.metadata.options.withmpi

        # with config from input
        structure = self.inputs.structure
        kpoints = self.inputs.kpoints
        magnetization = self.inputs.magnetization
        # sirius_json = make_sirius_json(self.inputs.sirius_config.get_dict()['parameters'],
        sirius_json = self.inputs.sirius_config.get_dict()
        with tempfile.NamedTemporaryFile(mode='w',
                                         suffix='.json',
                                         delete=False) as sirius_tmpfile:
            # insert Pseudopotentials directly into json
            sirius_json = self._read_pseudos(sirius_json)
            # dump to file
            json.dump(sirius_json, sirius_tmpfile)
        sirius_config = SinglefileData(file=sirius_tmpfile.name)
        sirius_config.store()
        # prepare YAML input for NLCG
        with tempfile.NamedTemporaryFile(mode='w', suffix='.yml',
                                         delete=False) as sirius_md_yaml:
            out = yaml.dump(
                {'parameters': self.inputs.sirius_md_params.get_dict()})
            md_tmpfile_name = sirius_md_yaml.name
            sirius_md_yaml.write(out)
        sirius_md_config = SinglefileData(file=md_tmpfile_name)
        sirius_md_config.store()

        # Prepare a `CalcInfo` to be returned to the engine
        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = [
            (sirius_config.uuid, sirius_config.filename, 'sirius.json'),
            (sirius_md_config.uuid, sirius_md_config.filename, 'input.yml')
        ]
        calcinfo.retrieve_list = [
            self.metadata.options.output_filename, 'md_results.json'
        ]

        return calcinfo
Example No. 7
    def prepare_for_submission(self, folder):
        """
        Create input files.

        :param folder: an `aiida.common.folders.Folder` where the plugin should temporarily place all files needed by
            the calculation.
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        codeinfo = datastructures.CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.withmpi = self.inputs.metadata.options.withmpi
        if self.inputs.metadata.options.command_line != '':
            codeinfo.cmdline_params = \
                self.inputs.metadata.options.command_line.split()
        # Prepare a `CalcInfo` to be returned to the engine
        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = \
            self.inputs.metadata.options.local_copy_list.get_list()
        calcinfo.retrieve_list = \
            self.inputs.metadata.options.retrieve_list.get_list()
        return calcinfo
Example No. 8
    def prepare_for_submission(self, folder: folders.Folder):
        """Create input files from the input nodes passed to this instance of the `CalcJob`.

        :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        self._validate_inputs()

        dos_filenames = OpenmxCalculation.dos_filenames

        remote_symlink_list = [
            (self.inputs.openmx_output_folder.computer.uuid,
             os.path.join(self.inputs.openmx_output_folder.get_remote_path(),
                          dos_filenames['val']),
             os.path.join(self._DATA_PATH, dos_filenames['val'])),
            (self.inputs.openmx_output_folder.computer.uuid,
             os.path.join(self.inputs.openmx_output_folder.get_remote_path(),
                          dos_filenames['vec']),
             os.path.join(self._DATA_PATH, dos_filenames['vec'])),
        ]

        retrieve_list = self._generate_retrieve_list()

        input_file_content = self._write_input_file()
        with folder.open(self._INPUT_FILE, 'w') as handle:
            handle.write(input_file_content)

        # Fill out the `CodeInfo`
        codeinfo = datastructures.CodeInfo()
        codeinfo.cmdline_params = [dos_filenames['val'], dos_filenames['vec']]
        codeinfo.stdin_name = self._INPUT_FILE
        codeinfo.stdout_name = self._OUTPUT_FILE
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.withmpi = True

        # Fill out the `CalcInfo`
        calcinfo = datastructures.CalcInfo()
        calcinfo.uuid = str(self.uuid)
        calcinfo.codes_info = [codeinfo]
        calcinfo.remote_symlink_list = remote_symlink_list
        calcinfo.retrieve_list = retrieve_list
        calcinfo.retrieve_list.append(self._OUTPUT_FILE)

        return calcinfo
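
Each entry of `remote_symlink_list` (and of `remote_copy_list`) is a triple of computer UUID, absolute source path on that computer, and target path relative to the new working directory. A small helper sketch, assuming the source is an `aiida.orm.RemoteData` node as in Example No. 8:

import os


def build_symlink_entry(remote_folder, source_name, target_subdir):
    """Return one (computer uuid, absolute source path, relative target path) triple.

    `remote_folder` is assumed to be an `aiida.orm.RemoteData` node.
    """
    return (
        remote_folder.computer.uuid,
        os.path.join(remote_folder.get_remote_path(), source_name),
        os.path.join(target_subdir, source_name),
    )
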
Example No. 9
    def prepare_for_submission(self, folder):
        """This method is called prior to job submission with a set of calculation input nodes.

        The inputs will be validated and sanitized, after which the necessary input files will be written to disk in a
        temporary folder. A CalcInfo instance will be returned that contains lists of files that need to be copied to
        the remote machine before job submission, as well as file lists that are to be retrieved after job completion.

        :param folder: an aiida.common.folders.Folder to temporarily write files on disk
        :returns: CalcInfo instance
        """
        from aiida_codtools.cli.utils.parameters import CliParameters

        try:
            parameters = self.inputs.parameters.get_dict()
        except AttributeError:
            parameters = {}

        # The input file should simply contain the relative filename that contains the CIF to be deposited
        with folder.open(self.options.input_filename, 'w') as handle:
            handle.write(u'{}\n'.format(self.filename_cif))

        # Write parameters that relate to the config file to that file and remove them from the CLI parameters
        with folder.open(self.filename_config, 'w') as handle:
            for key in self._config_keys:
                if key in parameters:
                    handle.write(u'{}={}\n'.format(key, parameters.pop(key)))

        cli_parameters = copy.deepcopy(self._default_cli_parameters)
        cli_parameters.update(parameters)

        codeinfo = datastructures.CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.cmdline_params = CliParameters.from_dictionary(cli_parameters).get_list()
        codeinfo.stdin_name = self.options.input_filename
        codeinfo.stdout_name = self.options.output_filename
        codeinfo.stderr_name = self.options.error_filename

        calcinfo = datastructures.CalcInfo()
        calcinfo.uuid = str(self.uuid)
        calcinfo.codes_info = [codeinfo]
        calcinfo.retrieve_list = [self.options.output_filename, self.options.error_filename]
        calcinfo.local_copy_list = [(self.inputs.cif.uuid, self.inputs.cif.filename, self.filename_cif)]
        calcinfo.remote_copy_list = []

        return calcinfo
Example No. 10
    def prepare_for_submission(self, folder):
        """
        """
        self.write_input_files(folder)

        # Code
        codeinfo = datastructures.CodeInfo()
        codeinfo.cmdline_params = [self.options.input_filename]
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.stdout_name = self.metadata.options.output_filename

        # Prepare a `CalcInfo` to be returned to the engine
        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        retrieve_list = ['sqs.out']
        calcinfo.retrieve_list = retrieve_list + [self.metadata.options.output_filename]

        return calcinfo
Example No. 11
    def prepare_for_submission(self, folder):
        """
        Create input files.

        :param folder: an `aiida.common.folders.Folder` where the plugin should temporarily place all files needed by
            the calculation.
        :return: `aiida.common.datastructures.CalcInfo` instance
        """

        # Add the used defined input parameters to the input file
        inp = self.inputs.parameters.get_dict()

        # Add the structure related variables to the dictionary
        if 'structure' in self.inputs:
            inp.update(structure_to_abivars(self.inputs.structure.get_pymatgen()))

        lines = []
        for inp_var, value in inp.items():
            lines.append(str(InputVariable(inp_var, value)))

        with io.open(folder.get_abs_path(self._DEFAULT_INPUT_FILE), mode="w", encoding="utf-8") as fobj:
            fobj.writelines(lines)

        # Create code info
        codeinfo = datastructures.CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        # codeinfo.stdin_name = self.options.input_filename
        # This gives the path to the input file to Abinit rather than passing the input from standard input
        #codeinfo.cmdline_params = ['<', self.options.input_filename]
        codeinfo.cmdline_params = [self.options.input_filename]
        codeinfo.stdout_name = self.metadata.options.output_filename
        codeinfo.withmpi = self.inputs.metadata.options.withmpi

        # Prepare a `CalcInfo` to be returned to the engine
        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.stdin_name = self.options.input_filename
        calcinfo.stdout_name = self.options.output_filename
        calcinfo.retrieve_list = [self._DEFAULT_OUTPUT_FILE, self._DEFAULT_GSR_FILE_NAME]
        #calcinfo.retrieve_list += settings.pop('additional_retrieve_list', [])

        return calcinfo
Example No. 12
    def prepare_for_submission(self, folder):
        """
        Create input files.

        :param folder: an `aiida.common.folders.Folder` where the plugin should temporarily place all files needed by
            the calculation.
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        codeinfo = datastructures.CodeInfo()
        output_filename = self.metadata.options.output_filename
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.stdout_name = self.metadata.options.output_filename
        codeinfo.withmpi = self.inputs.metadata.options.withmpi

        # with config from input
        structure = self.inputs.structure
        kpoints = self.inputs.kpoints
        magnetization = self.inputs.magnetization

        sirius_json = self.inputs.sirius_config.get_dict()
        sirius_json = add_cell_kpoints_mag_to_sirius(sirius_json, structure,
                                                     magnetization, kpoints)
        # TODO check schema
        Schema(sirius_options)(sirius_json)

        with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as sirius_tmpfile:
            # insert Pseudopotentials directly into json
            sirius_json = self._read_pseudos(sirius_json)
            # dump to file
            json.dump(sirius_json, sirius_tmpfile)
        sirius_config = SinglefileData(file=sirius_tmpfile.name)
        sirius_config.store()

        # Prepare a `CalcInfo` to be returned to the engine
        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = [
            (sirius_config.uuid, sirius_config.filename, 'sirius.json')
        ]
        calcinfo.retrieve_list = [self.metadata.options.output_filename, 'nlcg.out']

        return calcinfo
Example No. 13
    def prepare_for_submission(self, folder):
        """
        Create input files.
        sirius.json,
        nlcg.yaml

        :param folder: an `aiida.common.folders.Folder` where the plugin should temporarily place all files needed by
            the calculation.
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        codeinfo = datastructures.CodeInfo()
        output_filename = self.metadata.options.output_filename
        codeinfo.cmdline_params = ['--output=output.json']
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.stdout_name = self.metadata.options.output_filename
        codeinfo.withmpi = self.inputs.metadata.options.withmpi

        # with config from input
        structure = self.inputs.structure
        kpoints = self.inputs.kpoints
        magnetization = self.inputs.magnetization
        sirius_json = make_sirius_json(self.inputs.sirius_config.get_dict()['parameters'],
                                       structure, kpoints, magnetization)
        with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as sirius_tmpfile:
            sirius_json = self._read_pseudos(sirius_json)
            sirius_tmpfile_name = sirius_tmpfile.name
            # merge with settings given from outside
            sirius_json = {**sirius_json, **self.inputs.sirius_config.get_dict()}
            # dump to file
            json.dump(sirius_json, sirius_tmpfile)
        sirius_config = SinglefileData(file=sirius_tmpfile_name)
        sirius_config.store()

        # Prepare a `CalcInfo` to be returned to the engine
        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = [
            (sirius_config.uuid, sirius_config.filename, 'sirius.json')
        ]
        calcinfo.retrieve_list = [self.metadata.options.output_filename, 'output.json']

        return calcinfo
Example No. 14
    def prepare_for_submission(self, folder):
        """This method is called prior to job submission with a set of calculation input nodes.

        The inputs will be validated and sanitized, after which the necessary input files will be written to disk in a
        temporary folder. A CalcInfo instance will be returned that contains lists of files that need to be copied to
        the remote machine before job submission, as well as file lists that are to be retrieved after job completion.

        :param folder: an aiida.common.folders.Folder to temporarily write files on disk
        :returns: CalcInfo instance
        """
        from aiida_codtools.cli.utils.parameters import CliParameters

        try:
            parameters = self.inputs.parameters.get_dict()
        except AttributeError:
            parameters = {}

        self._validate_resources()

        cli_parameters = copy.deepcopy(self._default_cli_parameters)
        cli_parameters.update(parameters)

        codeinfo = datastructures.CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.cmdline_params = CliParameters.from_dictionary(cli_parameters).get_list()
        codeinfo.stdin_name = self.options.input_filename
        codeinfo.stdout_name = self.options.output_filename
        codeinfo.stderr_name = self.options.error_filename

        calcinfo = datastructures.CalcInfo()
        calcinfo.uuid = str(self.uuid)
        calcinfo.codes_info = [codeinfo]
        calcinfo.retrieve_list = [self.options.output_filename, self.options.error_filename]
        calcinfo.local_copy_list = [(self.inputs.cif.uuid, self.inputs.cif.filename, self.options.input_filename)]
        calcinfo.remote_copy_list = []

        return calcinfo
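
`CliParameters` belongs to aiida-codtools and its implementation is not shown here; the sketch below only illustrates the general idea of flattening a parameters dictionary into `cmdline_params` and is not the actual code.

# Illustration only: NOT the aiida-codtools CliParameters implementation.
def dict_to_cmdline_params(parameters):
    """Flatten a parameters dictionary into a flat list of CLI arguments."""
    params = []
    for key, value in parameters.items():
        dash = '-' if len(key) == 1 else '--'
        if value is True:
            params.append(f'{dash}{key}')            # bare flag
        elif value is False or value is None:
            continue                                 # dropped entirely
        else:
            params.extend([f'{dash}{key}', str(value)])
    return params


print(dict_to_cmdline_params({'use-perl-parser': True, 'input-format': 'cif'}))
# ['--use-perl-parser', '--input-format', 'cif']
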
Example No. 15
    def prepare_for_submission(self, folder):
        """
        Create input files.

        :param folder: an `aiida.common.folders.Folder` where the plugin should temporarily place all files needed by
            the calculation.
        :return: `aiida.common.datastructures.CalcInfo` instance
        """

        # set up the mkm file
        with folder.open(self.inputs.mkm_filename.value, 'w', encoding='utf8') as handle:

            ## Writing the reaction conditions
            handle.write(f"scaler = '{self.inputs.scaler.value}' \n")
            handle.write(f'rxn_expressions = {self.inputs.rxn_expressions.get_list()} \n')
            handle.write(f'surface_names = {self.inputs.surface_names.get_list()} \n')
            handle.write(f'descriptor_names = {self.inputs.descriptor_names.get_list()} \n')
            handle.write(f'descriptor_ranges = {self.inputs.descriptor_ranges.get_list()} \n')
            handle.write(f'resolution = {self.inputs.resolution.value} \n')
            handle.write(f'temperature = {self.inputs.temperature.value} \n')
            handle.write(f'species_definitions = {self.inputs.species_definitions.get_dict()} \n')
            handle.write(f"data_file = '{self.inputs.data_file.value}' \n")
            handle.write(f"input_file = '{self.inputs.energies.filename}' \n")
            handle.write(f"gas_thermo_mode = '{self.inputs.gas_thermo_mode.value}' \n")
            handle.write(f"adsorbate_thermo_mode = '{self.inputs.adsorbate_thermo_mode.value}' \n")
            # handle.write('ideal_gas_params = {d} \n'.format(d=self.inputs.ideal_gas_params.get_dict()))
            handle.write(f'scaling_constraint_dict = {self.inputs.scaling_constraint_dict.get_dict()} \n')

            ## Only related to electrochemistry
            if self.inputs.electrocatal.value == True: #pylint: disable=singleton-comparison
                if self.inputs.scaler.value == 'GeneralizedLinearScaler': #pylint: disable=singleton-comparison
                    handle.write(f'voltage = {self.inputs.voltage.value} \n')
                    handle.write(f'pH = {self.inputs.pH.value} \n')

                elif self.inputs.scaler.value != 'GeneralizedLinearScaler':
                    handle.write(f"potential_reference_scale = '{self.inputs.potential_reference_scale.value}' \n")
                    handle.write(f'extrapolated_potential = {self.inputs.extrapolated_potential.value} \n')
                    handle.write(f'voltage_diff_drop = {self.inputs.voltage_diff_drop.value} \n')
                    handle.write(f'sigma_input = {self.inputs.sigma_input.get_list()} \n')
                    handle.write(f'Upzc = {self.inputs.Upzc.value} \n')

                handle.write(f'beta = {self.inputs.beta.value} \n')
                for val in self.inputs.electrochemical_thermo_mode.get_list():
                    handle.write(f"electrochemical_thermo_mode = '{val}' \n")

            ## Write numerical data last
            handle.write(f'decimal_precision = {self.inputs.decimal_precision.value} \n')
            handle.write(f'tolerance = {self.inputs.tolerance.value} \n')
            handle.write(f'max_rootfinding_iterations = {self.inputs.max_rootfinding_iterations.value} \n')
            handle.write(f'max_bisections = {self.inputs.max_bisections.value} \n')
            handle.write(f"numerical_solver = '{self.inputs.numerical_solver.value}' \n")


        # write the simplest run command
        with folder.open(self.options.input_filename, 'w', encoding='utf8') as handle:
            handle.write('from catmap import ReactionModel \n')
            handle.write(f"mkm_file = '{self.inputs.mkm_filename.value}' \n")
            handle.write('model = ReactionModel(setup_file=mkm_file) \n')
            handle.write("model.output_variables += ['production_rate'] \n")
            handle.write('model.run() \n')

        codeinfo = datastructures.CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.stdout_name = self.options.output_filename
        codeinfo.stdin_name = self.options.input_filename
        codeinfo.withmpi = self.inputs.metadata.options.withmpi

        # Prepare a `CalcInfo` to be returned to the engine
        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = [
            (self.inputs.energies.uuid, self.inputs.energies.filename, self.inputs.energies.filename),
        ]
        calcinfo.retrieve_list = [self.metadata.options.output_filename, self.inputs.data_file.value]

        return calcinfo
Example No. 16
    def prepare_for_submission(self, folder):
        """Create the input files from the input nodes passed to this instance of the `CalcJob`.

        :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        if 'settings' in self.inputs:
            settings = _uppercase_dict(self.inputs.settings.get_dict(), dict_name='settings')
        else:
            settings = {}

        # Check that a pseudo potential was specified for each kind present in the `StructureData`
        kinds = [kind.name for kind in self.inputs.structure.kinds]
        if set(kinds) != set(self.inputs.pseudos.keys()):
            raise exceptions.InputValidationError(
                'Mismatch between the defined pseudos and the list of kinds of the structure.\n'
                'Pseudos: {};\nKinds: {}'.format(', '.join(list(self.inputs.pseudos.keys())), ', '.join(list(kinds))))

        local_copy_list = []
        remote_copy_list = []
        remote_symlink_list = []

        # Create the subfolder that will contain the pseudopotentials
        folder.get_subfolder(self._PSEUDO_SUBFOLDER, create=True)
        # Create the subfolder for the output data (sometimes Quantum ESPRESSO codes crash if the folder does not exist)
        folder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

        # If present, add also the Van der Waals table to the pseudo dir. Note that the name of the table is not checked
        # but should be the one expected by Quantum ESPRESSO.
        if 'vdw_table' in self.inputs:
            uuid = self.inputs.vdw_table.uuid
            src_path = self.inputs.vdw_table.filename
            dst_path = os.path.join(self._PSEUDO_SUBFOLDER, self.inputs.vdw_table.filename)
            local_copy_list.append((uuid, src_path, dst_path))

        if 'hubbard_file' in self.inputs:
            uuid = self.inputs.hubbard_file.uuid
            src_path = self.inputs.hubbard_file.filename
            dst_path = self.input_file_name_hubbard_file
            local_copy_list.append((uuid, src_path, dst_path))

        arguments = [
            self.inputs.parameters,
            settings,
            self.inputs.pseudos,
            self.inputs.structure,
        ]
        if self._use_kpoints:
            arguments.append(self.inputs.kpoints)
        input_filecontent, local_copy_pseudo_list = self._generate_PWCPinputdata(*arguments)
        local_copy_list += local_copy_pseudo_list

        with folder.open(self.metadata.options.input_filename, 'w') as handle:
            handle.write(input_filecontent)

        # operations for restart
        symlink = settings.pop('PARENT_FOLDER_SYMLINK', self._default_symlink_usage)  # a boolean
        if symlink:
            if 'parent_folder' in self.inputs:
                # I put the symlink to the old parent ./out folder
                remote_symlink_list.append((
                    self.inputs.parent_folder.computer.uuid,
                    os.path.join(self.inputs.parent_folder.get_remote_path(), self._restart_copy_from),
                    self._restart_copy_to
                ))
        else:
            # copy remote output dir, if specified
            if 'parent_folder' in self.inputs:
                remote_copy_list.append((
                    self.inputs.parent_folder.computer.uuid,
                    os.path.join(self.inputs.parent_folder.get_remote_path(), self._restart_copy_from),
                    self._restart_copy_to
                ))

        # Create an `.EXIT` file if `only_initialization` flag in `settings` is set to `True`
        if settings.pop('ONLY_INITIALIZATION', False):
            with folder.open('{}.EXIT'.format(self._PREFIX), 'w') as handle:
                handle.write('\n')

        # Check if specific inputs for the ENVIRON module were specified
        environ_namelist = settings.pop('ENVIRON', None)
        if environ_namelist is not None:
            if not isinstance(environ_namelist, dict):
                raise exceptions.InputValidationError('ENVIRON namelist should be specified as a dictionary')
            # We first add the environ flag to the command-line options (if not already present)
            try:
                if '-environ' not in settings['CMDLINE']:
                    settings['CMDLINE'].append('-environ')
            except KeyError:
                settings['CMDLINE'] = ['-environ']
            # To create a mapping from the species to an incremental fortran 1-based index
            # we use the alphabetical order as in the inputdata generation
            kind_names = sorted([kind.name for kind in self.inputs.structure.kinds])
            mapping_species = {kind_name: (index + 1) for index, kind_name in enumerate(kind_names)}

            with folder.open(self._ENVIRON_INPUT_FILE_NAME, 'w') as handle:
                handle.write('&ENVIRON\n')
                for k, v in sorted(six.iteritems(environ_namelist)):
                    handle.write(convert_input_to_namelist_entry(k, v, mapping=mapping_species))
                handle.write('/\n')

        # Check for the deprecated 'ALSO_BANDS' setting and if present fire a deprecation log message
        also_bands = settings.pop('ALSO_BANDS', None)
        if also_bands:
            self.node.logger.warning(
                "The '{}' setting is deprecated as bands are now parsed by default. "
                "If you do not want the bands to be parsed set the '{}' to True {}. "
                'Note that the eigenvalue.xml files are also no longer stored in the repository'
                .format('also_bands', 'no_bands', type(self))
            )

        calcinfo = datastructures.CalcInfo()

        calcinfo.uuid = str(self.uuid)
        # Empty command line by default
        cmdline_params = settings.pop('CMDLINE', [])
        # We do not set calcinfo.stdin_name and instead pass the input file via cmdline_params:
        # this way `mpirun ... pw.x ... < aiida.in` is replaced by `mpirun ... pw.x ... -in aiida.in`.
        # In the scheduler's _get_run_line, if cmdline_params is empty it simply falls back to
        # `< calcinfo.stdin_name`.
        calcinfo.cmdline_params = (list(cmdline_params) + ['-in', self.metadata.options.input_filename])

        codeinfo = datastructures.CodeInfo()
        codeinfo.cmdline_params = (list(cmdline_params) + ['-in', self.metadata.options.input_filename])
        codeinfo.stdout_name = self.metadata.options.output_filename
        codeinfo.code_uuid = self.inputs.code.uuid
        calcinfo.codes_info = [codeinfo]

        calcinfo.local_copy_list = local_copy_list
        calcinfo.remote_copy_list = remote_copy_list
        calcinfo.remote_symlink_list = remote_symlink_list

        # Retrieve by default the output file and the xml file
        calcinfo.retrieve_list = []
        calcinfo.retrieve_list.append(self.metadata.options.output_filename)
        calcinfo.retrieve_list.extend(self.xml_filepaths)
        calcinfo.retrieve_list += settings.pop('ADDITIONAL_RETRIEVE_LIST', [])
        calcinfo.retrieve_list += self._internal_retrieve_list

        # Retrieve the k-point directories with the xml files to the temporary folder
        # to parse the band eigenvalues and occupations but not to have to save the raw files
        # if and only if the 'no_bands' key was not set to true in the settings
        no_bands = settings.pop('NO_BANDS', False)
        if no_bands is False:
            xmlpaths = os.path.join(self._OUTPUT_SUBFOLDER, self._PREFIX + '.save', 'K*[0-9]', 'eigenval*.xml')
            calcinfo.retrieve_temporary_list = [[xmlpaths, '.', 2]]

        # We might still have parser options in the settings dictionary: pop them.
        _pop_parser_options(self, settings)

        if settings:
            unknown_keys = ', '.join(list(settings.keys()))
            raise exceptions.InputValidationError('`settings` contained unexpected keys: {}'.format(unknown_keys))

        return calcinfo
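
The three-element entries accepted by `retrieve_temporary_list` (and `retrieve_list`) are directives of the form (remote glob, destination inside the retrieved folder, number of trailing path levels to keep). An isolated illustration with placeholder paths, assuming aiida-core's CalcInfo data structure:

# Standalone illustration of a glob retrieve directive (paths are placeholders).
from aiida.common import datastructures

calcinfo = datastructures.CalcInfo()
calcinfo.retrieve_temporary_list = [
    # keep the last two path levels, e.g. 'K00001/eigenval1.xml'
    ['out/prefix.save/K*[0-9]/eigenval*.xml', '.', 2],
]
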
Example No. 17
    def prepare_for_submission(self, folder):
        """
        Create input files.

        :param folder: an `aiida.common.folders.Folder` where the plugin should temporarily place all files needed by
            the calculation.
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        ### SETUP ###
        local_copy_list = []

        ### INPUT CHECK ###
        # PSEUDOS
        for kind in self.inputs.structure.get_kind_names():
            if kind not in self.inputs.pseudos:
                raise ValueError(f'no pseudo available for element {kind}')
            elif not isinstance(self.inputs.pseudos[kind], (Psp8Data, JthXmlData)):
                raise ValueError(f'pseudo for element {kind} is not of type Psp8Data or JthXmlData')

        # KPOINTS
        if 'ngkpt' in self.inputs.parameters.keys():
            raise ValueError('`ngkpt` should not be specified in input parameters')
        if 'kptopt' in self.inputs.parameters.keys():
            raise ValueError('`kptopt` should not be specified in input parameters')

        ### PREPARATION ###
        # PSEUDOS
        folder.get_subfolder(self._DEFAULT_PSEUDO_SUBFOLDER, create=True)
        for kind in self.inputs.structure.get_kind_names():
            psp = self.inputs.pseudos[kind]
            local_copy_list.append((psp.uuid, psp.filename, self._DEFAULT_PSEUDO_SUBFOLDER + kind + '.psp8'))

        # KPOINTS
        kpoints_mesh = self.inputs.kpoints.get_kpoints_mesh()[0]

        ### INPUTS ###
        input_parameters = self.inputs.parameters.get_dict()
        shiftk = input_parameters.pop('shiftk', [0.0, 0.0, 0.0])

        # TODO: There must be a better way to do this
        # maybe we can convert the PseudoPotential objects into pymatgen Pseudo objects?
        znucl = structure_to_abivars(self.inputs.structure.get_pymatgen())['znucl']
        pseudo_parameters = {
            'pseudos': '"' + ', '.join([Element.from_Z(Z).symbol + '.psp8' for Z in znucl]) + '"',
            'pp_dirpath': '"' + self._DEFAULT_PSEUDO_SUBFOLDER + '"'
        }

        input_parameters = {**input_parameters, **pseudo_parameters}

        abin = AbinitInput(
            structure=self.inputs.structure.get_pymatgen(),
            pseudos=HGH_TABLE,
            abi_kwargs=input_parameters
        )
        abin.set_kmesh(
            ngkpt=kpoints_mesh,
            shiftk=shiftk
        )

        with io.open(folder.get_abs_path(self._DEFAULT_INPUT_FILE), mode='w', encoding='utf-8') as f:
            f.write(abin.to_string(with_pseudos=False))

        ### CODE ###
        codeinfo = datastructures.CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.cmdline_params = [self.options.input_filename]
        codeinfo.stdout_name = self.metadata.options.output_filename
        codeinfo.withmpi = self.inputs.metadata.options.withmpi

        ### CALC INFO ###
        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.stdin_name = self.options.input_filename
        calcinfo.stdout_name = self.options.output_filename
        calcinfo.retrieve_list = [self._DEFAULT_OUTPUT_FILE, self._DEFAULT_GSR_FILE_NAME, self._DEFAULT_TRAJECT_FILE_NAME]
        calcinfo.remote_symlink_list = []
        calcinfo.remote_copy_list = []
        calcinfo.local_copy_list = local_copy_list
        if 'parent_calc_folder' in self.inputs:
            comp_uuid = self.inputs.parent_calc_folder.computer.uuid
            remote_path = self.inputs.parent_calc_folder.get_remote_path()
            copy_info = (comp_uuid, remote_path, self._DEFAULT_PARENT_CALC_FLDR_NAME)
            # If running on the same computer - make a symlink.
            if self.inputs.code.computer.uuid == comp_uuid:
                calcinfo.remote_symlink_list.append(copy_info)
            # If not - copy the folder.
            else:
                calcinfo.remote_copy_list.append(copy_info)

        return calcinfo
Example No. 18
    def prepare_for_submission(self, folder):
        """Create the input file(s) from the input nodes.

        :param folder: an `aiida.common.folders.Folder` where the plugin should temporarily place all files needed by
            the calculation.
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        # process the settings so that capitalization isn't an issue
        settings = uppercase_dict(self.inputs.settings.get_dict()) if 'settings' in self.inputs else {}

        # validate the input parameters and pseudopotentials
        self._validate_parameters()
        self._validate_pseudos()

        # create lists which specify files to copy and symlink
        local_copy_list = []
        remote_copy_list = []
        remote_symlink_list = []

        # create the subfolder which will contain the pseudopotential files
        folder.get_subfolder(self._PSEUDO_SUBFOLDER, create=True)

        # set the input and stdout filenames from the prefix
        self.metadata.options.input_filename = f'{self.metadata.options.prefix}.in'
        self.metadata.options.output_filename = f'{self.metadata.options.prefix}.out'

        # generate the input file content and list of pseudopotential files to copy
        arguments = [
            self.inputs.parameters,
            self.inputs.pseudos,
            self.inputs.structure,
            self.inputs.kpoints
        ]
        input_filecontent, local_copy_pseudo_list = self._generate_inputdata(*arguments)

        # merge the pseudopotential copy list with the overall copylist and write the input file
        local_copy_list += local_copy_pseudo_list
        with io.open(folder.get_abs_path(self.metadata.options.input_filename), mode='w', encoding='utf-8') as stream:
            stream.write(input_filecontent)

        # list the files to copy or symlink in the case of a restart
        if 'parent_folder' in self.inputs:
            # symlink by default if on the same computer, otherwise copy by default
            same_computer = self.inputs.code.computer.uuid == self.inputs.parent_folder.computer.uuid
            if settings.pop('PARENT_FOLDER_SYMLINK', same_computer):
                remote_symlink_list.append((
                    self.inputs.parent_folder.computer.uuid,
                    os.path.join(self.inputs.parent_folder.get_remote_path(), '*'),
                    './')
                )
            else:
                remote_copy_list.append((
                    self.inputs.parent_folder.computer.uuid,
                    os.path.join(self.inputs.parent_folder.get_remote_path(), '*'),
                    './')
                )

        # generate the commandline parameters
        cmdline_params = self._generate_cmdline_params(settings)

        # generate list of files to retrieve from wherever the calculation is run
        retrieve_list = self._generate_retrieve_list(self.inputs.parameters, settings)

        # set up the code info to pass to `CalcInfo`
        codeinfo = datastructures.CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.cmdline_params = cmdline_params
        codeinfo.stdout_name = self.metadata.options.output_filename
        codeinfo.withmpi = self.inputs.metadata.options.withmpi

        # set up the calc info so AiiDA knows what to do with everything
        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.stdin_name = self.metadata.options.input_filename
        calcinfo.stdout_name = self.metadata.options.output_filename
        calcinfo.retrieve_list = retrieve_list
        calcinfo.remote_symlink_list = remote_symlink_list
        calcinfo.remote_copy_list = remote_copy_list
        calcinfo.local_copy_list = local_copy_list

        return calcinfo
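
A hypothetical restart launch for a plugin like Example No. 18: passing the `remote_folder` output of a previous run as `parent_folder` activates the symlink/copy branch above. The entry point, node pk, and resources below are placeholders.

# Hypothetical restart launch script (entry point and pk are placeholders).
from aiida import engine, orm
from aiida.plugins import CalculationFactory

MyCalculation = CalculationFactory('my.plugin')            # assumed entry point

previous = orm.load_node(1234)                             # a finished calculation of this plugin
builder = MyCalculation.get_builder()
builder.code = previous.inputs.code
builder.structure = previous.inputs.structure
builder.parameters = previous.inputs.parameters
builder.kpoints = previous.inputs.kpoints
# pseudos omitted for brevity; they would be set exactly as in a fresh run
builder.parent_folder = previous.outputs.remote_folder     # triggers the copy/symlink logic above
builder.metadata.options.resources = {'num_machines': 1}

result = engine.run(builder)
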
Example No. 19
    def prepare_for_submission(self, folder):
        """
        Create input files.

        :param folder: an `aiida.common.folders.Folder` where the plugin should temporarily place all files needed by
            the calculation.
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        # check that either structure or structurefile are set
        # if both are set, fail.
#        if self.inputs.structurefile is None and self.inputs.structure is None:
#            raise exceptions.InputValidationError("either structure or structurefile must be set")
#        if self.inputs.structurefile is not None and self.inputs.structure is not None:
#            raise exceptions.InputValidationError("Only one of structure or structurefile must be set")

        # somehow aiida sends back unicode strings here
        dico = BigDFT_files.Inputfile()
        dico.update(self.inputs.parameters.dict)

        bigdft_calc = PluginSystemCalculator()
        local_copy_list = []
        # if the structure is not already inside input dict
        if 'posinp' not in dico:
            posinp_filename = self.inputs.structurefile.value
            if self.inputs.structure is not None:
                print("writing input posinp file")
                posinp_string = self.inputs.structure._prepare_xyz()[0]
                if "jobname" not in self.inputs.metadata.options:
                    posinp_filename = self._POSINP_FILE_NAME
                else:
                    posinp_filename = self.inputs.metadata.options.jobname + ".xyz"
    #            bigdft_calc.update_global_options(units="angstroem")
                posinp_file = open(posinp_filename, "wb")

                posinp_file.write(posinp_string)
                posinp_file.close()
            posinp_filedata = SinglefileData(
                file=os.path.abspath(posinp_filename)).store()

    #        BigDFT_input.set_atomic_positions(dico, posinp_filename)
    #        bigdft_calc._run_options(posinp={})

            local_copy_list = [
                (posinp_filedata.uuid, posinp_filedata.filename, posinp_filedata.filename)]

        # setup pseudopotentials if needed
        if self.inputs.pseudos is not None:
            for filename in self.inputs.pseudos:
                pseudo_filedata = SinglefileData(
                    file=os.path.abspath(filename)).store()
                local_copy_list.append(
                    (pseudo_filedata.uuid, pseudo_filedata.filename, pseudo_filedata.filename))
        # generate yaml input file from dict and whatever

        if "jobname" in self.inputs.metadata.options:
            bigdft_calc.update_global_options(
                name=self.inputs.metadata.options.jobname)
        bigdft_calc._run_options(input=dico, run_dir=folder.abspath)
        bigdft_calc.pre_processing()
        if "jobname" not in self.inputs.metadata.options:
            input_filename = self._INPUT_FILE_NAME
        else:
            input_filename = self.inputs.metadata.options.jobname + ".yaml"
        input_filedata = SinglefileData(
            file=folder.get_abs_path(input_filename)).store()
        local_copy_list.append(
            (input_filedata.uuid, input_filedata.filename, input_filename))

        codeinfo = datastructures.CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        outfile = self.inputs.metadata.options.output_filename
        timefile = self._TIMING_FILE_NAME
        if "jobname" in self.inputs.metadata.options:
            outfile = "log-" + self.inputs.metadata.options.jobname + ".yaml"
            timefile = "time-" + self.inputs.metadata.options.jobname + ".yaml"

#        codeinfo.stdout_name = outfile
        codeinfo.withmpi = self.inputs.metadata.options.withmpi
        if "jobname" in self.inputs.metadata.options:
            codeinfo.cmdline_params = ["--name=" +
                                       self.inputs.metadata.options.jobname]
        #local_copy_list = []
        # Prepare a `CalcInfo` to be returned to the engine
        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = local_copy_list
        calcinfo.retrieve_list = [outfile,
                                  timefile,
                                  "forces_posinp.yaml",
                                  "final_posinp.yaml",
                                  ["./debug/bigdft-err*", ".", 2]]
        calcinfo.retrieve_list.extend(self.inputs.extra_retrieved_files)
        return calcinfo
Example No. 20
    def prepare_for_submission(self, folder):
        """
        Create input file: inpsd.dat

        :param folder: an `aiida.common.folders.Folder` where the plugin should temporarily place all files needed by
            the calculation.
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        # Create input file: inpsd.dat
        input_dmdata = self.inputs.dmdata
        input_jij = self.inputs.jij
        input_momfile = self.inputs.momfile
        input_posfile = self.inputs.posfile
        input_qfile = self.inputs.qfile

        input_simid = self.inputs.simid
        input_ncell = self.inputs.ncell
        input_BC = self.inputs.BC
        input_cell = self.inputs.cell
        input_do_prnstruct = self.inputs.do_prnstruct
        input_maptype = self.inputs.maptype
        input_SDEalgh = self.inputs.SDEalgh
        input_Initmag = self.inputs.Initmag
        input_ip_mode = self.inputs.ip_mode
        input_qm_svec = self.inputs.qm_svec
        input_qm_nvec = self.inputs.qm_nvec
        input_mode = self.inputs.mode
        input_temp = self.inputs.temp
        input_damping = self.inputs.damping
        input_Nstep = self.inputs.Nstep
        input_timestep = self.inputs.timestep
        input_qpoints = self.inputs.qpoints
        input_plotenergy = self.inputs.plotenergy
        input_do_avrg = self.inputs.do_avrg

        input_retrieve_list_name = self.inputs.retrieve_list_name
        # write inpsd.dat
        # it seems we don't need to put it in local_copy_list?
        with folder.open(self.options.input_filename, 'a+') as f:
            f.write(f'simid    {input_simid.value}\n')

            f.write(f"ncell   {input_ncell.get_array('matrix')[0]}  {input_ncell.get_array('matrix')[1]}  {input_ncell.get_array('matrix')[2]} \n")
            # we assume the default array name is "matrix"
            #np.savetxt(f, input_ncell.get_array('matrix'))

            f.write(f'BC    {input_BC.value}\n')

            f.write("cell   ")
            np.savetxt(f, input_cell.get_array('matrix'))

            f.write(f'do_prnstruct    {input_do_prnstruct.value}\n')

            f.write(f'posfile    ./{input_posfile.filename}\n')

            f.write(f'exchange    ./{input_jij.filename}\n')

            f.write(f'momfile    ./{input_momfile.filename}\n')

            f.write(f'dm    ./{input_dmdata.filename}\n')

            f.write(f'maptype    {input_maptype.value}\n')

            f.write(f'SDEalgh    {input_SDEalgh.value}\n')

            f.write(f'Initmag    {input_Initmag.value}\n')

            f.write(f'ip_mode    {input_ip_mode.value}\n')

            f.write(f"qm_svec   {input_qm_svec.get_array('matrix')[0]}  {input_qm_svec.get_array('matrix')[1]}  {input_qm_svec.get_array('matrix')[2]} \n")

            f.write(f"qm_nvec   {input_qm_nvec.get_array('matrix')[0]}  {input_qm_nvec.get_array('matrix')[1]}  {input_qm_nvec.get_array('matrix')[2]} \n")

            f.write(f'mode    {input_mode.value}\n')

            f.write(f'temp    {input_temp.value}\n')

            f.write(f'damping    {input_damping.value}\n')

            f.write(f'Nstep    {input_Nstep.value}\n')

            f.write(f'timestep    {input_timestep.value}\n')

            f.write(f'qpoints    {input_qpoints.value}\n')

            f.write(f'qfile    ./{input_qfile.filename}\n')

            f.write(f'plotenergy    {input_plotenergy.value}\n')

            f.write(f'do_avrg    {input_do_avrg.value}\n')

        codeinfo = datastructures.CodeInfo()
        codeinfo.cmdline_params = []  # nothing is needed here for SD
        codeinfo.code_uuid = self.inputs.code.uuid
        #codeinfo.stdout_name = self.metadata.options.output_filename
        #codeinfo.withmpi = self.inputs.metadata.options.withmpi

        # Prepare a `CalcInfo` to be returned to the engine
        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = [
            (self.inputs.dmdata.uuid, self.inputs.dmdata.filename,
             self.inputs.dmdata.filename),
            (self.inputs.jij.uuid, self.inputs.jij.filename, self.inputs.jij.filename),
            (self.inputs.momfile.uuid, self.inputs.momfile.filename,
             self.inputs.momfile.filename),
            (self.inputs.posfile.uuid, self.inputs.posfile.filename,
             self.inputs.posfile.filename),
            (self.inputs.qfile.uuid, self.inputs.qfile.filename,
             self.inputs.qfile.filename),
        ]
        #calc_info.remote_copy_list[(self.inputs.parent_folder.computer.uuid, 'output_folder', 'restart_folder')]
        calcinfo.retrieve_list = input_retrieve_list_name.get_list()
        return calcinfo
Example No. 21
    def prepare_for_submission(self, folder):
        """Prepare the calculation job for submission by transforming input nodes into input files.

        In addition to the input files being written to the sandbox folder, a `CalcInfo` instance will be returned that
        contains lists of files that need to be copied to the remote machine before job submission, as well as file
        lists that are to be retrieved after job completion.

        :param folder: a sandbox folder to temporarily write files on disk.
        :return: :py:class:`~aiida.common.datastructures.CalcInfo` instance.
        """
        # pylint: disable=too-many-branches,too-many-statements
        if 'settings' in self.inputs:
            settings = _uppercase_dict(self.inputs.settings.get_dict(),
                                       dict_name='settings')
        else:
            settings = {}

        following_text = self._get_following_text()

        # Put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
        if 'parameters' in self.inputs:
            parameters = _uppercase_dict(self.inputs.parameters.get_dict(),
                                         dict_name='parameters')
            parameters = {
                k: _lowercase_dict(v, dict_name=k)
                for k, v in parameters.items()
            }
        else:
            parameters = {}

        # =================== NAMELISTS AND CARDS ========================
        try:
            namelists_toprint = settings.pop('NAMELISTS')
            if not isinstance(namelists_toprint, list):
                raise exceptions.InputValidationError(
                    "The 'NAMELISTS' value, if specified in the settings input node, must be a list of strings"
                )
        except KeyError:  # list of namelists not specified; do automatic detection
            namelists_toprint = self._default_namelists

        parameters = self.set_blocked_keywords(parameters)
        parameters = self.filter_namelists(parameters, namelists_toprint)
        file_content = self.generate_input_file(parameters)
        file_content += '\n' + following_text
        input_filename = self.inputs.metadata.options.input_filename
        with folder.open(input_filename, 'w') as infile:
            infile.write(file_content)

        symlink = settings.pop('PARENT_FOLDER_SYMLINK', False)

        remote_copy_list = []
        local_copy_list = []
        remote_symlink_list = []

        ptr = remote_symlink_list if symlink else remote_copy_list

        # copy remote output dir, if specified
        parent_calc_folder = self.inputs.get('parent_folder', None)
        if parent_calc_folder is not None:
            if isinstance(parent_calc_folder, RemoteData):
                parent_calc_out_subfolder = settings.pop(
                    'PARENT_CALC_OUT_SUBFOLDER', self._INPUT_SUBFOLDER)
                ptr.append((parent_calc_folder.computer.uuid,
                            os.path.join(parent_calc_folder.get_remote_path(),
                                         parent_calc_out_subfolder),
                            self._OUTPUT_SUBFOLDER))
            elif isinstance(parent_calc_folder, FolderData):
                for filename in parent_calc_folder.list_object_names():
                    local_copy_list.append(
                        (parent_calc_folder.uuid, filename,
                         os.path.join(self._OUTPUT_SUBFOLDER, filename)))
            elif isinstance(parent_calc_folder, SinglefileData):
                single_file = parent_calc_folder
                local_copy_list.append((single_file.uuid, single_file.filename,
                                        single_file.filename))

        codeinfo = datastructures.CodeInfo()
        codeinfo.cmdline_params = settings.pop('CMDLINE', [])
        codeinfo.stdin_name = self.inputs.metadata.options.input_filename
        codeinfo.stdout_name = self.inputs.metadata.options.output_filename
        codeinfo.code_uuid = self.inputs.code.uuid

        calcinfo = datastructures.CalcInfo()
        calcinfo.uuid = str(self.uuid)
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = local_copy_list
        calcinfo.remote_copy_list = remote_copy_list
        calcinfo.remote_symlink_list = remote_symlink_list

        # Retrieve by default the output file and the xml file
        calcinfo.retrieve_list = []
        calcinfo.retrieve_list.append(
            self.inputs.metadata.options.output_filename)
        settings_retrieve_list = settings.pop('ADDITIONAL_RETRIEVE_LIST', [])
        calcinfo.retrieve_list += settings_retrieve_list
        calcinfo.retrieve_list += self._internal_retrieve_list

        calcinfo.retrieve_singlefile_list = self._retrieve_singlefile_list

        # We might still have parser options in the settings dictionary: pop them.
        _pop_parser_options(self, settings)

        if settings:
            unknown_keys = ', '.join(list(settings.keys()))
            raise exceptions.InputValidationError(
                f'`settings` contained unexpected keys: {unknown_keys}')

        return calcinfo
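The snippet above (and several below) relies on helper functions such as _uppercase_dict and _lowercase_dict to normalize namelist keys while rejecting case-insensitive duplicates. The following is only a minimal sketch of what such helpers could look like; the real implementations shipped with the plugin may differ, for example in the exact exception raised.

from collections import Counter

from aiida.common import exceptions


def _case_transform_dict(dictionary, dict_name, transform):
    """Return a copy of `dictionary` with keys passed through `transform`, rejecting case clashes (sketch)."""
    if not isinstance(dictionary, dict):
        raise TypeError('{} must be a dictionary'.format(dict_name))
    new_dict = {transform(str(key)): value for key, value in dictionary.items()}
    if len(new_dict) != len(dictionary):
        counts = Counter(transform(str(key)) for key in dictionary)
        duplicates = [key for key, count in counts.items() if count > 1]
        raise exceptions.InputValidationError(
            '{} contains keys that clash after case normalization: {}'.format(dict_name, duplicates))
    return new_dict


def _uppercase_dict(dictionary, dict_name):
    """Return `dictionary` with all keys uppercased (sketch)."""
    return _case_transform_dict(dictionary, dict_name, str.upper)


def _lowercase_dict(dictionary, dict_name):
    """Return `dictionary` with all keys lowercased (sketch)."""
    return _case_transform_dict(dictionary, dict_name, str.lower)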
Example No. 22
    def prepare_for_submission(self, folder):  # pylint: disable=too-many-statements,too-many-branches
        """Prepare the calculation job for submission by transforming input nodes into input files.

        In addition to the input files being written to the sandbox folder, a `CalcInfo` instance will be returned that
        contains lists of files that need to be copied to the remote machine before job submission, as well as file
        lists that are to be retrieved after job completion.

        :param folder: a sandbox folder to temporarily write files on disk.
        :return: :py:class:`~aiida.common.datastructures.CalcInfo` instance.
        """

        def test_offset(offset):
            """Check if the grid has an offset."""
            if any([i != 0. for i in offset]):
                raise NotImplementedError(
                    'Computation of electron-phonon on a mesh with non zero offset is not implemented, '
                    'at the level of epw.x')

        # pylint: disable=too-many-statements,too-many-branches
        local_copy_list = []
        remote_copy_list = []
        remote_symlink_list = []

        if 'settings' in self.inputs:
            settings = _uppercase_dict(self.inputs.settings.get_dict(), dict_name='settings')
        else:
            settings = {}

        # Copy nscf folder
        parent_folder_nscf = self.inputs.parent_folder_nscf
        parent_calc_nscf = parent_folder_nscf.creator

        if parent_calc_nscf is None:
            raise exceptions.NotExistent('parent_folder<{}> has no parent calculation'.format(parent_folder_nscf.pk))

        # Also, the parent calculation must be on the same computer
        if not self.node.computer.uuid == parent_calc_nscf.computer.uuid:
            raise exceptions.InputValidationError(
                'Calculation has to be launched on the same computer as that of the parent: {}'.format(
                    parent_calc_nscf.computer.get_name()))

        # By default, the parent output folder is ./out
        parent_calc_out_subfolder_nscf = parent_calc_nscf.process_class._OUTPUT_SUBFOLDER # pylint: disable=protected-access

        # Now phonon folder
        parent_folder_ph = self.inputs.parent_folder_ph
        parent_calc_ph = parent_folder_ph.creator

        # Also, the parent calculation must be on the same computer
        if not self.node.computer.uuid == parent_calc_ph.computer.uuid:
            raise exceptions.InputValidationError(
                'Calculation has to be launched on the same computer as that of the parent: {}'.format(
                    parent_calc_ph.computer.get_name()))

        # I put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
        parameters = _uppercase_dict(self.inputs.parameters.get_dict(), dict_name='parameters')
        parameters = {k: _lowercase_dict(v, dict_name=k) for k, v in parameters.items()}

        if 'INPUTEPW' not in parameters:
            raise exceptions.InputValidationError('required namelist INPUTEPW not specified')

        parameters['INPUTEPW']['outdir'] = self._OUTPUT_SUBFOLDER
        parameters['INPUTEPW']['iverbosity'] = 1
        parameters['INPUTEPW']['prefix'] = self._PREFIX

        try:
            mesh, offset = self.inputs.qpoints.get_kpoints_mesh()
            test_offset(offset)
            parameters['INPUTEPW']['nq1'] = mesh[0]
            parameters['INPUTEPW']['nq2'] = mesh[1]
            parameters['INPUTEPW']['nq3'] = mesh[2]
            postpend_text = None
        except NotImplementedError as exception:
            raise exceptions.InputValidationError('Cannot get the coarse q-point grid') from exception

        try:
            mesh, offset = self.inputs.kpoints.get_kpoints_mesh()
            test_offset(offset)
            parameters['INPUTEPW']['nk1'] = mesh[0]
            parameters['INPUTEPW']['nk2'] = mesh[1]
            parameters['INPUTEPW']['nk3'] = mesh[2]
            postpend_text = None
        except NotImplementedError as exception:
            raise exceptions.InputValidationError('Cannot get the coarse k-point grid') from exception

        try:
            mesh, offset = self.inputs.qfpoints.get_kpoints_mesh()
            test_offset(offset)
            parameters['INPUTEPW']['nqf1'] = mesh[0]
            parameters['INPUTEPW']['nqf2'] = mesh[1]
            parameters['INPUTEPW']['nqf3'] = mesh[2]
            postpend_text = None
        except NotImplementedError as exception:
            raise exceptions.InputValidationError('Cannot get the fine q-point grid') from exception

        try:
            mesh, offset = self.inputs.kfpoints.get_kpoints_mesh()
            test_offset(offset)
            parameters['INPUTEPW']['nkf1'] = mesh[0]
            parameters['INPUTEPW']['nkf2'] = mesh[1]
            parameters['INPUTEPW']['nkf3'] = mesh[2]
            postpend_text = None
        except NotImplementedError as exception:
            raise exceptions.InputValidationError('Cannot get the fine k-point grid') from exception


        # customized namelists, otherwise not present in the distributed epw code
        try:
            namelists_toprint = settings.pop('NAMELISTS')
            if not isinstance(namelists_toprint, list):
                raise exceptions.InputValidationError(
                    "The 'NAMELISTS' value, if specified in the settings input "
                    'node, must be a list of strings')
        except KeyError:  # list of namelists not specified in the settings; do automatic detection
            namelists_toprint = self._compulsory_namelists


        # create the save folder with dvscf and dyn files.
        folder.get_subfolder(self._FOLDER_SAVE, create=True)

        # List of IBZ q-points to be appended below the EPW namelists. To be removed once EPW no longer requires it.
        qibz_ar = []
        for key, value in sorted(parent_folder_ph.creator.outputs.output_parameters.get_dict().items()):
            if key.startswith('dynamical_matrix_'):
                qibz_ar.append(value['q_point'])

        qibz_node = orm.ArrayData()
        qibz_node.set_array('qibz', np.array(qibz_ar))

        list_of_points = qibz_node.get_array('qibz')
        # Number of q-points in the irreducible Brillouin zone.
        nqpt = len(list_of_points[:, 0])

        # add here the list of point coordinates
        if len(list_of_points) > 1:
            postpend_text = '{} cartesian\n'.format(len(list_of_points))
            for points in list_of_points:
                postpend_text += '{0:18.10f} {1:18.10f} {2:18.10f} \n'.format(*points)

        with folder.open(self.metadata.options.input_filename, 'w') as infile:
            for namelist_name in namelists_toprint:
                infile.write('&{0}\n'.format(namelist_name))
                # namelist content; set to {} if not present, so that we leave an empty namelist
                namelist = parameters.pop(namelist_name, {})
                for key, value in sorted(namelist.items()):
                    infile.write(convert_input_to_namelist_entry(key, value))
                infile.write('/\n')

            # add list of qpoints if required
            if postpend_text is not None:
                infile.write(postpend_text)

        if parameters:
            raise exceptions.InputValidationError(
                'The following namelists are specified in parameters, but are '
                'not valid namelists for the current type of calculation: '
                '{}'.format(','.join(list(parameters.keys()))))

        # copy the parent scratch
        symlink = settings.pop('PARENT_FOLDER_SYMLINK', self._default_symlink_usage)  # a boolean
        if symlink:
            # I create a symlink to each file/folder in the parent ./out
            folder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

            remote_symlink_list.append((
                parent_folder_nscf.computer.uuid,
                os.path.join(parent_folder_nscf.get_remote_path(), parent_calc_out_subfolder_nscf, '*'),
                self._OUTPUT_SUBFOLDER
            ))

        else:
            # here I copy the whole folder ./out
            remote_copy_list.append((
                parent_folder_nscf.computer.uuid,
                os.path.join(parent_folder_nscf.get_remote_path(), parent_calc_out_subfolder_nscf),
                self._OUTPUT_SUBFOLDER
            ))

        prefix = self._PREFIX

        for iqpt in range(1, nqpt+1):
            label = str(iqpt)
            tmp_path = os.path.join(self._FOLDER_DYNAMICAL_MATRIX, 'dynamical-matrix-0')
            remote_copy_list.append((
                parent_folder_ph.computer.uuid,
                os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                'save/'+prefix+'.dyn_q0'))
            tmp_path = os.path.join(self._FOLDER_DYNAMICAL_MATRIX, 'dynamical-matrix-'+label)
            remote_copy_list.append((
                parent_folder_ph.computer.uuid,
                os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                'save/'+prefix+'.dyn_q'+label))

            if iqpt == 1:
                tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/'+prefix+'.dvscf*')
                remote_copy_list.append((
                    parent_folder_ph.computer.uuid,
                    os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                    'save/'+prefix+'.dvscf_q'+label))
                tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/'+prefix+'.phsave')
                remote_copy_list.append((
                    parent_folder_ph.computer.uuid,
                    os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                    'save/'))
            else:
                tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/'+prefix+'.q_'+label+'/'+prefix+'.dvscf*')
                remote_copy_list.append((
                    parent_folder_ph.computer.uuid,
                    os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                    'save/'+prefix+'.dvscf_q'+label))

        codeinfo = datastructures.CodeInfo()
        codeinfo.cmdline_params = (list(settings.pop('CMDLINE', [])) + ['-in', self.metadata.options.input_filename])
        codeinfo.stdout_name = self.metadata.options.output_filename
        codeinfo.code_uuid = self.inputs.code.uuid

        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = local_copy_list
        calcinfo.remote_copy_list = remote_copy_list
        calcinfo.remote_symlink_list = remote_symlink_list

        # Retrieve by default the output file
        calcinfo.retrieve_list = []
        calcinfo.retrieve_list.append(self.metadata.options.output_filename)
        calcinfo.retrieve_list += settings.pop('ADDITIONAL_RETRIEVE_LIST', [])

        if settings:
            unknown_keys = ', '.join(list(settings.keys()))
            raise exceptions.InputValidationError('`settings` contained unexpected keys: {}'.format(unknown_keys))

        return calcinfo
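For context, here is a hedged usage sketch of how a calculation like the EPW one above is typically launched through the AiiDA process builder. The entry point string, code label and node pks are assumptions made purely for illustration.

from aiida import load_profile, orm
from aiida.engine import submit
from aiida.plugins import CalculationFactory

load_profile()

EpwCalculation = CalculationFactory('quantumespresso.epw')  # assumed entry point

builder = EpwCalculation.get_builder()
builder.code = orm.load_code('epw@my-cluster')  # hypothetical code label
builder.parameters = orm.Dict(dict={'INPUTEPW': {'elph': True}})

# Coarse and fine k/q grids, matching the inputs read in prepare_for_submission above
for name, mesh in (('qpoints', [2, 2, 2]), ('kpoints', [4, 4, 4]),
                   ('qfpoints', [8, 8, 8]), ('kfpoints', [16, 16, 16])):
    kpoints = orm.KpointsData()
    kpoints.set_kpoints_mesh(mesh)
    setattr(builder, name, kpoints)

# Remote folders of previously run nscf and ph calculations (hypothetical pks)
builder.parent_folder_nscf = orm.load_node(1234).outputs.remote_folder
builder.parent_folder_ph = orm.load_node(1235).outputs.remote_folder

builder.metadata.options.resources = {'num_machines': 1, 'num_mpiprocs_per_machine': 1}
node = submit(builder)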
Example No. 23
    def prepare_for_submission(self, folder):
        """Create the input files from the input nodes passed to this instance of the `CalcJob`.

        :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        if 'settings' in self.inputs:
            settings = _uppercase_dict(self.inputs.settings.get_dict(),
                                       dict_name='settings')
        else:
            settings = {}

        following_text = self._get_following_text()

        # Put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
        if 'parameters' in self.inputs:
            parameters = _uppercase_dict(self.inputs.parameters.get_dict(),
                                         dict_name='parameters')
            parameters = {
                k: _lowercase_dict(v, dict_name=k)
                for k, v in six.iteritems(parameters)
            }
        else:
            parameters = {}

        # Force default values for blocked keywords. NOTE: this is different from PW/CP
        for blocked in self._blocked_keywords:
            namelist = blocked[0].upper()
            key = blocked[1].lower()
            value = blocked[2]
            if namelist in parameters:
                if key in parameters[namelist]:
                    raise exceptions.InputValidationError(
                        "You cannot specify explicitly the '{}' key in the '{}' "
                        'namelist.'.format(key, namelist))
            else:
                parameters[namelist] = {}
            parameters[namelist][key] = value

        # =================== NAMELISTS AND CARDS ========================
        try:
            namelists_toprint = settings.pop('NAMELISTS')
            if not isinstance(namelists_toprint, list):
                raise exceptions.InputValidationError(
                    "The 'NAMELISTS' value, if specified in the settings input node, must be a list of strings"
                )
        except KeyError:  # list of namelists not specified; do automatic detection
            namelists_toprint = self._default_namelists

        input_filename = self.inputs.metadata.options.input_filename
        with folder.open(input_filename, 'w') as infile:
            for namelist_name in namelists_toprint:
                infile.write(u'&{0}\n'.format(namelist_name))
                # namelist content; set to {} if not present, so that we leave an empty namelist
                namelist = parameters.pop(namelist_name, {})
                for key, value in sorted(six.iteritems(namelist)):
                    infile.write(convert_input_to_namelist_entry(key, value))
                infile.write(u'/\n')

            # Write remaining text now, if any
            infile.write(following_text)

        # Check for specified namelists that are not expected
        if parameters:
            raise exceptions.InputValidationError(
                'The following namelists are specified in parameters, but are '
                'not valid namelists for the current type of calculation: '
                '{}'.format(','.join(list(parameters.keys()))))

        remote_copy_list = []
        local_copy_list = []

        # copy remote output dir, if specified
        parent_calc_folder = self.inputs.get('parent_folder', None)
        if parent_calc_folder is not None:
            if isinstance(parent_calc_folder, RemoteData):
                parent_calc_out_subfolder = settings.pop(
                    'PARENT_CALC_OUT_SUBFOLDER', self._INPUT_SUBFOLDER)
                remote_copy_list.append(
                    (parent_calc_folder.computer.uuid,
                     os.path.join(parent_calc_folder.get_remote_path(),
                                  parent_calc_out_subfolder),
                     self._OUTPUT_SUBFOLDER))
            elif isinstance(parent_calc_folder, FolderData):
                # TODO: test me, especially with deep relative paths.
                for filename in parent_calc_folder.list_object_names():
                    local_copy_list.append(
                        (parent_calc_folder.uuid, filename,
                         os.path.join(self._OUTPUT_SUBFOLDER, filename)))
            elif isinstance(parent_calc_folder, SinglefileData):
                # TODO: test me
                single_file = parent_calc_folder
                local_copy_list.append((single_file.uuid, single_file.filename,
                                        single_file.filename))

        codeinfo = datastructures.CodeInfo()
        codeinfo.cmdline_params = settings.pop('CMDLINE', [])
        codeinfo.stdin_name = self.inputs.metadata.options.input_filename
        codeinfo.stdout_name = self.inputs.metadata.options.output_filename
        codeinfo.code_uuid = self.inputs.code.uuid

        calcinfo = datastructures.CalcInfo()
        calcinfo.uuid = str(self.uuid)
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = local_copy_list
        calcinfo.remote_copy_list = remote_copy_list

        # Retrieve by default the output file
        calcinfo.retrieve_list = []
        calcinfo.retrieve_list.append(
            self.inputs.metadata.options.output_filename)
        settings_retrieve_list = settings.pop('ADDITIONAL_RETRIEVE_LIST', [])
        calcinfo.retrieve_list += settings_retrieve_list
        calcinfo.retrieve_list += self._internal_retrieve_list

        calcinfo.retrieve_singlefile_list = self._retrieve_singlefile_list

        # We might still have parser options in the settings dictionary: pop them.
        _pop_parser_options(self, settings)

        if settings:
            unknown_keys = ', '.join(list(settings.keys()))
            raise exceptions.InputValidationError(
                '`settings` contained unexpected keys: {}'.format(
                    unknown_keys))

        return calcinfo
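The namelist writing loop above delegates the formatting of each key/value pair to convert_input_to_namelist_entry. Below is a rough, assumed sketch of what such a helper does for plain scalars; the actual helper in the plugin also handles lists, atom-indexed values and proper Fortran float notation.

def convert_to_fortran(value):
    """Render a Python scalar as a Fortran namelist literal (simplified sketch)."""
    if isinstance(value, bool):  # must be checked before int, since bool is a subclass of int
        return '.true.' if value else '.false.'
    if isinstance(value, (int, float)):
        return str(value)
    if isinstance(value, str):
        return "'{}'".format(value)
    raise TypeError('unsupported value type: {}'.format(type(value)))


def convert_input_to_namelist_entry(key, value):
    """Return a single namelist line of the form '  key = value' (simplified sketch)."""
    return '  {} = {}\n'.format(key, convert_to_fortran(value))


# Example: convert_input_to_namelist_entry('outdir', './out') -> "  outdir = './out'\n"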
Example No. 24
    def prepare_for_submission(self, folder):
        """Create the input files from the input nodes passed to this instance of the `CalcJob`.

        :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        local_copy_list = []
        remote_copy_list = []
        remote_symlink_list = []

        if 'settings' in self.inputs:
            settings = _uppercase_dict(self.inputs.settings.get_dict(),
                                       dict_name='settings')
        else:
            settings = {}

        parent_folder = self.inputs.parent_folder
        parent_calcs = parent_folder.get_incoming(
            node_class=orm.CalcJobNode).all()

        if not parent_calcs:
            raise exceptions.NotExistent(
                'parent_folder<{}> has no parent calculation'.format(
                    parent_folder.pk))
        elif len(parent_calcs) > 1:
            raise exceptions.UniquenessError(
                'parent_folder<{}> has multiple parent calculations'.format(
                    parent_folder.pk))

        parent_calc = parent_calcs[0].node

        # If the parent calculation is a `PhCalculation` we are restarting
        restart_flag = parent_calc.process_type == 'aiida.calculations:quantumespresso.ph'

        # Also, the parent calculation must be on the same computer
        if not self.node.computer.uuid == parent_calc.computer.uuid:
            raise exceptions.InputValidationError(
                'Calculation has to be launched on the same computer as that of the parent: {}'
                .format(parent_calc.computer.get_name()))

        # By default, the parent output folder is ./out
        try:
            default_parent_output_folder = parent_calc.process_class._OUTPUT_SUBFOLDER
        except AttributeError:
            try:
                default_parent_output_folder = parent_calc._get_output_folder()
            except AttributeError:
                raise exceptions.InputValidationError(
                    'parent calculation does not have a default output subfolder'
                )
        parent_calc_out_subfolder = settings.pop('PARENT_CALC_OUT_SUBFOLDER',
                                                 default_parent_output_folder)

        # I put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
        parameters = _uppercase_dict(self.inputs.parameters.get_dict(),
                                     dict_name='parameters')
        parameters = {
            k: _lowercase_dict(v, dict_name=k)
            for k, v in six.iteritems(parameters)
        }

        prepare_for_d3 = settings.pop('PREPARE_FOR_D3', False)
        if prepare_for_d3:
            self._blocked_keywords += [('INPUTPH', 'fildrho'),
                                       ('INPUTPH', 'drho_star%open'),
                                       ('INPUTPH', 'drho_star%ext'),
                                       ('INPUTPH', 'drho_star%dir')]

        for namelist, flag in self._blocked_keywords:
            if namelist in parameters:
                if flag in parameters[namelist]:
                    raise exceptions.InputValidationError(
                        "Cannot specify explicitly the '{}' flag in the '{}' namelist or card."
                        .format(flag, namelist))

        if 'INPUTPH' not in parameters:
            raise exceptions.InputValidationError(
                'required namelist INPUTPH not specified')

        parameters['INPUTPH']['outdir'] = self._OUTPUT_SUBFOLDER
        parameters['INPUTPH']['iverbosity'] = 1
        parameters['INPUTPH']['prefix'] = self._PREFIX
        parameters['INPUTPH']['fildyn'] = self._OUTPUT_DYNAMICAL_MATRIX_PREFIX

        if prepare_for_d3:
            parameters['INPUTPH']['fildrho'] = self._DRHO_PREFIX
            parameters['INPUTPH']['drho_star%open'] = True
            parameters['INPUTPH']['drho_star%ext'] = self._DRHO_STAR_EXT
            parameters['INPUTPH']['drho_star%dir'] = self._FOLDER_DRHO

        try:
            mesh, offset = self.inputs.qpoints.get_kpoints_mesh()

            if any([i != 0. for i in offset]):
                raise NotImplementedError(
                    'Computation of phonons on a mesh with non zero offset is not implemented, at the level of ph.x'
                )

            parameters['INPUTPH']['ldisp'] = True
            parameters['INPUTPH']['nq1'] = mesh[0]
            parameters['INPUTPH']['nq2'] = mesh[1]
            parameters['INPUTPH']['nq3'] = mesh[2]

            postpend_text = None

        except AttributeError:
            # this is the case where no mesh was set. Maybe it's a list
            try:
                list_of_points = self.inputs.qpoints.get_kpoints(
                    cartesian=True)
            except AttributeError:
                # In this case, there are no info on the qpoints at all
                raise exceptions.InputValidationError(
                    'Input `qpoints` contains neither a mesh nor a list of points'
                )

            # change to 2pi/a coordinates
            lattice_parameter = numpy.linalg.norm(self.inputs.qpoints.cell[0])
            list_of_points *= lattice_parameter / (2. * numpy.pi)

            # add here the list of point coordinates
            if len(list_of_points) > 1:
                parameters['INPUTPH']['qplot'] = True
                parameters['INPUTPH']['ldisp'] = True
                postpend_text = u'{}\n'.format(len(list_of_points))
                for points in list_of_points:
                    postpend_text += u'{0:18.10f} {1:18.10f} {2:18.10f}  1\n'.format(
                        *points)

                # Note: the weight is fixed to 1, because ph.x calls these
                # things weights but they are not such. If they are going to
                # exist with the meaning of weights, they will be supported
            else:
                parameters['INPUTPH']['ldisp'] = False
                postpend_text = u''
                for points in list_of_points:
                    postpend_text += u'{0:18.10f} {1:18.10f} {2:18.10f}\n'.format(
                        *points)

        # customized namelists, otherwise not present in the distributed ph code
        try:
            namelists_toprint = settings.pop('NAMELISTS')
            if not isinstance(namelists_toprint, list):
                raise exceptions.InputValidationError(
                    "The 'NAMELISTS' value, if specified in the settings input "
                    'node, must be a list of strings')
        except KeyError:  # list of namelists not specified in the settings; do automatic detection
            namelists_toprint = self._compulsory_namelists

        # create a folder for the dynamical matrices
        if not restart_flag:  # if it is a restart, it will be copied over
            folder.get_subfolder(self._FOLDER_DYNAMICAL_MATRIX, create=True)

        with folder.open(self.metadata.options.input_filename, 'w') as infile:
            for namelist_name in namelists_toprint:
                infile.write(u'&{0}\n'.format(namelist_name))
                # namelist content; set to {} if not present, so that we leave an empty namelist
                namelist = parameters.pop(namelist_name, {})
                for key, value in sorted(six.iteritems(namelist)):
                    infile.write(convert_input_to_namelist_entry(key, value))
                infile.write(u'/\n')

            # add list of qpoints if required
            if postpend_text is not None:
                infile.write(postpend_text)

        if parameters:
            raise exceptions.InputValidationError(
                'The following namelists are specified in parameters, but are '
                'not valid namelists for the current type of calculation: '
                '{}'.format(','.join(list(parameters.keys()))))

        # copy the parent scratch
        symlink = settings.pop('PARENT_FOLDER_SYMLINK',
                               self._default_symlink_usage)  # a boolean
        if symlink:
            # I create a symlink to each file/folder in the parent ./out
            folder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

            remote_symlink_list.append(
                (parent_folder.computer.uuid,
                 os.path.join(parent_folder.get_remote_path(),
                              parent_calc_out_subfolder,
                              '*'), self._OUTPUT_SUBFOLDER))

            # I also create a symlink for the ./pseudo folder
            # TODO: suppress this when the recover option of QE will be fixed
            # (bug when trying to find pseudo file)
            remote_symlink_list.append(
                (parent_folder.computer.uuid,
                 os.path.join(parent_folder.get_remote_path(),
                              self._get_pseudo_folder()),
                 self._get_pseudo_folder()))
        else:
            # here I copy the whole folder ./out
            remote_copy_list.append(
                (parent_folder.computer.uuid,
                 os.path.join(parent_folder.get_remote_path(),
                              parent_calc_out_subfolder),
                 self._OUTPUT_SUBFOLDER))
            # I also copy the ./pseudo folder
            # TODO: suppress this when the recover option of QE will be fixed
            # (bug when trying to find pseudo file)
            remote_copy_list.append(
                (parent_folder.computer.uuid,
                 os.path.join(parent_folder.get_remote_path(),
                              self._get_pseudo_folder()),
                 self._get_pseudo_folder()))

        if restart_flag:  # in this case, copy in addition also the dynamical matrices
            if symlink:
                remote_symlink_list.append(
                    (parent_folder.computer.uuid,
                     os.path.join(parent_folder.get_remote_path(),
                                  self._FOLDER_DYNAMICAL_MATRIX),
                     self._FOLDER_DYNAMICAL_MATRIX))

            else:
                # copy the dynamical matrices
                # no need to copy the _ph0, since I copied already the whole ./out folder
                remote_copy_list.append(
                    (parent_folder.computer.uuid,
                     os.path.join(parent_folder.get_remote_path(),
                                  self._FOLDER_DYNAMICAL_MATRIX), '.'))

        # Create an `.EXIT` file if `only_initialization` flag in `settings` is set to `True`
        if settings.pop('ONLY_INITIALIZATION', False):
            with folder.open('{}.EXIT'.format(self._PREFIX), 'w') as handle:
                handle.write('\n')

        codeinfo = datastructures.CodeInfo()
        codeinfo.cmdline_params = (
            list(settings.pop('CMDLINE', [])) +
            ['-in', self.metadata.options.input_filename])
        codeinfo.stdout_name = self.metadata.options.output_filename
        codeinfo.code_uuid = self.inputs.code.uuid

        calcinfo = datastructures.CalcInfo()
        calcinfo.uuid = str(self.uuid)
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = local_copy_list
        calcinfo.remote_copy_list = remote_copy_list
        calcinfo.remote_symlink_list = remote_symlink_list

        # Retrieve by default the output file and the xml file
        filepath_xml_tensor = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0',
                                           '{}.phsave'.format(self._PREFIX))
        calcinfo.retrieve_list = []
        calcinfo.retrieve_list.append(self.metadata.options.output_filename)
        calcinfo.retrieve_list.append(self._FOLDER_DYNAMICAL_MATRIX)
        calcinfo.retrieve_list.append(
            os.path.join(filepath_xml_tensor,
                         self._OUTPUT_XML_TENSOR_FILE_NAME))
        calcinfo.retrieve_list += settings.pop('ADDITIONAL_RETRIEVE_LIST', [])

        if settings:
            unknown_keys = ', '.join(list(settings.keys()))
            raise exceptions.InputValidationError(
                '`settings` contained unexpected keys: {}'.format(
                    unknown_keys))

        return calcinfo
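To make the q-point card appended above more concrete, here is a small standalone sketch with a hypothetical two-point list, showing the text that ends up after the namelists when qplot is used:

list_of_points = [
    (0.0, 0.0, 0.0),
    (0.5, 0.0, 0.0),
]  # hypothetical q-points, already in 2pi/a units

postpend_text = u'{}\n'.format(len(list_of_points))
for points in list_of_points:
    postpend_text += u'{0:18.10f} {1:18.10f} {2:18.10f}  1\n'.format(*points)

print(postpend_text)
# 2
#       0.0000000000       0.0000000000       0.0000000000  1
#       0.5000000000       0.0000000000       0.0000000000  1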
Example No. 25
    def prepare_for_submission(self, folder: folders.Folder):
        """Create input files from the input nodes passed to this instance of the `CalcJob`.

        :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        # To be filled out below
        local_copy_list = []
        remote_copy_list = []
        remote_symlink_list = []

        # Create the subfolders for pseudopotentials and orbitals
        folder.get_subfolder(self._PSEUDO_SUBFOLDER, create=True)
        folder.get_subfolder(self._ORBITAL_SUBFOLDER, create=True)

        # Get an uppercase-key-only version of the settings dictionary (also check for case-insensitive duplicates)
        if 'settings' in self.inputs:
            settings = uppercase_dict_keys(self.inputs.settings.get_dict(),
                                           dict_name='settings')
        else:
            settings = {}

        # Get an uppercase-key-only version of the parameters dictionary (also check for case-insensitive duplicates)
        parameters = uppercase_dict_keys(self.inputs.parameters.get_dict(),
                                         dict_name='parameters')

        # No reserved parameter keywords should be provided
        self._check_reserved_keywords(parameters)

        # Load parameter schema
        with open(self._INPUT_SCHEMA, 'r') as stream:
            schema = json.load(stream)

        # Automatically generate input parameters for derived fields, e.g. structure -> Atoms.Unitvectors, etc.
        parameters = self._generate_input_parameters(
            self.inputs.structure, self.inputs.kpoints, parameters,
            self.inputs.pseudos, self.inputs.orbitals,
            self.inputs.orbital_configurations)

        # Get a lowercase-value-only version of the parameters dictionary
        parameters = lowercase_dict_values(parameters)

        # Validate input parameters
        self._validate_inputs(self.inputs.structure, self.inputs.kpoints,
                              parameters, self.inputs.pseudos,
                              self.inputs.orbitals,
                              self.inputs.orbital_configurations, schema)

        # Get input file contents and lists of the pseudopotential and orbital files which need to be copied
        input_file_content = write_input_file(parameters, schema)
        local_copy_pseudo_list, local_copy_orbital_list = self._generate_local_copy_lists(
            self.inputs.pseudos, self.inputs.orbitals)

        local_copy_list += local_copy_pseudo_list
        local_copy_list += local_copy_orbital_list

        # Add output files to retrieve which have been specified to write in the input parameters
        retrieve_list = []
        if parameters.get('BAND_NKPATH', 0) > 0 and parameters.get(
                'SCF_EIGENVALUESOLVER', 'band') == 'band':
            retrieve_list.append(self._DATAFILE_BAND_FILE)
        if parameters.get('MD_TYPE', 'nomd') != 'nomd':
            retrieve_list.append(self._DATAFILE_MD_FILE)
            retrieve_list.append(self._DATAFILE_MD2_FILE)

        # Write input file
        with folder.open(self._INPUT_FILE, 'w') as handle:
            handle.write(input_file_content)

        # Fill out the `CodeInfo`
        codeinfo = datastructures.CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.withmpi = True
        codeinfo.cmdline_params = ([self._INPUT_FILE] +
                                   list(settings.pop('CMDLINE', [])))
        codeinfo.stdout_name = self._OUTPUT_FILE

        # Fill out the `CalcInfo`
        calcinfo = datastructures.CalcInfo()
        calcinfo.uuid = str(self.uuid)
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = local_copy_list
        calcinfo.remote_copy_list = remote_copy_list
        calcinfo.remote_symlink_list = remote_symlink_list
        calcinfo.retrieve_list = retrieve_list
        calcinfo.retrieve_list.append(self._OUTPUT_FILE)
        calcinfo.retrieve_list += settings.pop('ADDITIONAL_RETRIEVE_LIST', [])

        # TODO: pop parser settings and report remaining unknown settings

        return calcinfo
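The schema handling above is specific to this plugin. As a hedged illustration only, parameters could be validated against such a JSON schema with the jsonschema package roughly as follows; the plugin's own _validate_inputs may work differently.

import json

import jsonschema
from aiida.common import exceptions


def validate_parameters_against_schema(parameters, schema_path):
    """Validate a parameters dictionary against a JSON schema file (illustrative sketch)."""
    with open(schema_path, 'r') as handle:
        schema = json.load(handle)
    try:
        jsonschema.validate(instance=parameters, schema=schema)
    except jsonschema.ValidationError as exception:
        raise exceptions.InputValidationError(
            'parameters do not conform to the input schema: {}'.format(exception.message)) from exception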
Example No. 26
    def prepare_for_submission(self, folder):  #pylint: disable=too-many-locals, too-many-statements # noqa:  disable=MC0001
        """
        Routine which creates the input file of Wannier90
        :param folder: an aiida.common.folders.Folder subclass where
            the plugin should put all its files.
        """
        self._validate_input_output_names()

        param_dict = self.inputs.parameters.get_dict()
        self._validate_lowercase(param_dict)
        self._validate_input_parameters(param_dict)

        if 'settings' in self.inputs:
            settings_dict = self.inputs.settings.get_dict()
        else:
            settings_dict = {}
        self._validate_lowercase(settings_dict)

        pp_setup = settings_dict.pop('postproc_setup', False)
        if pp_setup:
            param_dict.update({'postproc_setup': True})

        has_local_input = 'local_input_folder' in self.inputs
        has_remote_input = 'remote_input_folder' in self.inputs
        if pp_setup:
            if has_local_input or has_remote_input:
                raise exc.InputValidationError(
                    "Can not set 'local_input_folder' or 'remote_input_folder' "
                    "with the 'postproc_setup' option.")

        else:
            if has_local_input and has_remote_input:
                raise exc.InputValidationError(
                    "Both the 'local_input_folder' and 'remote_input_folder' "
                    "inputs are set, but they are exclusive. Exactly one of "
                    "the two must be given.")
            if not (has_local_input or has_remote_input):
                raise exc.InputValidationError(
                    "None of the 'local_input_folder' and 'remote_input_folder' "
                    "inputs is set. Exactly one of the two must be given.")

        ############################################################
        # End basic check on inputs
        ############################################################
        random_projections = settings_dict.pop('random_projections', False)

        write_win(
            filename=folder.get_abs_path('{}.win'.format(self._SEEDNAME)),
            parameters=param_dict,
            structure=self.inputs.structure,
            kpoints=self.inputs.kpoints,
            kpoint_path=getattr(self.inputs, 'kpoint_path', None),
            projections=getattr(self.inputs, 'projections', None),
            random_projections=random_projections,
        )

        input_file_lists = self._get_input_file_lists(pp_setup=pp_setup)

        #######################################################################

        calcinfo = datastructures.CalcInfo()
        calcinfo.uuid = self.uuid
        calcinfo.local_copy_list = input_file_lists.local_copy_list + settings_dict.pop(
            "additional_local_copy_list", [])
        calcinfo.remote_copy_list = input_file_lists.remote_copy_list + settings_dict.pop(
            "additional_remote_copy_list", [])
        calcinfo.remote_symlink_list = input_file_lists.remote_symlink_list + settings_dict.pop(
            "additional_remote_symlink_list", [])

        codeinfo = datastructures.CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.cmdline_params = [self._SEEDNAME]

        calcinfo.codes_info = [codeinfo]
        calcinfo.codes_run_mode = datastructures.CodeRunMode.SERIAL

        retrieve_list = [
            self._SEEDNAME + suffix
            for suffix in self._DEFAULT_RETRIEVE_SUFFIXES
        ]
        exclude_retrieve_list = settings_dict.pop("exclude_retrieve_list", [])
        retrieve_list = [
            filename for filename in retrieve_list if not any(
                fnmatch.fnmatch(filename, pattern)
                for pattern in exclude_retrieve_list)
        ]

        calcinfo.retrieve_list = retrieve_list
        calcinfo.retrieve_temporary_list = []
        if pp_setup:
            # The parser will then put this in a SinglefileData (if present)
            calcinfo.retrieve_temporary_list.append('{}.nnkp'.format(
                self._SEEDNAME))

        # Retrieves bands automatically, if they are calculated

        calcinfo.retrieve_list += settings_dict.pop("additional_retrieve_list",
                                                    [])

        # pop input keys not used here
        settings_dict.pop('seedname', None)
        if settings_dict:
            raise exc.InputValidationError(
                "The following keys in settings are unrecognized: {}".format(
                    list(settings_dict.keys())))

        return calcinfo
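The exclude_retrieve_list filtering above can be checked in isolation; a short sketch with made-up seedname files and hypothetical exclusion patterns:

import fnmatch

retrieve_list = ['aiida.wout', 'aiida.werr', 'aiida.chk', 'aiida_band.dat']
exclude_retrieve_list = ['*.chk', '*_band.*']  # hypothetical patterns

filtered = [
    filename for filename in retrieve_list
    if not any(fnmatch.fnmatch(filename, pattern) for pattern in exclude_retrieve_list)
]
assert filtered == ['aiida.wout', 'aiida.werr']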
Example No. 27
    def prepare_for_submission(self, folder):  # pylint: disable=too-many-branches,too-many-statements
        """Prepare the calculation job for submission by transforming input nodes into input files.

        In addition to the input files being written to the sandbox folder, a `CalcInfo` instance will be returned that
        contains lists of files that need to be copied to the remote machine before job submission, as well as file
        lists that are to be retrieved after job completion.

        :param folder: a sandbox folder to temporarily write files on disk.
        :return: :py:class:`~aiida.common.datastructures.CalcInfo` instance.
        """

        # Put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
        parameters = _uppercase_dict(self.inputs.parameters.get_dict(), dict_name='parameters')
        parameters = {k: _lowercase_dict(v, dict_name=k) for k, v in parameters.items()}

        # Same for settings.
        if 'settings' in self.inputs:
            settings = _uppercase_dict(self.inputs.settings.get_dict(), dict_name='settings')
        else:
            settings = {}

        # Set default values. NOTE: this is different from PW/CP
        for blocked in self._blocked_keywords:
            namelist = blocked[0].upper()
            key = blocked[1].lower()
            value = blocked[2]

            if namelist in parameters:
                if key in parameters[namelist]:
                    raise exceptions.InputValidationError(
                        "You cannot specify explicitly the '{}' key in the '{}' "
                        'namelist.'.format(key, namelist))
            else:
                parameters[namelist] = {}
            parameters[namelist][key] = value

        # Restrict the plot output to the file types that we want to be able to parse
        dimension_to_output_format = {
            0: 0,  # Spherical integration -> Gnuplot, 1D
            1: 0,  # 1D -> Gnuplot, 1D
            2: 7,  # 2D -> Gnuplot, 2D
            3: 6,  # 3D -> Gaussian cube
            4: 0,  # Polar on a sphere -> # Gnuplot, 1D
        }
        parameters['PLOT']['output_format'] = dimension_to_output_format[parameters['PLOT']['iflag']]

        namelists_toprint = self._default_namelists

        input_filename = self.inputs.metadata.options.input_filename
        with folder.open(input_filename, 'w') as infile:
            for namelist_name in namelists_toprint:
                infile.write('&{0}\n'.format(namelist_name))
                # namelist content; set to {} if not present, so that we leave an empty namelist
                namelist = parameters.pop(namelist_name, {})
                for key, value in sorted(namelist.items()):
                    infile.write(convert_input_to_namelist_entry(key, value))
                infile.write('/\n')

        # Check for specified namelists that are not expected
        if parameters:
            raise exceptions.InputValidationError(
                'The following namelists are specified in parameters, but are '
                'not valid namelists for the current type of calculation: '
                '{}'.format(','.join(list(parameters.keys()))))

        remote_copy_list = []
        local_copy_list = []

        # Copy remote output dir
        parent_calc_folder = self.inputs.get('parent_folder', None)
        if isinstance(parent_calc_folder, orm.RemoteData):
            remote_copy_list.append((
                parent_calc_folder.computer.uuid,
                os.path.join(parent_calc_folder.get_remote_path(), self._INPUT_SUBFOLDER),
                self._OUTPUT_SUBFOLDER
            ))
            remote_copy_list.append((
                parent_calc_folder.computer.uuid,
                os.path.join(parent_calc_folder.get_remote_path(), self._PSEUDO_SUBFOLDER),
                self._PSEUDO_SUBFOLDER
            ))
        elif isinstance(parent_calc_folder, orm.FolderData):
            for filename in parent_calc_folder.list_object_names():
                local_copy_list.append((
                    parent_calc_folder.uuid,
                    filename,
                    os.path.join(self._OUTPUT_SUBFOLDER, filename)
                ))
                local_copy_list.append((
                    parent_calc_folder.uuid,
                    filename,
                    os.path.join(self._PSEUDO_SUBFOLDER, filename)
                ))

        codeinfo = datastructures.CodeInfo()
        codeinfo.cmdline_params = settings.pop('CMDLINE', [])
        codeinfo.stdin_name = self.inputs.metadata.options.input_filename
        codeinfo.stdout_name = self.inputs.metadata.options.output_filename
        codeinfo.code_uuid = self.inputs.code.uuid

        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = local_copy_list
        calcinfo.remote_copy_list = remote_copy_list

        # Retrieve by default the output file
        calcinfo.retrieve_list = [self.inputs.metadata.options.output_filename]
        calcinfo.retrieve_temporary_list = []

        # Depending on the `plot_num` and the corresponding parameters, more than one pair of `filplot` + `fileout`
        # files may be written. In that case, the data files will have `filplot` as a prefix with some suffix to
        # distinguish them from one another. The `fileout` filename will be the full data filename with the `fileout`
        # value as a suffix.
        retrieve_tuples = [
            self._FILEOUT,
            ('{}_*{}'.format(self._FILPLOT, self._FILEOUT), '.', 0)
        ]

        if self.inputs.metadata.options.keep_plot_file:
            calcinfo.retrieve_list.extend(retrieve_tuples)
        else:
            calcinfo.retrieve_temporary_list.extend(retrieve_tuples)

        return calcinfo
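A note on the retrieve directives used above: plain strings in retrieve_list are fetched as-is, while 3-tuples follow AiiDA's (source_pattern, target_dir, depth) convention, where the glob pattern selects remote files and the last element controls how much of the remote directory nesting is preserved locally (0 flattens the matches into target_dir). With hypothetical filenames the directives above would look like:

_FILPLOT = 'aiida.filplot'  # hypothetical values, for illustration only
_FILEOUT = 'aiida.fileout'

retrieve_tuples = [
    _FILEOUT,                                       # the single data file, retrieved by name
    ('{}_*{}'.format(_FILPLOT, _FILEOUT), '.', 0),  # glob for multi-file output, flattened into the retrieve folder
]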
Example No. 28
    def prepare_for_submission(self, folder):
        """
        Create the input files from the input nodes passed to this instance of the `CalcJob`.
        """
        # pylint: disable=too-many-locals

        # Generate the content of the structure file based on the input
        # structure
        structure_filecontent, _ = generate_lammps_structure(
            self.inputs.structure,
            self.inputs.potential.atom_style,
        )

        # Get the name of the structure file and write it to the remote folder
        _structure_filename = self.inputs.metadata.options.structure_filename

        with folder.open(_structure_filename, 'w') as handle:
            handle.write(structure_filecontent)

        # Get the parameters dictionary so that they can be used for creating
        # the input file
        _parameters = self.inputs.parameters.get_dict()

        # Get the name of the trajectory file
        _trajectory_filename = self.inputs.metadata.options.trajectory_filename

        # Get the name of the variables file
        _variables_filename = self.inputs.metadata.options.variables_filename

        # Get the name of the restart file
        _restart_filename = self.inputs.metadata.options.restart_filename

        # Get the name of the output file
        _output_filename = self.inputs.metadata.options.output_filename

        # Get the name of the logfile file
        _logfile_filename = self.inputs.metadata.options.logfile_filename

        # If there is a restartfile set its name to the input variables and
        # write it in the remote folder
        if 'input_restartfile' in self.inputs:
            _read_restart_filename = self._DEFAULT_READ_RESTART_FILENAME
            with folder.open(_read_restart_filename, 'wb') as handle:
                handle.write(self.inputs.input_restartfile.get_content())
        else:
            _read_restart_filename = None

        # Write the input file content. This function will also check the
        # sanity of the passed parameters by comparing them to a schema
        input_filecontent = generate_input_file(
            potential=self.inputs.potential,
            structure=self.inputs.structure,
            parameters=_parameters,
            restart_filename=_restart_filename,
            trajectory_filename=_trajectory_filename,
            variables_filename=_variables_filename,
            read_restart_filename=_read_restart_filename,
        )

        # Get the name of the input file, and write it to the remote folder
        _input_filename = self.inputs.metadata.options.input_filename

        with folder.open(_input_filename, 'w') as handle:
            handle.write(input_filecontent)

        # Write the potential to the remote folder
        with folder.open(self._DEFAULT_POTENTIAL_FILENAME, 'w') as handle:
            handle.write(self.inputs.potential.get_content())

        codeinfo = datastructures.CodeInfo()
        # Command line parameters so that LAMMPS reads the given input file
        # and writes its log to the requested logfile
        codeinfo.cmdline_params = [
            '-in', _input_filename, '-log', _logfile_filename
        ]
        # Set the code uuid
        codeinfo.code_uuid = self.inputs.code.uuid
        # Set the name of the stdout
        codeinfo.stdout_name = _output_filename
        # Set whether or not one is running with MPI
        codeinfo.withmpi = self.inputs.metadata.options.withmpi

        # Generate the datastructure for the calculation information
        calcinfo = datastructures.CalcInfo()
        calcinfo.uuid = str(self.uuid)
        # Set the files that must be retrieved
        calcinfo.retrieve_list = []
        calcinfo.retrieve_list.append(_output_filename)
        calcinfo.retrieve_list.append(_logfile_filename)
        calcinfo.retrieve_list.append(_restart_filename)
        calcinfo.retrieve_list.append(_variables_filename)
        calcinfo.retrieve_list.append(_trajectory_filename)
        # Set the information of the code into the calculation datastructure
        calcinfo.codes_info = [codeinfo]

        return calcinfo
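The metadata.options.*_filename values read above are normally declared in the plugin's define classmethod. Below is a hedged sketch of what such a declaration could look like; the class name and the default filenames are assumptions for illustration only.

from aiida.engine import CalcJob


class LammpsSketchCalculation(CalcJob):  # hypothetical class, for illustration only
    """Sketch showing how the filename options could be declared."""

    @classmethod
    def define(cls, spec):
        super().define(spec)
        spec.input('metadata.options.input_filename', valid_type=str, default='input.in')
        spec.input('metadata.options.structure_filename', valid_type=str, default='structure.dat')
        spec.input('metadata.options.trajectory_filename', valid_type=str, default='trajectory.lammpstrj')
        spec.input('metadata.options.variables_filename', valid_type=str, default='variables.yaml')
        spec.input('metadata.options.restart_filename', valid_type=str, default='restart.aiida')
        spec.input('metadata.options.logfile_filename', valid_type=str, default='log.lammps')
        spec.input('metadata.options.output_filename', valid_type=str, default='lammps.out')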
Example No. 29
    def prepare_for_submission(self, folder):
        self.inputs.metadata.options.parser_name = 'z2pack.z2pack'
        self.inputs.metadata.options.output_filename = self._OUTPUT_Z2PACK_FILE
        self.inputs.metadata.options.input_filename = self._INPUT_Z2PACK_FILE

        calcinfo = datastructures.CalcInfo()

        codeinfo = datastructures.CodeInfo()
        codeinfo.stdout_name = self._OUTPUT_Z2PACK_FILE
        codeinfo.stdin_name = self._INPUT_Z2PACK_FILE
        codeinfo.code_uuid = self.inputs.code.uuid
        calcinfo.codes_info = [codeinfo]

        calcinfo.codes_run_mode = datastructures.CodeRunMode.SERIAL
        calcinfo.cmdline_params = []

        calcinfo.retrieve_list = []
        calcinfo.retrieve_temporary_list = []
        calcinfo.local_copy_list = []
        calcinfo.remote_copy_list = []
        calcinfo.remote_symlink_list = []

        inputs = [
            self._INPUT_PW_NSCF_FILE,
            self._INPUT_OVERLAP_FILE,
            self._INPUT_W90_FILE,
        ]
        outputs = [
            self._OUTPUT_Z2PACK_FILE,
            self._OUTPUT_SAVE_FILE,
            self._OUTPUT_RESULT_FILE,
        ]
        errors = [
            os.path.join('build', a)
            for a in [self._ERROR_W90_FILE, self._ERROR_PW_FILE]
        ]

        calcinfo.retrieve_list.extend(outputs)
        calcinfo.retrieve_list.extend(errors)

        parent = self.inputs.parent_folder
        rpath = parent.get_remote_path()
        uuid = parent.computer.uuid
        parent_type = parent.creator.process_class

        if parent_type == Z2packCalculation:
            self._set_inputs_from_parent_z2pack()
        elif parent_type == PwCalculation:
            self._set_inputs_from_parent_scf()

        pw_dct = _lowercase_dict(self.inputs.pw_parameters.get_dict(),
                                 'pw_dct')
        sys = pw_dct['system']
        if sys.get('noncolin', False) and sys.get('lspinorb', False):
            self._blocked_keywords_wannier90.append(('spinors', True))

        try:
            settings = _lowercase_dict(self.inputs.z2pack_settings.get_dict(),
                                       'z2pack_settings')
        except AttributeError:
            raise exceptions.InputValidationError(
                'Must provide `z2pack_settings` input for `scf` calculation.')
        symlink = settings.get('parent_folder_symlink', False)
        self.restart_mode = settings.get('restart_mode', True)
        ptr = calcinfo.remote_symlink_list if symlink else calcinfo.remote_copy_list

        if parent_type == PwCalculation:
            prepare_nscf(self, folder)
            prepare_overlap(self, folder)
            prepare_wannier90(self, folder)
        elif parent_type == Z2packCalculation:
            if self.restart_mode:
                calcinfo.remote_copy_list.append((
                    uuid,
                    os.path.join(rpath, self._OUTPUT_SAVE_FILE),
                    self._OUTPUT_SAVE_FILE,
                ))

            calcinfo.remote_copy_list.extend([(uuid, os.path.join(rpath,
                                                                  inp), inp)
                                              for inp in inputs])
        else:
            raise exceptions.ValidationError(
                'parent node must be either from a PWscf or a Z2pack calculation.'
            )

        parent_files = [self._PSEUDO_SUBFOLDER, self._OUTPUT_SUBFOLDER]
        ptr.extend([(uuid, os.path.join(rpath, fname), fname)
                    for fname in parent_files])

        prepare_z2pack(self, folder)

        return calcinfo
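Both the Namelists-style fragment near the top of this listing and the Z2pack example above use the same idiom of pointing a single variable at either the symlink list or the copy list. A tiny sketch of the idea, with a hypothetical entry:

remote_copy_list = []
remote_symlink_list = []

symlink = True  # e.g. taken from the settings dictionary

# Every parent-folder entry appended through `ptr` ends up either as a symlink or as a copy,
# depending on a single switch, without duplicating the append logic.
ptr = remote_symlink_list if symlink else remote_copy_list
ptr.append(('computer-uuid', '/remote/path/out', './out'))  # hypothetical entry

assert remote_symlink_list and not remote_copy_list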