Example No. 1
    def test_submit_script_bad_shebang(self):
        """
        Test that the scheduler falls back to the default shebang when the
        shebang is unset or None, and keeps an explicitly empty one.
        """
        from aiida.schedulers.datastructures import JobTemplate
        from aiida.common.datastructures import CodeInfo, CodeRunMode

        scheduler = PbsproScheduler()
        code_info = CodeInfo()
        code_info.cmdline_params = ['mpirun', '-np', '23', 'pw.x', '-npool', '1']
        code_info.stdin_name = 'aiida.in'

        for (shebang, expected_first_line) in ((None, '#!/bin/bash'), ('', ''), ('NOSET', '#!/bin/bash')):
            job_tmpl = JobTemplate()
            if shebang == 'NOSET':
                pass
            else:
                job_tmpl.shebang = shebang
            job_tmpl.job_resource = scheduler.create_job_resource(num_machines=1, num_mpiprocs_per_machine=1)
            job_tmpl.codes_info = [code_info]
            job_tmpl.codes_run_mode = CodeRunMode.SERIAL

            submit_script_text = scheduler.get_submit_script(job_tmpl)

            # This tests if the implementation correctly chooses the default:
            self.assertEqual(submit_script_text.split('\n')[0], expected_first_line)
Example No. 2
    def test_submit_script_with_num_cores_per_machine_and_mpiproc1(self):  # pylint: disable=invalid-name
        """
        Test that the submit script is generated correctly when both
        num_cores_per_machine and num_cores_per_mpiproc are given consistent values,
        i.e. when they satisfy the check:
        res.num_cores_per_mpiproc * res.num_mpiprocs_per_machine == res.num_cores_per_machine
        """
        from aiida.schedulers.datastructures import JobTemplate
        from aiida.common.datastructures import CodeInfo, CodeRunMode

        scheduler = SlurmScheduler()

        job_tmpl = JobTemplate()
        job_tmpl.shebang = '#!/bin/bash'
        job_tmpl.job_resource = scheduler.create_job_resource(
            num_machines=1, num_mpiprocs_per_machine=1, num_cores_per_machine=24, num_cores_per_mpiproc=24
        )
        job_tmpl.uuid = str(uuid.uuid4())
        job_tmpl.max_wallclock_seconds = 24 * 3600
        code_info = CodeInfo()
        code_info.cmdline_params = ['mpirun', '-np', '23', 'pw.x', '-npool', '1']
        code_info.stdin_name = 'aiida.in'
        job_tmpl.codes_info = [code_info]
        job_tmpl.codes_run_mode = CodeRunMode.SERIAL

        submit_script_text = scheduler.get_submit_script(job_tmpl)

        assert '#SBATCH --no-requeue' in submit_script_text
        assert '#SBATCH --time=1-00:00:00' in submit_script_text
        assert '#SBATCH --nodes=1' in submit_script_text
        assert '#SBATCH --ntasks-per-node=1' in submit_script_text
        assert '#SBATCH --cpus-per-task=24' in submit_script_text

        assert "'mpirun' '-np' '23' 'pw.x' '-npool' '1' < 'aiida.in'" in submit_script_text
Example No. 3
    def prepare_for_submission(self, folder: Folder) -> CalcInfo:
        """Prepare the calculation for submission.

        :param folder: a temporary folder on the local file system.
        :returns: the `CalcInfo` instance
        """
        echo_value = "fail" if self.node.get_option(
            "fail_calcjob") else "success"
        with folder.open(self.options.input_filename, "w",
                         encoding="utf8") as handle:
            handle.write(f"sleep {self.inputs.time.value}\n")
            handle.write(f'echo "{echo_value}"\n')

        # json.dump writes text, so the payload file must be opened in text mode
        with folder.open(self.options.payload_filename, "w",
                         encoding="utf8") as handle:
            json.dump(self.inputs.payload.get_dict(), handle)

        codeinfo = CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.stdin_name = self.options.input_filename
        codeinfo.stdout_name = self.options.output_filename

        calcinfo = CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.retrieve_list = [
            self.options.output_filename,
            self.options.payload_filename,
        ]

        return calcinfo
Example No. 4
    def _prepare_for_submission(self, tempfolder, inputdict):
        """
        Writes the four basic VASP input files: INCAR, POSCAR, POTCAR and KPOINTS.

        Delegates the construction and writing / copying to write_<file> methods.
        That way, subclasses can use any form of input nodes and just
        have to implement the write_xxx methods accordingly.
        Subclasses can extend this by calling the super method and, if necessary,
        modifying its returned CalcInfo before passing it on.
        """
        # write input files
        incar = tempfolder.get_abs_path('INCAR')
        structure = tempfolder.get_abs_path('POSCAR')
        potentials = tempfolder.get_abs_path('POTCAR')
        kpoints = tempfolder.get_abs_path('KPOINTS')

        self.verify_inputs(inputdict)
        self.write_incar(inputdict, incar)
        self.write_poscar(inputdict, structure)
        self.write_potcar(inputdict, potentials)
        self.write_kpoints(inputdict, kpoints)
        self.write_additional(tempfolder, inputdict)

        # calcinfo
        calcinfo = CalcInfo()
        calcinfo.uuid = self.uuid
        calcinfo.retrieve_list = self.max_retrieve_list()
        codeinfo = CodeInfo()
        codeinfo.code_uuid = self.get_code().uuid
        codeinfo.code_pk = self.get_code().pk
        calcinfo.codes_info = [codeinfo]

        return calcinfo
Example No. 5
    def test_submit_script_bad_shebang(self):
        from aiida.scheduler.datastructures import JobTemplate
        from aiida.common.datastructures import CodeInfo, code_run_modes

        scheduler = SlurmScheduler()
        code_info = CodeInfo()
        code_info.cmdline_params = [
            "mpirun", "-np", "23", "pw.x", "-npool", "1"
        ]
        code_info.stdin_name = 'aiida.in'

        for (shebang, expected_first_line) in ((None, '#!/bin/bash'), ("", ""),
                                               ("NOSET", '#!/bin/bash')):
            job_tmpl = JobTemplate()
            if shebang == "NOSET":
                pass
            else:
                job_tmpl.shebang = shebang
            job_tmpl.job_resource = scheduler.create_job_resource(
                num_machines=1, num_mpiprocs_per_machine=1)
            job_tmpl.codes_info = [code_info]
            job_tmpl.codes_run_mode = code_run_modes.SERIAL

            submit_script_text = scheduler.get_submit_script(job_tmpl)

            # This tests if the implementation correctly chooses the default:
            self.assertEqual(
                submit_script_text.split('\n')[0], expected_first_line)
Example No. 6
    def prepare_for_submission(self, folder):
        """
        This method is called prior to job submission with a set of calculation input nodes.
        The inputs will be validated and sanitized, after which the necessary input files will
        be written to disk in a temporary folder. A CalcInfo instance will be returned that contains
        lists of files that need to be copied to the remote machine before job submission, as well
        as file lists that are to be retrieved after job completion.

        :param folder: an aiida.common.folders.Folder to temporarily write files on disk
        :returns: CalcInfo instance
        """
        input_x = self.inputs.x
        input_y = self.inputs.y
        input_code = self.inputs.code

        self.write_input_files(folder, input_x, input_y)

        retrieve_list = self.get_retrieve_list()
        local_copy_list = self.get_local_copy_list()
        remote_copy_list = self.get_remote_copy_list()

        codeinfo = CodeInfo()
        codeinfo.cmdline_params = ['-in', self.options.input_filename]
        codeinfo.stdout_name = self.options.output_filename
        codeinfo.code_uuid = input_code.uuid

        calcinfo = CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.retrieve_list = retrieve_list
        calcinfo.local_copy_list = local_copy_list
        calcinfo.remote_copy_list = remote_copy_list

        return calcinfo
Example No. 7
    def test_submit_script_with_num_cores_per_machine(self):
        """
        Test that the submit script is generated correctly when only the
        num_cores_per_machine value is specified.
        """
        from aiida.scheduler.datastructures import JobTemplate
        from aiida.common.datastructures import CodeInfo, code_run_modes

        s = PbsproScheduler()

        job_tmpl = JobTemplate()
        job_tmpl.job_resource = s.create_job_resource(
            num_machines=1,
            num_mpiprocs_per_machine=2,
            num_cores_per_machine=24
        )
        job_tmpl.uuid = str(uuid.uuid4())
        job_tmpl.max_wallclock_seconds = 24 * 3600
        code_info = CodeInfo()
        code_info.cmdline_params = ["mpirun", "-np", "23",
                                    "pw.x", "-npool", "1"]
        code_info.stdin_name = 'aiida.in'
        job_tmpl.codes_info = [code_info]
        job_tmpl.codes_run_mode = code_run_modes.SERIAL

        submit_script_text = s.get_submit_script(job_tmpl)

        self.assertTrue('#PBS -r n' in submit_script_text)
        self.assertTrue(submit_script_text.startswith('#!/bin/bash'))

        self.assertTrue('#PBS -l select=1:mpiprocs=2' in submit_script_text)
        # Note: here 'num_cores_per_machine' should NOT override the mpiprocs

        self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1'" +
                        " < 'aiida.in'" in submit_script_text)
Example No. 8
    def test_submit_script_with_num_cores_per_mpiproc(self):
        """
        Test that the submit script is generated correctly when only the
        num_cores_per_mpiproc value is passed.
        """
        from aiida.scheduler.datastructures import JobTemplate
        from aiida.common.datastructures import CodeInfo, code_run_modes

        s = SlurmScheduler()

        job_tmpl = JobTemplate()
        job_tmpl.job_resource = s.create_job_resource(
            num_machines=1,
            num_mpiprocs_per_machine=1,
            num_cores_per_mpiproc=24)
        job_tmpl.uuid = str(uuid.uuid4())
        job_tmpl.max_wallclock_seconds = 24 * 3600
        code_info = CodeInfo()
        code_info.cmdline_params = [
            "mpirun", "-np", "23", "pw.x", "-npool", "1"
        ]
        code_info.stdin_name = 'aiida.in'
        job_tmpl.codes_info = [code_info]
        job_tmpl.codes_run_mode = code_run_modes.SERIAL

        submit_script_text = s.get_submit_script(job_tmpl)

        self.assertTrue('#SBATCH --no-requeue' in submit_script_text)
        self.assertTrue('#SBATCH --time=1-00:00:00' in submit_script_text)
        self.assertTrue('#SBATCH --nodes=1' in submit_script_text)
        self.assertTrue('#SBATCH --ntasks-per-node=1' in submit_script_text)
        self.assertTrue('#SBATCH --cpus-per-task=24' in submit_script_text)

        self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1' < 'aiida.in'" in submit_script_text)
Example No. 9
    def test_submit_script(self):
        """
        Test the creation of a simple submission script.
        """
        from aiida.scheduler.datastructures import JobTemplate
        from aiida.common.datastructures import CodeInfo, code_run_modes

        s = SlurmScheduler()

        job_tmpl = JobTemplate()
        job_tmpl.uuid = str(uuid.uuid4())
        job_tmpl.job_resource = s.create_job_resource(
            num_machines=1, num_mpiprocs_per_machine=1)
        job_tmpl.max_wallclock_seconds = 24 * 3600
        code_info = CodeInfo()
        code_info.cmdline_params = [
            "mpirun", "-np", "23", "pw.x", "-npool", "1"
        ]
        code_info.stdin_name = 'aiida.in'
        job_tmpl.codes_info = [code_info]
        job_tmpl.codes_run_mode = code_run_modes.SERIAL

        submit_script_text = s.get_submit_script(job_tmpl)

        self.assertTrue(submit_script_text.startswith('#!/bin/bash'))

        self.assertTrue('#SBATCH --no-requeue' in submit_script_text)
        self.assertTrue('#SBATCH --time=1-00:00:00' in submit_script_text)
        self.assertTrue('#SBATCH --nodes=1' in submit_script_text)

        self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1' < 'aiida.in'" in submit_script_text)
Example No. 10
    def test_submit_script(self):
        """
        Test the creation of a simple submission script.
        """
        from aiida.schedulers.datastructures import JobTemplate
        from aiida.common.datastructures import CodeInfo, CodeRunMode

        scheduler = LsfScheduler()

        job_tmpl = JobTemplate()
        job_tmpl.shebang = '#!/bin/bash'
        job_tmpl.uuid = str(uuid.uuid4())
        job_tmpl.job_resource = scheduler.create_job_resource(tot_num_mpiprocs=2, parallel_env='b681e480bd.cern.ch')
        job_tmpl.max_wallclock_seconds = 24 * 3600
        code_info = CodeInfo()
        code_info.cmdline_params = ['mpirun', '-np', '2', 'pw.x', '-npool', '1']
        code_info.stdin_name = 'aiida.in'
        job_tmpl.codes_info = [code_info]
        job_tmpl.codes_run_mode = CodeRunMode.SERIAL

        submit_script_text = scheduler.get_submit_script(job_tmpl)

        self.assertTrue(submit_script_text.startswith('#!/bin/bash'))

        self.assertTrue('#BSUB -rn' in submit_script_text)
        self.assertTrue('#BSUB -W 24:00' in submit_script_text)
        self.assertTrue('#BSUB -n 2' in submit_script_text)

        self.assertTrue("'mpirun' '-np' '2' 'pw.x' '-npool' '1'" + \
                        " < 'aiida.in'" in submit_script_text)
Example No. 11
    def prepare_for_submission(self, folder: Folder) -> CalcInfo:
        """Prepare the calculation for submission.

        Convert the input nodes into the corresponding input files in the format that the code will expect. In addition,
        define and return a `CalcInfo` instance, which is a simple data structure that contains information for the
        engine, for example, on what files to copy to the remote machine, what files to retrieve once it has completed,
        specific scheduler settings and more.

        :param folder: a temporary folder on the local file system.
        :returns: the `CalcInfo` instance
        """
        with folder.open(self.options.input_filename, 'w',
                         encoding='utf8') as handle:
            handle.write('echo $(({x} + {y}))\n'.format(x=self.inputs.x.value,
                                                        y=self.inputs.y.value))

        codeinfo = CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.stdin_name = self.options.input_filename
        codeinfo.stdout_name = self.options.output_filename

        calcinfo = CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.retrieve_list = [self.options.output_filename]

        return calcinfo
Example No. 12
    def test_submit_script(self):
        """
        Test that the submit script is generated correctly with the default options.
        """
        from aiida.scheduler.datastructures import JobTemplate
        from aiida.common.datastructures import CodeInfo, code_run_modes

        s = PbsproScheduler()

        job_tmpl = JobTemplate()
        job_tmpl.job_resource = s.create_job_resource(num_machines=1, num_mpiprocs_per_machine=1)
        job_tmpl.uuid = str(uuid.uuid4())
        job_tmpl.max_wallclock_seconds = 24 * 3600
        code_info = CodeInfo()
        code_info.cmdline_params = ["mpirun", "-np", "23", "pw.x", "-npool", "1"]
        code_info.stdin_name = 'aiida.in'
        job_tmpl.codes_info = [code_info]
        job_tmpl.codes_run_mode = code_run_modes.SERIAL

        submit_script_text = s.get_submit_script(job_tmpl)

        self.assertTrue('#PBS -r n' in submit_script_text)
        self.assertTrue(submit_script_text.startswith('#!/bin/bash'))
        self.assertTrue('#PBS -l walltime=24:00:00' in submit_script_text)
        self.assertTrue('#PBS -l select=1' in submit_script_text)
        self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1'" + \
                        " < 'aiida.in'" in submit_script_text)
Example No. 13
def test_upload_local_copy_list(fixture_sandbox, aiida_localhost,
                                aiida_local_code_factory):
    """Test the ``local_copy_list`` functionality in ``upload_calculation``.

    Specifically, verify that files in the ``local_copy_list`` do not end up in the repository of the node.
    """
    from aiida.common.datastructures import CalcInfo, CodeInfo
    from aiida.orm import CalcJobNode, SinglefileData

    inputs = {
        'file_a': SinglefileData(io.BytesIO(b'content_a')).store(),
        'file_b': SinglefileData(io.BytesIO(b'content_b')).store(),
    }

    node = CalcJobNode(computer=aiida_localhost)
    node.store()

    code = aiida_local_code_factory('arithmetic.add', '/bin/bash').store()
    code_info = CodeInfo()
    code_info.code_uuid = code.uuid

    calc_info = CalcInfo()
    calc_info.uuid = node.uuid
    calc_info.codes_info = [code_info]
    calc_info.local_copy_list = [
        (inputs['file_a'].uuid, inputs['file_a'].filename, './files/file_a'),
        (inputs['file_b'].uuid, inputs['file_b'].filename, './files/file_b'),
    ]

    with LocalTransport() as transport:
        execmanager.upload_calculation(node, transport, calc_info,
                                       fixture_sandbox)

    assert node.list_object_names() == []
Example No. 14
    def test_submit_script(self):
        """
        Test the creation of a simple submission script.
        """
        from aiida.schedulers.datastructures import JobTemplate
        from aiida.common.datastructures import CodeInfo, CodeRunMode

        scheduler = SlurmScheduler()

        job_tmpl = JobTemplate()
        job_tmpl.shebang = '#!/bin/bash'
        job_tmpl.uuid = str(uuid.uuid4())
        job_tmpl.job_resource = scheduler.create_job_resource(num_machines=1, num_mpiprocs_per_machine=1)
        job_tmpl.max_wallclock_seconds = 24 * 3600
        code_info = CodeInfo()
        code_info.cmdline_params = ['mpirun', '-np', '23', 'pw.x', '-npool', '1']
        code_info.stdin_name = 'aiida.in'
        job_tmpl.codes_info = [code_info]
        job_tmpl.codes_run_mode = CodeRunMode.SERIAL

        submit_script_text = scheduler.get_submit_script(job_tmpl)

        assert submit_script_text.startswith('#!/bin/bash')

        assert '#SBATCH --no-requeue' in submit_script_text
        assert '#SBATCH --time=1-00:00:00' in submit_script_text
        assert '#SBATCH --nodes=1' in submit_script_text

        assert "'mpirun' '-np' '23' 'pw.x' '-npool' '1' < 'aiida.in'" in submit_script_text
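For orientation only, the test above can be reduced to a standalone snippet: build a JobTemplate by hand and print the generated batch script. This is a minimal sketch under the same API assumptions as the test (a SlurmScheduler importable from aiida.schedulers.plugins.slurm, and a JobTemplate.codes_info that accepts CodeInfo objects); it is not part of the example code.

    import uuid

    from aiida.common.datastructures import CodeInfo, CodeRunMode
    from aiida.schedulers.datastructures import JobTemplate
    from aiida.schedulers.plugins.slurm import SlurmScheduler

    scheduler = SlurmScheduler()

    job_tmpl = JobTemplate()
    job_tmpl.shebang = '#!/bin/bash'
    job_tmpl.uuid = str(uuid.uuid4())
    job_tmpl.job_resource = scheduler.create_job_resource(num_machines=1, num_mpiprocs_per_machine=1)
    job_tmpl.max_wallclock_seconds = 3600

    code_info = CodeInfo()
    code_info.cmdline_params = ['pw.x', '-npool', '1']
    code_info.stdin_name = 'aiida.in'
    job_tmpl.codes_info = [code_info]
    job_tmpl.codes_run_mode = CodeRunMode.SERIAL

    # get_submit_script returns the complete submission script as a single string
    print(scheduler.get_submit_script(job_tmpl))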
Example No. 15
    def _prepare_for_submission(self, tempfolder, inputdict):
        """
        Create input files.

            :param tempfolder: aiida.common.folders.Folder subclass where
                the plugin should put all its files.
            :param inputdict: dictionary of the input nodes as they would
                be returned by get_inputs_dict
        """
        parameters, code, structure, surface_sample = \
                self._validate_inputs(inputdict)

        # Prepare CalcInfo to be returned to aiida
        calcinfo = CalcInfo()
        calcinfo.uuid = self.uuid
        calcinfo.local_copy_list = [
            [structure.get_file_abs_path(), structure.filename],
            [surface_sample.get_file_abs_path(), surface_sample.filename],
        ]
        calcinfo.remote_copy_list = []
        calcinfo.retrieve_list = parameters.output_files

        codeinfo = CodeInfo()
        # will call ./code.py in.json out.json
        codeinfo.cmdline_params = parameters.cmdline_params(
            structure_file_name=structure.filename,
            surface_sample_file_name=surface_sample.filename)
        codeinfo.code_uuid = code.uuid
        calcinfo.codes_info = [codeinfo]

        return calcinfo
Example No. 16
    def prepare_for_submission(self, folder):
        """Write the input files that are required for the code to run.

        :param folder: an `~aiida.common.folders.Folder` to temporarily write files on disk
        :return: `~aiida.common.datastructures.CalcInfo` instance
        """
        input_x = self.inputs.x
        input_y = self.inputs.y

        # Write the input file based on the inputs that were passed
        with folder.open(self.options.input_filename, 'w',
                         encoding='utf8') as handle:
            handle.write(f'{input_x.value} {input_y.value}\n')

        codeinfo = CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.stdout_name = self.options.output_filename
        codeinfo.cmdline_params = ['-in', self.options.input_filename]

        calcinfo = CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = []
        calcinfo.remote_copy_list = []
        calcinfo.retrieve_list = []

        return calcinfo
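The prepare_for_submission methods in these examples are shown out of their class context. As a rough, hedged sketch (not taken from any of the examples; the class name, defaults and help strings are illustrative), this is how a method like the one in Example No. 16 typically sits inside a CalcJob subclass in recent aiida-core versions:

    from aiida.common.datastructures import CalcInfo, CodeInfo
    from aiida.engine import CalcJob
    from aiida.orm import Int


    class AddInputsCalculation(CalcJob):
        """Illustrative CalcJob that writes two integers to a file and feeds it to a code."""

        @classmethod
        def define(cls, spec):
            super().define(spec)
            spec.input('x', valid_type=Int, help='The left operand.')
            spec.input('y', valid_type=Int, help='The right operand.')
            # Override defaults of options that the base CalcJob already declares
            spec.inputs['metadata']['options']['input_filename'].default = 'aiida.in'
            spec.inputs['metadata']['options']['output_filename'].default = 'aiida.out'
            spec.inputs['metadata']['options']['resources'].default = {
                'num_machines': 1,
                'num_mpiprocs_per_machine': 1,
            }

        def prepare_for_submission(self, folder):
            # Write the raw input file into the temporary sandbox folder
            with folder.open(self.options.input_filename, 'w', encoding='utf8') as handle:
                handle.write(f'{self.inputs.x.value} {self.inputs.y.value}\n')

            # Tell the engine how to invoke the code and where to redirect its stdout
            codeinfo = CodeInfo()
            codeinfo.code_uuid = self.inputs.code.uuid
            codeinfo.cmdline_params = ['-in', self.options.input_filename]
            codeinfo.stdout_name = self.options.output_filename

            # Collect what the engine needs to run the job and retrieve its results
            calcinfo = CalcInfo()
            calcinfo.codes_info = [codeinfo]
            calcinfo.retrieve_list = [self.options.output_filename]
            return calcinfo

Such a class would normally be registered as a calculation entry point and launched with something like engine.run(AddInputsCalculation, code=load_code('...'), x=Int(2), y=Int(3)), given a configured code and a loaded AiiDA profile.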
Example No. 17
    def test_submit_script_with_num_cores_per_machine(self):
        """
        Test that the submit script is generated correctly when only the
        num_cores_per_machine value is specified.
        """
        from aiida.schedulers.datastructures import JobTemplate
        from aiida.common.datastructures import CodeInfo, CodeRunMode

        scheduler = SlurmScheduler()

        job_tmpl = JobTemplate()
        job_tmpl.shebang = '#!/bin/bash'
        job_tmpl.job_resource = scheduler.create_job_resource(
            num_machines=1, num_mpiprocs_per_machine=2, num_cores_per_machine=24)
        job_tmpl.uuid = str(uuid.uuid4())
        job_tmpl.max_wallclock_seconds = 24 * 3600
        code_info = CodeInfo()
        code_info.cmdline_params = ['mpirun', '-np', '23', 'pw.x', '-npool', '1']
        code_info.stdin_name = 'aiida.in'
        job_tmpl.codes_info = [code_info]
        job_tmpl.codes_run_mode = CodeRunMode.SERIAL

        submit_script_text = scheduler.get_submit_script(job_tmpl)

        self.assertTrue('#SBATCH --no-requeue' in submit_script_text)
        self.assertTrue('#SBATCH --time=1-00:00:00' in submit_script_text)
        self.assertTrue('#SBATCH --nodes=1' in submit_script_text)
        self.assertTrue('#SBATCH --ntasks-per-node=2' in submit_script_text)
        self.assertTrue('#SBATCH --cpus-per-task=12' in submit_script_text)

        self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1'" + \
                        " < 'aiida.in'" in submit_script_text)
Example No. 18
    def test_submit_script_with_num_cores_per_mpiproc(self):
        """
        Test that the submit script is generated correctly when only the
        num_cores_per_mpiproc value is passed.
        """
        from aiida.schedulers.datastructures import JobTemplate
        from aiida.common.datastructures import CodeInfo, CodeRunMode

        scheduler = TorqueScheduler()

        job_tmpl = JobTemplate()
        job_tmpl.shebang = '#!/bin/bash'
        job_tmpl.job_resource = scheduler.create_job_resource(
            num_machines=1,
            num_mpiprocs_per_machine=1,
            num_cores_per_mpiproc=24)
        job_tmpl.uuid = str(uuid.uuid4())
        job_tmpl.max_wallclock_seconds = 24 * 3600
        code_info = CodeInfo()
        code_info.cmdline_params = [
            'mpirun', '-np', '23', 'pw.x', '-npool', '1'
        ]
        code_info.stdin_name = 'aiida.in'
        job_tmpl.codes_info = [code_info]
        job_tmpl.codes_run_mode = CodeRunMode.SERIAL

        submit_script_text = scheduler.get_submit_script(job_tmpl)

        self.assertTrue('#PBS -r n' in submit_script_text)
        self.assertTrue(submit_script_text.startswith('#!/bin/bash'))
        self.assertTrue(
            '#PBS -l nodes=1:ppn=24,walltime=24:00:00' in submit_script_text)
        self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1'" +
                        " < 'aiida.in'" in submit_script_text)
Example No. 19
    def test_submit_script_with_num_cores_per_machine_and_mpiproc1(self):
        """
        Test that the submit script is generated correctly when both
        num_cores_per_machine and num_cores_per_mpiproc are given consistent values,
        i.e. when they satisfy the check:
        res.num_cores_per_mpiproc * res.num_mpiprocs_per_machine == res.num_cores_per_machine
        """
        from aiida.schedulers.datastructures import JobTemplate
        from aiida.common.datastructures import CodeInfo, CodeRunMode

        scheduler = PbsproScheduler()

        job_tmpl = JobTemplate()
        job_tmpl.shebang = '#!/bin/bash'
        job_tmpl.job_resource = scheduler.create_job_resource(
            num_machines=1, num_mpiprocs_per_machine=1, num_cores_per_machine=24, num_cores_per_mpiproc=24
        )
        job_tmpl.uuid = str(uuid.uuid4())
        job_tmpl.max_wallclock_seconds = 24 * 3600
        code_info = CodeInfo()
        code_info.cmdline_params = ['mpirun', '-np', '23', 'pw.x', '-npool', '1']
        code_info.stdin_name = 'aiida.in'
        job_tmpl.codes_info = [code_info]
        job_tmpl.codes_run_mode = CodeRunMode.SERIAL

        submit_script_text = scheduler.get_submit_script(job_tmpl)

        self.assertTrue('#PBS -r n' in submit_script_text)
        self.assertTrue(submit_script_text.startswith('#!/bin/bash'))
        self.assertTrue('#PBS -l select=1:mpiprocs=1:ppn=24' in submit_script_text)
        # Note: here 'num_cores_per_machine' should NOT override the mpiprocs

        self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1'" + " < 'aiida.in'" in submit_script_text)
Example No. 20
    def _prepare_for_submission(self, tempfolder, inputdict):
        """
        This is the routine to be called when you want to create
        the input files and related content with a plugin.

        :param tempfolder: an aiida.common.folders.Folder subclass where
                           the plugin should put all its files.
        :param inputdict: a dictionary with the input nodes, as they would
                be returned by get_inputs_dict (with the Code!)
        """
        try:
            parameters = inputdict.pop(self.get_linkname('parameters'))
        except KeyError:
            raise InputValidationError("No parameters specified for this "
                                       "calculation")
        if not isinstance(parameters, ParameterData):
            raise InputValidationError("parameters is not of type "
                                       "ParameterData")
        try:
            code = inputdict.pop(self.get_linkname('code'))
        except KeyError:
            raise InputValidationError("No code specified for this "
                                       "calculation")
        if inputdict:
            raise ValidationError("Cannot add other nodes beside parameters")

        ##############################
        # END OF INITIAL INPUT CHECK #
        ##############################

        input_json = parameters.get_dict()

        # write all the input to a file
        input_filename = tempfolder.get_abs_path(self._INPUT_FILE_NAME)
        with open(input_filename, 'w') as infile:
            json.dump(input_json, infile)

        # ============================ calcinfo ================================

        calcinfo = CalcInfo()
        calcinfo.uuid = self.uuid
        calcinfo.local_copy_list = []
        calcinfo.remote_copy_list = []
        calcinfo.retrieve_list = [self._OUTPUT_FILE_NAME]

        codeinfo = CodeInfo()
        codeinfo.cmdline_params = [
            self._INPUT_FILE_NAME, self._OUTPUT_FILE_NAME
        ]
        codeinfo.code_uuid = code.uuid
        calcinfo.codes_info = [codeinfo]

        return calcinfo
Example No. 21
    def _prepare_for_submission(self, tempfolder, inputdict):
        """
        This is the routine to be called when you want to create
        the input files and related stuff with a plugin.

        :param tempfolder: an aiida.common.folders.Folder subclass where
                           the plugin should put all its files.
        :param inputdict: a dictionary with the input nodes, as they would
                be returned by get_inputdata_dict (without the Code!)
        """

        in_nodes = self._verify_inlinks(inputdict)
        params, structure, code, settings, local_copy_list = in_nodes

        # write the raspa input file
        inp = RaspaInput(params)
        inp_fn = tempfolder.get_abs_path(self._INPUT_FILE_NAME)
        with open(inp_fn, "w") as f:
            f.write(inp.render())

        # create code info
        codeinfo = CodeInfo()
        cmdline = settings.pop('cmdline', [])
        cmdline += [self._INPUT_FILE_NAME]
        codeinfo.cmdline_params = cmdline
        codeinfo.code_uuid = code.uuid

        # create calc info
        calcinfo = CalcInfo()
        calcinfo.uuid = self.uuid
        calcinfo.stdin_name = self._INPUT_FILE_NAME
        calcinfo.cmdline_params = codeinfo.cmdline_params
#        calcinfo.stdout_name = self._OUTPUT_FILE_NAME
        calcinfo.codes_info = [codeinfo]

        # file lists
        calcinfo.remote_symlink_list = []
        calcinfo.local_copy_list = local_copy_list
        calcinfo.remote_copy_list = []
        calcinfo.retrieve_list = [[self._OUTPUT_FILE_NAME,'.',0]]
        calcinfo.retrieve_list += settings.pop('additional_retrieve_list', [])


        # check for left over settings
        if settings:
            msg = "The following keys have been found "
            msg += "in the settings input node {}, ".format(self.pk)
            msg += "but were not understood: " + ",".join(settings.keys())
            raise InputValidationError(msg)

        return calcinfo
Example No. 22
    def _prepare_for_submission(self, tempfolder, inputdict):
        """
        Create input files.

            :param tempfolder: aiida.common.folders.Folder subclass where
                the plugin should put all its files.
            :param inputdict: dictionary of the input nodes as they would
                be returned by get_inputs_dict
        """
        # Check inputdict
        try:
            parameters = inputdict.pop(self.get_linkname('parameters'))
        except KeyError:
            raise InputValidationError("No parameters specified for this "
                                       "calculation")
        if not isinstance(parameters, MultiplyParameters):
            raise InputValidationError("parameters not of type "
                                       "MultiplyParameters")
        try:
            code = inputdict.pop(self.get_linkname('code'))
        except KeyError:
            raise InputValidationError("No code specified for this "
                                       "calculation")
        if inputdict:
            raise ValidationError("Unknown inputs besides MultiplyParameters")

        # In this example, the input file is simply a json dict.
        # Adapt for your particular code!
        input_dict = parameters.get_dict()

        # Write input to file
        input_filename = tempfolder.get_abs_path(self._INPUT_FILE_NAME)
        with open(input_filename, 'w') as infile:
            json.dump(input_dict, infile)

        # Prepare CalcInfo to be returned to aiida
        calcinfo = CalcInfo()
        calcinfo.uuid = self.uuid
        calcinfo.local_copy_list = []
        calcinfo.remote_copy_list = []
        calcinfo.retrieve_list = [self._OUTPUT_FILE_NAME]

        codeinfo = CodeInfo()
        # will call ./code.py in.json out.json
        codeinfo.cmdline_params = [
            self._INPUT_FILE_NAME, self._OUTPUT_FILE_NAME
        ]
        codeinfo.code_uuid = code.uuid
        calcinfo.codes_info = [codeinfo]

        return calcinfo
Example No. 23
    def _prepare_for_submission(self, tempfolder, inputdict):
        calcinfo = CalcInfo()
        calcinfo.uuid = self.uuid
        calcinfo.remote_copy_list = []
        calcinfo.retrieve_list = []

        codeinfo = CodeInfo()
        codeinfo.cmdline_params = []
        codeinfo.stdout_name = self._OUTPUT_FILE_NAME
        code = inputdict.pop(self.get_linkname('code'))
        codeinfo.code_uuid = code.uuid
        calcinfo.codes_info = [codeinfo]

        return calcinfo
Example No. 24
    def _prepare_for_submission(self, tempfolder, inputdict):
        """
        This is the routine to be called when you want to create
        the input files and related content with a plugin.

        :param tempfolder: an aiida.common.folders.Folder subclass where
                           the plugin should put all its files.
        :param inputdict: a dictionary with the input nodes, as they would
                be returned by get_inputdata_dict (without the Code!)
        """
        
        code, params, parent_calc_folder, atomtypes_file = self._verify_inlinks(inputdict)
        
        # ---------------------------------------------------
        # Write params.ini file
        params_fn = tempfolder.get_abs_path("params.ini")
        with open(params_fn, 'w') as f:
            for key, val in params.items():
                line = str(key) + " "
                if isinstance(val, list):
                    line += " ".join(str(v) for v in val)
                else:
                    line += str(val)
                f.write(line + '\n')
        # ---------------------------------------------------
        
        # create code info
        codeinfo = CodeInfo()
        codeinfo.code_uuid = code.uuid
        codeinfo.withmpi = False

        # create calc info
        calcinfo = CalcInfo()
        calcinfo.uuid = self.uuid
        calcinfo.codes_info = [codeinfo]

        # file lists
        calcinfo.remote_symlink_list = []
        calcinfo.local_copy_list = [(atomtypes_file.get_file_abs_path(), 'atomtypes.ini')]
        calcinfo.remote_copy_list = []
        calcinfo.retrieve_list = ["*/*/*.npy"]

        # symlinks
        if parent_calc_folder is not None:
            comp_uuid = parent_calc_folder.get_computer().uuid
            remote_path = parent_calc_folder.get_remote_path()
            symlink = (comp_uuid, remote_path, "parent_calc_folder")
            calcinfo.remote_symlink_list.append(symlink)
        
        return calcinfo
Example No. 25
    def prepare_for_submission(self, folder):
        from aiida.common.datastructures import CalcInfo, CodeInfo

        local_copy_list = [(self.inputs.single_file.uuid,
                            self.inputs.single_file.filename, 'single_file')]

        for name, node in self.inputs.files.items():
            local_copy_list.append((node.uuid, node.filename, name))

        codeinfo = CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid

        calcinfo = CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = local_copy_list
        return calcinfo
Example No. 26
    def prepare_for_submission(self, folder):
        from aiida.common.datastructures import CalcInfo, CodeInfo

        # Use nested path for the target filename, where the directory does not exist, to check that the engine will
        # create intermediate directories as needed. Regression test for #4350
        local_copy_list = [(self.inputs.single_file.uuid, self.inputs.single_file.filename, 'path/single_file')]

        for name, node in self.inputs.files.items():
            local_copy_list.append((node.uuid, node.filename, name))

        codeinfo = CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid

        calcinfo = CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = local_copy_list
        return calcinfo
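As a side note, the local_copy_list entries used in the last two examples follow a fixed three-field format: the UUID of the node to copy from, the path of the file inside that node's repository, and the target path relative to the calculation's working directory. A minimal hedged sketch (assuming a loaded AiiDA profile; the file content and target path are illustrative):

    import io

    from aiida.orm import SinglefileData

    single_file = SinglefileData(io.BytesIO(b'example content'))
    local_copy_list = [
        # (node UUID, source path inside the node repository, target path in the working directory)
        (single_file.uuid, single_file.filename, 'inputs/data.txt'),
    ]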
Example No. 27
    def prepare_for_submission(self, folder):
        """Create the input files from the input nodes passed to this instance of the `CalcJob`.

        :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        from aiida_qeq.data.qeq import DEFAULT_CONFIGURE_FILE_NAME

        try:
            configure = self.inputs.configure
        except AttributeError:
            configure = QeqParameters()

        # Prepare CodeInfo object for aiida
        codeinfo = CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.cmdline_params = configure.cmdline_params(
            structure_file_name=self.inputs.structure.filename,
            param_file_name=self.inputs.parameters.filename)
        codeinfo.stdout_name = self._LOG_FILE_NAME

        # write configure.input file
        with io.StringIO(configure.configure_string) as handle:
            folder.create_file_from_filelike(
                handle, filename=DEFAULT_CONFIGURE_FILE_NAME, mode='w')

        # Prepare CalcInfo object for aiida
        calcinfo = CalcInfo()
        calcinfo.uuid = self.uuid
        calcinfo.local_copy_list = [
            [
                self.inputs.structure.uuid, self.inputs.structure.filename,
                self.inputs.structure.filename
            ],
            [
                self.inputs.parameters.uuid, self.inputs.parameters.filename,
                self.inputs.parameters.filename
            ],
        ]
        calcinfo.remote_copy_list = []
        calcinfo.retrieve_list = configure.output_files
        calcinfo.codes_info = [codeinfo]

        return calcinfo
Example No. 28
    def prepare_for_submission(self, folder):
        from aiida.common.datastructures import CalcInfo, CodeInfo

        for key, node in self.inputs.files.items():
            filepath = key.replace('_', os.sep)
            dirname = os.path.dirname(filepath)
            basename = os.path.basename(filepath)
            with node.open(mode='rb') as source:
                if dirname:
                    subfolder = folder.get_subfolder(dirname, create=True)
                    subfolder.create_file_from_filelike(source, basename)
                else:
                    folder.create_file_from_filelike(source, filepath)

        codeinfo = CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid

        calcinfo = CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.provenance_exclude_list = self.inputs.settings.get_attribute('provenance_exclude_list')
        return calcinfo
Example No. 29
    def prepare_for_submission(self, folder):
        """
        Create input files.

        :param folder: an `aiida.common.folders.Folder` where the plugin should temporarily place all files needed by the calculation.
        :return: `aiida.common.datastructures.CalcInfo` instance

        """
        # Prepare CalcInfo to be returned to aiida
        calcinfo = CalcInfo()
        calcinfo.uuid = self.uuid

        calcinfo.local_copy_list = [
            (self.inputs.file1.uuid, self.inputs.file1.filename,
             self.inputs.file1.filename),
            (self.inputs.file2.uuid, self.inputs.file2.filename,
             self.inputs.file2.filename),
        ]
        calcinfo.remote_copy_list = []
        calcinfo.retrieve_list = [self._OUTPUT_FILE_NAME]

        codeinfo = CodeInfo()
        codeinfo.cmdline_params = self.inputs.parameters.cmdline_params(
            file1_name=self.inputs.file1.filename,
            file2_name=self.inputs.file2.filename)
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.withmpi = False
        codeinfo.stdout_name = self._OUTPUT_FILE_NAME

        calcinfo.codes_info = [codeinfo]

        return calcinfo
Example No. 30
    def prepare_for_submission(self, folder):
        """Create the input files from the input nodes passed to this instance of the `CalcJob`.

        :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        # Prepare CodeInfo object for aiida
        codeinfo = CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.cmdline_params = self.inputs.parameters.cmdline_params(
            structure_file_name=self.inputs.structure.filename,
            ionization_file_name=self.inputs.ionization_data.filename,
            charge_file_name=self.inputs.charge_data.filename)
        codeinfo.stdout_name = self._LOG_FILE_NAME

        # Prepare CalcInfo object for aiida
        calcinfo = CalcInfo()
        calcinfo.uuid = self.uuid
        calcinfo.local_copy_list = [
            [
                self.inputs.structure.uuid, self.inputs.structure.filename,
                self.inputs.structure.filename
            ],
            [
                self.inputs.ionization_data.uuid,
                self.inputs.ionization_data.filename,
                self.inputs.ionization_data.filename
            ],
            [
                self.inputs.charge_data.uuid, self.inputs.charge_data.filename,
                self.inputs.charge_data.filename
            ],
        ]
        calcinfo.remote_copy_list = []
        calcinfo.retrieve_list = self.inputs.parameters.output_files(
            self.inputs.structure.filename)
        calcinfo.codes_info = [codeinfo]

        return calcinfo
Example No. 31
    def _prepare_for_submission(self, tempfolder, inputdict):

        settings = inputdict.pop(self.get_linkname('settings'), None)
        if settings is None:
            settings_dict = {}
        else:
            if not isinstance(settings, ParameterData):
                raise InputValidationError("settings, if specified, must be of "
                                           "type ParameterData")
            settings_dict = settings.get_dict()

        try:
            code = inputdict.pop(self.get_linkname('code'))
        except KeyError:
            raise InputValidationError("No code specified for this calculation")

        try:
            parameters = inputdict.pop(self.get_linkname('parameters'))
        except KeyError:
            raise InputValidationError("No parameters specified for this calculation")
        if not isinstance(parameters, ParameterData):
            raise InputValidationError("parameters is not of type ParameterData")

        calcinfo = CalcInfo()

        calcinfo.uuid = self.uuid
        # Empty command line by default
        cmdline_params = settings_dict.pop('CMDLINE', [])

        #calcinfo.stdin_name = self._INPUT_FILE_NAME
        #calcinfo.stdout_name = self._OUTPUT_FILE_NAME

        codeinfo = CodeInfo()
        codeinfo.cmdline_params = list(cmdline_params)
        ##calcinfo.stdin_name = self._INPUT_FILE_NAME
        codeinfo.stdout_name = self._OUTPUT_FILE_NAME
        codeinfo.code_uuid = code.uuid
        calcinfo.codes_info = [codeinfo]
        #
        #calcinfo.remote_copy_list = remote_copy_list
        #calcinfo.remote_symlink_list = remote_symlink_list

        calcinfo.local_copy_list = []

        # Retrieve by default the output file and the xml file
        calcinfo.retrieve_list = []
        calcinfo.retrieve_list.append(self._OUTPUT_FILE_NAME)
        calcinfo.retrieve_list.append("info.xml")
        #calcinfo.retrieve_list.append(self._DATAFILE_XML)
        #settings_retrieve_list = settings_dict.pop('ADDITIONAL_RETRIEVE_LIST', [])

        try:
            structure = inputdict.pop(self.get_linkname('structure'))
        except KeyError:
            raise InputValidationError("No structure specified for this calculation")
        if not isinstance(structure, StructureData):
            raise InputValidationError("structure is not of type StructureData")

        try:
            kpoints = inputdict.pop(self.get_linkname('kpoints'))
        except KeyError:
            raise InputValidationError("No kpoints specified for this calculation")
        if not isinstance(kpoints, KpointsData):
            raise InputValidationError("kpoints is not of type KpointsData")

        kmesh, koffset = kpoints.get_kpoints_mesh()
        
        lapw_basis_list = {}
        for link in inputdict.keys():
            if link.startswith("lapwbasis_"):
                kindstring = link[len("lapwbasis_"):]
                lapw_basis_list[kindstring] = inputdict.pop(link)

        import xml.etree.ElementTree as ET
        root = ET.Element("input")
        ET.SubElement(root, "title").text = "input file created with AiiDA"
        struct = ET.SubElement(root, "structure", attrib={'speciespath' : './', 'autormt' : 'true'})
        cryst = ET.SubElement(struct, "crystal", attrib={'scale' : '1.889725989'})

        from numpy import matrix
        lat_vec = matrix(structure.cell)
        # inverse of the transposed lattice matrix, used to convert Cartesian to fractional coordinates
        inv_lat_vec = lat_vec.T.I

        for vector in structure.cell:
            ET.SubElement(cryst, "basevect").text = " ".join(['%18.10f'%e for e in vector])

        for kind in structure.kinds:
            lapw_basis = lapw_basis_list[kind.symbol]
            calcinfo.local_copy_list.append((lapw_basis.get_file_abs_path(), lapw_basis.filename))

            s = ET.SubElement(struct, "species", attrib={'speciesfile' : lapw_basis.filename})
            for site in structure.sites:
                if site.kind_name == kind.name:
                    pos_cart = matrix(site.position)
                    pos_lat = inv_lat_vec * pos_cart.T
                    ET.SubElement(s, "atom", attrib={'coord' : " ".join(['%18.10f'%e for e in pos_lat])})
       
        parameters_dict = parameters.get_dict()
        groundstate_attrib = {}

        if 'groundstate' in parameters_dict:
            groundstate_attrib = parameters_dict['groundstate']
        groundstate_attrib['ngridk'] = " ".join(['%i'%e for e in kmesh])

#        ET.SubElement(root, "groundstate", attrib=groundstate_attrib)

        grnd = ET.SubElement(root, "groundstate", attrib=groundstate_attrib)
        solver_attrib = {}
        if 'solver' in parameters_dict:
            solver_attrib = parameters_dict['solver']
            ET.SubElement(grnd, "solver", attrib=solver_attrib)

        sirius_attrib = {}
        if 'sirius' in parameters_dict:
            sirius_attrib = parameters_dict['sirius']
            ET.SubElement(grnd, "sirius", attrib=sirius_attrib)

        libxc_attrib = {}
        if 'libxc' in parameters_dict:
            libxc_attrib = parameters_dict['libxc']
            ET.SubElement(grnd, "libxc", attrib=libxc_attrib)

        tree = ET.ElementTree(root)
        tree.write(tempfolder.get_abs_path("input.xml"))

        return calcinfo