def run(self):
    """Run the calculation job.

    This means invoking the `presubmit` and storing the temporary folder in the node's repository. Then we move the
    process in the `Wait` state, waiting for the `UPLOAD` transport task to be started.

    :returns: the node's ``exit_status`` when the node was taken from the cache, ``plumpy.Stop`` for a dry run, or
        ``plumpy.Wait`` to schedule the ``UPLOAD`` transport task.
    """
    if self.inputs.metadata.dry_run:
        # Imports are local so the dry-run-only dependencies are not paid on the normal code path.
        from aiida.common.folders import SubmitTestFolder
        from aiida.engine.daemon.execmanager import upload_calculation
        from aiida.transports.plugins.local import LocalTransport

        # A dry run performs the full `presubmit` + upload sequence, but against a local transport and a
        # test folder on disk, so nothing is sent to the actual computer.
        with LocalTransport() as transport:
            with SubmitTestFolder() as folder:
                calc_info = self.presubmit(folder)
                transport.chdir(folder.abspath)
                upload_calculation(self.node, transport, calc_info, folder, inputs=self.inputs, dry_run=True)
                # Record where the inputs were written so the caller can inspect the dry-run output.
                self.node.dry_run_info = {
                    'folder': folder.abspath,
                    'script_filename': self.node.get_option('submit_script_filename')
                }
        # Stop the process here: a dry run never proceeds to the upload/submit transport tasks.
        return plumpy.Stop(None, True)

    # The following conditional is required for the caching to properly work. Even if the source node has a process
    # state of `Finished` the cached process will still enter the running state. The process state will have then
    # been overridden by the engine to `Running` so we cannot check that, but if the `exit_status` is anything other
    # than `None`, it should mean this node was taken from the cache, so the process should not be rerun.
    if self.node.exit_status is not None:
        return self.node.exit_status

    # Launch the upload operation: transition to the `Wait` state until the `UPLOAD` transport task runs.
    return plumpy.Wait(msg='Waiting to upload', data=UPLOAD_COMMAND)
def test_upload_local_copy_list(fixture_sandbox, aiida_localhost, aiida_local_code_factory):
    """Test the ``local_copy_list`` functionality in ``upload_calculation``.

    Specifically, verify that files in the ``local_copy_list`` do not end up in the repository of the node.

    :param fixture_sandbox: sandbox folder fixture used as the upload staging folder.
    :param aiida_localhost: fixture providing the localhost ``Computer``.
    :param aiida_local_code_factory: fixture to create a local ``Code`` instance.
    """
    from aiida.common.datastructures import CalcInfo, CodeInfo
    from aiida.orm import CalcJobNode, SinglefileData

    # Two distinct input files so the test actually exercises multiple entries in the ``local_copy_list``.
    inputs = {
        'file_a': SinglefileData(io.BytesIO(b'content_a')).store(),
        'file_b': SinglefileData(io.BytesIO(b'content_b')).store(),
    }

    node = CalcJobNode(computer=aiida_localhost)
    node.store()

    code = aiida_local_code_factory('arithmetic.add', '/bin/bash').store()
    code_info = CodeInfo()
    code_info.code_uuid = code.uuid

    calc_info = CalcInfo()
    calc_info.uuid = node.uuid
    calc_info.codes_info = [code_info]
    # Fix: the second entry previously referenced ``file_a`` again, so ``file_b`` was stored but never
    # copied and the list did not actually test more than one source node.
    calc_info.local_copy_list = [
        (inputs['file_a'].uuid, inputs['file_a'].filename, './files/file_a'),
        (inputs['file_b'].uuid, inputs['file_b'].filename, './files/file_b'),
    ]

    with LocalTransport() as transport:
        execmanager.upload_calculation(node, transport, calc_info, fixture_sandbox)

    # Files copied through ``local_copy_list`` must not be duplicated into the node's repository.
    assert node.list_object_names() == []
def run(self):
    """Run the calculation job.

    This means invoking the `presubmit` and storing the temporary folder in the node's repository. Then we move the
    process in the `Wait` state, waiting for the `UPLOAD` transport task to be started.

    :raises InputValidationError: if one of the input codes cannot run on the node's computer.
    :returns: the node's ``exit_status`` when taken from the cache, ``plumpy.Stop`` for a dry run, or
        ``plumpy.Wait`` to schedule the ``UPLOAD`` transport task.
    """
    from aiida.orm import Code, load_node
    from aiida.common.folders import SandboxFolder, SubmitTestFolder
    from aiida.common.exceptions import InputValidationError

    # The following conditional is required for the caching to properly work. Even if the source node has a process
    # state of `Finished` the cached process will still enter the running state. The process state will have then
    # been overridden by the engine to `Running` so we cannot check that, but if the `exit_status` is anything other
    # than `None`, it should mean this node was taken from the cache, so the process should not be rerun.
    if self.node.exit_status is not None:
        return self.node.exit_status

    # A dry run writes to a persistent test folder on disk; a real run uses a throwaway sandbox.
    if self.inputs.metadata.dry_run:
        folder_class = SubmitTestFolder
    else:
        folder_class = SandboxFolder

    with folder_class() as folder:
        computer = self.node.computer

        if not self.inputs.metadata.dry_run and self.node.has_cached_links():
            raise exceptions.InvalidOperation('calculation node has unstored links in cache')

        calc_info, script_filename = self.presubmit(folder)
        calc_info.uuid = str(self.uuid)
        # Resolve every code referenced by the calc info and verify it can run on the target computer.
        input_codes = [load_node(_.code_uuid, sub_classes=(Code,)) for _ in calc_info.codes_info]

        for code in input_codes:
            if not code.can_run_on(computer):
                raise InputValidationError(
                    'The selected code {} for calculation {} cannot run on computer {}'.format(
                        code.pk, self.node.pk, computer.name))

        # After this call, no modifications to the folder should be done
        self.node.put_object_from_tree(folder.abspath, force=True)

        if self.inputs.metadata.dry_run:
            # Imports local to the dry-run branch so the normal path does not pay for them.
            from aiida.engine.daemon.execmanager import upload_calculation
            from aiida.transports.plugins.local import LocalTransport
            with LocalTransport() as transport:
                transport.chdir(folder.abspath)
                upload_calculation(self.node, transport, calc_info, script_filename, dry_run=True)
                # Record where the inputs were written so the caller can inspect the dry-run output.
                self.node.dry_run_info = {
                    'folder': folder.abspath,
                    'script_filename': script_filename
                }
            # Stop the process here: a dry run never proceeds to the upload/submit transport tasks.
            return plumpy.Stop(None, True)

    # Launch the upload operation: transition to the `Wait` state until the `UPLOAD` transport task runs.
    return plumpy.Wait(msg='Waiting to upload', data=(UPLOAD_COMMAND, calc_info, script_filename))
def do_upload():
    """Coroutine that performs the upload once a transport becomes available from the queue."""
    with transport_queue.request_transport(authinfo) as request:
        # Suspend until the queued transport is granted; honour cancellation while waiting.
        transport = yield cancellable.with_interrupt(request)

        with SandboxFolder() as folder:
            # Any exception thrown in `presubmit` call is not transient so we circumvent the exponential backoff
            try:
                calc_info = process.presubmit(folder)
            except Exception as exc:  # pylint: disable=broad-except
                raise PreSubmitException('exception occurred in presubmit call') from exc

            # Only reached when `presubmit` succeeded; perform the actual upload from the sandbox.
            execmanager.upload_calculation(node, transport, calc_info, folder)

        raise Return
}, 'label': args.label }, 'pseudos': pseudos } nlcgcalc = CalculationFactory('sirius.py.nlcg') calc = nlcgcalc(inputs) with SandboxFolder() as sandbox_folder: calc_info, script_filename = calc.presubmit(sandbox_folder) calc_info.uuid = calc.node.uuid # calc_info = calc.prepare_for_submission(folder=None) from aiida.engine.daemon import execmanager with computer.get_transport() as transport: calc_info, script_filename = execmanager.upload_calculation( calc.node, transport, calc_info, script_filename=script_filename) # dummy submit job_id = -1 calc.node.set_job_id(job_id) # copy stdout to workdir instead of running the actual calculation remote_workdir = calc.node.attributes['remote_workdir'] copyfile('sirius.py.nlcg.out', os.path.join(remote_workdir, 'sirius.py.nlcg.out')) calc.node.set_state(CalcJobState.RETRIEVING) execmanager.retrieve_calculation(calc.node, transport, retrieved_temporary_folder=None) calc.node.set_state(CalcJobState.PARSING) execmanager.parse_results(calc)
def do_upload():
    """Coroutine that uploads the calculation and returns the result of the upload call."""
    with transport_queue.request_transport(authinfo) as request:
        # Suspend until the queued transport is granted; honour cancellation while waiting.
        transport = yield cancellable.with_interrupt(request)
        # Name the result before raising so the returned value is explicit.
        result = execmanager.upload_calculation(node, transport, calc_info, script_filename)
        raise Return(result)