Code example #1
    def test_launchers_dry_run_no_provenance(self):
        """Test the launchers in `dry_run` mode with `store_provenance=False`."""
        from aiida.plugins import CalculationFactory

        ArithmeticAddCalculation = CalculationFactory('arithmetic.add')

        code = orm.Code(input_plugin_name='arithmetic.add',
                        remote_computer_exec=[self.computer,
                                              '/bin/true']).store()

        inputs = {
            'code': code,
            'x': orm.Int(1),
            'y': orm.Int(1),
            'metadata': {
                'dry_run': True,
                'store_provenance': False,
                'options': {
                    'resources': {
                        'num_machines': 1,
                        'num_mpiprocs_per_machine': 1
                    }
                }
            }
        }

        result = launch.run(ArithmeticAddCalculation, **inputs)
        self.assertEqual(result, {})

        result, pk = launch.run_get_pk(ArithmeticAddCalculation, **inputs)
        self.assertEqual(result, {})
        self.assertIsNone(pk)

        result, node = launch.run_get_node(ArithmeticAddCalculation, **inputs)
        self.assertEqual(result, {})
        self.assertIsInstance(node, orm.CalcJobNode)
        self.assertFalse(node.is_stored)
        self.assertIsInstance(node.dry_run_info, dict)
        self.assertIn('folder', node.dry_run_info)
        self.assertIn('script_filename', node.dry_run_info)

        node = launch.submit(ArithmeticAddCalculation, **inputs)
        self.assertIsInstance(node, orm.CalcJobNode)
        self.assertFalse(node.is_stored)
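The four launchers exercised above differ only in their return values. A minimal sketch of the pattern, assuming a hypothetical process class `MyProcess` and a prepared `inputs` dictionary:

from aiida.engine import launch

results = launch.run(MyProcess, **inputs)                 # outputs dictionary only
results, pk = launch.run_get_pk(MyProcess, **inputs)      # outputs dictionary and the process node's pk
results, node = launch.run_get_node(MyProcess, **inputs)  # outputs dictionary and the process node itself
node = launch.submit(MyProcess, **inputs)                 # process node only, normally handed to the daemon

With `store_provenance=False` nothing is stored, which is why the test expects empty result dictionaries, a `None` pk and unstored nodes.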
Code example #2
    def test_calcjob_dry_run_no_provenance(self):
        """Test that dry run with `store_provenance=False` still works for unstored inputs.

        The special thing about this test is that unstored input nodes are used in the `local_copy_list`.
        This was broken because the code in `upload_calculation` assumed that the nodes could be loaded through
        their UUID, which is not the case in the `store_provenance=False` mode with unstored nodes. Note that it
        also explicitly tests nested namespaces, as that is a non-trivial case.
        """
        import os
        import tempfile

        code = orm.Code(input_plugin_name='arithmetic.add', remote_computer_exec=[self.computer, '/bin/true']).store()

        with tempfile.NamedTemporaryFile('w+') as handle:
            handle.write('dummy_content')
            handle.flush()
            single_file = orm.SinglefileData(file=handle.name)
            file_one = orm.SinglefileData(file=handle.name)
            file_two = orm.SinglefileData(file=handle.name)

        inputs = {
            'code': code,
            'single_file': single_file,
            'files': {
                'file_one': file_one,
                'file_two': file_two,
            },
            'metadata': {
                'dry_run': True,
                'store_provenance': False,
                'options': {
                    'resources': {
                        'num_machines': 1,
                        'num_mpiprocs_per_machine': 1
                    }
                }
            }
        }

        _, node = launch.run_get_node(FileCalcJob, **inputs)
        self.assertIn('folder', node.dry_run_info)
        for filename in ['path', 'file_one', 'file_two']:
            self.assertIn(filename, os.listdir(node.dry_run_info['folder']))
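One detail worth isolating from this test: `SinglefileData` copies the file content into the node at construction time, so the nodes stay usable after the temporary file is gone, and here they are deliberately left unstored. A minimal self-contained sketch:

import tempfile

from aiida import orm

with tempfile.NamedTemporaryFile('w+') as handle:
    handle.write('dummy_content')
    handle.flush()
    single_file = orm.SinglefileData(file=handle.name)

# The temporary file is deleted once the context closes, but the node
# still carries the content and, as in the test above, is not stored.
assert single_file.get_content() == 'dummy_content'
assert not single_file.is_stored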
Code example #3
 def launch_evaluations(self):
     """
     Create evaluations for the current iteration step.
     """
     self.report('Launching pending evaluations.')
     with self.optimizer() as opt:
         evals = {}
         evaluate_process = load_object(self.inputs.evaluate_process.value)
         for idx, inputs in opt.create_inputs().items():
             self.report('Launching evaluation {}'.format(idx))
             inputs_merged = ChainMap(inputs,
                                      self.inputs.get('evaluate', {}))
             if is_process_function(evaluate_process):
                 _, node = run_get_node(evaluate_process, **inputs_merged)
             else:
                 node = self.submit(evaluate_process, **inputs_merged)
             evals[self.eval_key(idx)] = node
             self.indices_to_retrieve.append(idx)
     return self.to_context(**evals)
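The `ChainMap` merges the per-evaluation inputs with the optional shared `evaluate` inputs; lookups go left to right, so the per-evaluation values win on key collisions. A standalone sketch of that behaviour:

from collections import ChainMap

per_evaluation = {'x': 10}
shared_defaults = {'x': 1, 'y': 2}

merged = ChainMap(per_evaluation, shared_defaults)
assert merged['x'] == 10  # the first mapping takes precedence
assert merged['y'] == 2   # missing keys fall through to later mappings

The `is_process_function` branch exists because process functions execute in the local interpreter and cannot be submitted to the daemon, so they are run to completion with `run_get_node` while process classes go through `self.submit`.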
Code example #4
File: test_parse.py Project: greschd/aiida-tbmodels
def test_parse(
    configure,  # pylint: disable=unused-argument
    assert_finished,
    get_tbmodels_parse_builder,  # pylint: disable=redefined-outer-name
    check_calc_ok,
    sparsity  # pylint: disable=redefined-outer-name
):
    """
    Test the parse calculation when launched with 'run_get_node'.
    """
    from aiida.orm import SinglefileData
    from aiida.engine.launch import run_get_node

    builder = get_tbmodels_parse_builder
    output, calc = run_get_node(builder, **sparsity)

    assert_finished(calc.pk)
    check_calc_ok(calc)
    assert isinstance(output['tb_model'], SinglefileData)
    assert calc.get_hash() == calc.get_extra('_aiida_hash')
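The last assertion touches AiiDA's caching bookkeeping: when a calculation node is stored, its content hash is recorded in the `_aiida_hash` extra, which is what the caching mechanism later queries to find equivalent calculations, so recomputing `get_hash()` should reproduce that stored value.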
Code example #5
File: test_launch.py Project: CasperWA/aiida_core
    def test_launchers_dry_run(self):
        """All launchers should work with `dry_run=True`, even `submit` which forwards to `run`."""
        from aiida.plugins import CalculationFactory

        ArithmeticAddCalculation = CalculationFactory('arithmetic.add')  # pylint: disable=invalid-name

        code = orm.Code(input_plugin_name='arithmetic.add',
                        remote_computer_exec=[self.computer,
                                              '/bin/true']).store()

        inputs = {
            'code': code,
            'x': orm.Int(1),
            'y': orm.Int(1),
            'metadata': {
                'dry_run': True,
                'options': {
                    'resources': {
                        'num_machines': 1,
                        'num_mpiprocs_per_machine': 1
                    }
                }
            }
        }

        result = launch.run(ArithmeticAddCalculation, **inputs)
        self.assertEqual(result, {})

        result, pk = launch.run_get_pk(ArithmeticAddCalculation, **inputs)
        self.assertEqual(result, {})
        self.assertIsInstance(pk, int)

        result, node = launch.run_get_node(ArithmeticAddCalculation, **inputs)
        self.assertEqual(result, {})
        self.assertIsInstance(node, orm.CalcJobNode)
        self.assertIsInstance(node.dry_run_info, dict)
        self.assertIn('folder', node.dry_run_info)
        self.assertIn('script_filename', node.dry_run_info)

        node = launch.submit(ArithmeticAddCalculation, **inputs)
        self.assertIsInstance(node, orm.CalcJobNode)
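Unlike code example #1, this test leaves `store_provenance` at its default of `True`, so even in a dry run the process node is stored: `run_get_pk` therefore returns a real integer pk instead of `None`.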
Code example #6
File: conftest.py Project: zooks97/aiida-optimize
    def inner(engine, func_workchain, engine_kwargs, evaluate=None):  # pylint: disable=missing-docstring,useless-suppression
        from aiida_optimize import OptimizationWorkChain
        from aiida.orm import load_node
        from aiida.orm import Dict
        from aiida.engine.launch import run_get_node, submit

        inputs = dict(
            engine=engine,
            engine_kwargs=Dict(dict=dict(engine_kwargs)),
            evaluate_process=func_workchain,
            evaluate=evaluate if evaluate is not None else {},
        )

        if request.param == 'run':
            _, result_node = run_get_node(OptimizationWorkChain, **inputs)
        else:
            assert request.param == 'submit'
            pk = submit(OptimizationWorkChain, **inputs).pk
            wait_for(pk)
            result_node = load_node(pk)
        return result_node
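Because `submit` only hands the work chain to the daemon and returns immediately, this fixture has to block on `wait_for(pk)` (a helper presumably defined elsewhere in the same conftest) before reloading the finished node with `load_node`; the `run_get_node` branch blocks on its own and needs no waiting.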
Code example #7
def test_create_evaluate_basic(configure_with_daemon, echo_process):  # pylint: disable=unused-argument,redefined-outer-name
    """
    Test the CreateEvaluateWorkChain by chaining two basic processes.
    """
    from aiida.engine.launch import run_get_node

    CreateEvaluateWorkChain = WorkflowFactory(
        'optimize.wrappers.create_evaluate')  # pylint: disable=invalid-name

    res, node = run_get_node(
        CreateEvaluateWorkChain,
        create_process=echo_process,
        evaluate_process=echo_process,
        create={'x': orm.Float(1)},
        output_input_mapping=orm.Dict(dict={'result': 'x'}))
    assert node.is_finished_ok
    assert 'create' in res
    assert 'evaluate' in res
    assert 'result' in res['create']
    assert 'result' in res['evaluate']
    assert res['create']['result'].value == 1
    assert res['evaluate']['result'].value == 1
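Because the wrapper exposes the outputs of its two sub-processes under the `create` and `evaluate` namespaces, `run_get_node` returns them as nested dictionaries, hence the `res['create']['result']` style of access.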
Code example #8
 def test_workchain_run_get_node(self):
     result, node = launch.run_get_node(AddWorkChain, a=self.a, b=self.b)
     self.assertEqual(result['result'], self.result)
     self.assertTrue(isinstance(node, orm.WorkChainNode))
Code example #9
 def test_calcfunction_run_get_node(self):
     result, node = launch.run_get_node(add, a=self.a, b=self.b)
     self.assertEqual(result, self.result)
     self.assertTrue(isinstance(node, orm.CalcFunctionNode))
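Note the asymmetry with the work chain test above: a calcfunction that returns a single node hands that node back directly, so `result` is compared as a whole, whereas a work chain's outputs always come back as a dictionary keyed by output label, hence `result['result']`.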
Code example #10
File: launch_add.py Project: borellim/aiida_add
                'num_machines': Int(1)
            },
            'max_wallclock_seconds': Float(30*60),  # 30 min
            'withmpi': False,
        }
    }
}

daemon = False  # set to True to use the daemon (non-blocking),
                # False to use a local runner (blocking)

if submit_test:
    raise NotImplementedError("Is there an equivalent for calc.submit_test?")
    # subfolder, script_filename = calc.submit_test()
    # print "Test submit file in {}".format(os.path.join(
    #     os.path.relpath(subfolder.abspath),
    #     script_filename
    #     ))
else:
    if daemon:
        new_calc = launch.submit(AddCalculation, **inputs)
        click.echo('Submitted {}<{}> to the daemon'.format(AddCalculation.__name__, new_calc.pk))
    else:
        click.echo('Running an add calculation... ')
        _, new_calc = launch.run_get_node(AddCalculation, **inputs)
        click.echo('AddCalculation<{}> terminated with state: {}'.format(new_calc.pk, new_calc.process_state))
        click.echo('\n{link:25s} {node}'.format(link='Output link', node='Node pk and type'))
        click.echo('{s}'.format(s='-' * 60))
        for triple in sorted(new_calc.get_outgoing().all(), key=lambda triple: triple.link_label):
            click.echo('{:25s} {}<{}> '.format(triple.link_label, triple.node.__class__.__name__, triple.node.pk))
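The `NotImplementedError` above asks for the modern equivalent of `calc.submit_test()`; as code examples #1 and #5 show, that role is played by the `dry_run` metadata flag, which writes the submission script to a local folder without actually submitting anything. A sketch of how the branch could be ported, assuming the full `inputs` dictionary carries a `metadata` entry as in those examples:

import os

inputs['metadata']['dry_run'] = True
_, test_node = launch.run_get_node(AddCalculation, **inputs)
folder = test_node.dry_run_info['folder']
script = test_node.dry_run_info['script_filename']
click.echo('Test submit file in {}'.format(os.path.join(os.path.relpath(folder), script)))

Note also that in aiida-core 1.x the values inside `options`, such as `num_machines` and `max_wallclock_seconds`, are expected to be plain Python numbers rather than `Int`/`Float` nodes.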
Code example #11
 def test_calcfunction_run_get_node(self):
     """Test calcfunction run by run_get_node."""
     result, node = launch.run_get_node(add, term_a=self.term_a, term_b=self.term_b)
     self.assertEqual(result, self.result)
     self.assertTrue(isinstance(node, orm.CalcFunctionNode))
Code example #12
 def test_workchain_run_get_node(self):
     """Test workchain run by run_get_node."""
     result, node = launch.run_get_node(AddWorkChain, term_a=self.term_a, term_b=self.term_b)
     self.assertEqual(result['result'], self.result)
     self.assertTrue(isinstance(node, orm.WorkChainNode))
Code example #13
def launch_cif_clean(cif_filter, cif_select, group_cif_raw, group_cif_clean, group_structure, group_workchain, node,
    max_entries, skip_check, parse_engine, daemon):
    """Run the `CifCleanWorkChain` on the entries in a group with raw imported CifData nodes.

    It will use the `cif_filter` and `cif_select` scripts of `cod-tools` to clean the input cif file. Additionally, if
    the `group-structure` option is passed, the workchain will also attempt to use the given parse engine to parse the
    cleaned `CifData` to obtain the structure and then use SeeKpath to find the primitive structure, which, if
    successful, will be added to the `group-structure` group.
    """
    # pylint: disable=too-many-arguments,too-many-locals,too-many-statements,too-many-branches
    import inspect
    from datetime import datetime

    from aiida import orm
    from aiida.engine import launch
    from aiida.plugins import DataFactory, WorkflowFactory
    from aiida_codtools.cli.utils.display import echo_utc
    from aiida_codtools.common.resources import get_default_options
    from aiida_codtools.common.utils import get_input_node

    CifData = DataFactory('cif')  # pylint: disable=invalid-name
    CifCleanWorkChain = WorkflowFactory('codtools.cif_clean')  # pylint: disable=invalid-name

    # Collect the dictionary of non-None parameters passed to the launch script and print it to screen
    local_vars = locals()
    launch_parameters = {}
    for arg in inspect.getargspec(launch_cif_clean.callback).args:  # pylint: disable=deprecated-method
        if arg in local_vars and local_vars[arg]:
            launch_parameters[arg] = local_vars[arg]

    click.echo('=' * 80)
    click.echo('Starting on {}'.format(datetime.utcnow().isoformat()))
    click.echo('Launch parameters: {}'.format(launch_parameters))
    click.echo('-' * 80)

    if group_cif_raw is not None:

        # Get CifData nodes that should actually be submitted according to the input filters
        builder = orm.QueryBuilder()
        builder.append(orm.Group, filters={'id': {'==': group_cif_raw.pk}}, tag='group')

        if skip_check:
            builder.append(CifData, with_group='group', project=['*'])
        else:
            # Get CifData nodes that already have an associated workchain node in the `group_workchain` group.
            submitted = orm.QueryBuilder()
            submitted.append(orm.WorkChainNode, tag='workchain')
            submitted.append(orm.Group, filters={'id': {'==': group_workchain.pk}}, with_node='workchain')
            submitted.append(orm.CifData, with_outgoing='workchain', tag='data', project=['id'])
            submitted_nodes = set(pk for entry in submitted.all() for pk in entry)

            if submitted_nodes:
                filters = {'id': {'!in': submitted_nodes}}
            else:
                filters = {}

            # Get all CifData nodes that are not included in the submitted node list
            builder.append(CifData, with_group='group', filters=filters, project=['*'])

        if max_entries is not None:
            builder.limit(int(max_entries))

        nodes = [entry[0] for entry in builder.all()]

    elif node is not None:

        nodes = [node]

    else:
        raise click.BadParameter('you have to specify either --group-cif-raw or --node')

    counter = 0

    node_cif_filter_parameters = get_input_node(orm.Dict, {
        'fix-syntax-errors': True,
        'use-c-parser': True,
        'use-datablocks-without-coordinates': True,
    })

    node_cif_select_parameters = get_input_node(orm.Dict, {
        'canonicalize-tag-names': True,
        'dont-treat-dots-as-underscores': True,
        'invert': True,
        'tags': '_publ_author_name,_citation_journal_abbrev',
        'use-c-parser': True,
    })

    node_parse_engine = get_input_node(orm.Str, parse_engine)
    node_site_tolerance = get_input_node(orm.Float, 5E-4)
    node_symprec = get_input_node(orm.Float, 5E-3)

    for cif in nodes:

        inputs = {
            'cif': cif,
            'cif_filter': {
                'code': cif_filter,
                'parameters': node_cif_filter_parameters,
                'metadata': {
                    'options': get_default_options()
                }
            },
            'cif_select': {
                'code': cif_select,
                'parameters': node_cif_select_parameters,
                'metadata': {
                    'options': get_default_options()
                }
            },
            'parse_engine': node_parse_engine,
            'site_tolerance': node_site_tolerance,
            'symprec': node_symprec,
        }

        if group_cif_clean is not None:
            inputs['group_cif'] = group_cif_clean

        if group_structure is not None:
            inputs['group_structure'] = group_structure

        if daemon:
            workchain = launch.submit(CifCleanWorkChain, **inputs)
            echo_utc('CifData<{}> submitting: {}<{}>'.format(cif.pk, CifCleanWorkChain.__name__, workchain.pk))
        else:
            echo_utc('CifData<{}> running: {}'.format(cif.pk, CifCleanWorkChain.__name__))
            _, workchain = launch.run_get_node(CifCleanWorkChain, **inputs)

        if group_workchain is not None:
            group_workchain.add_nodes([workchain])

        counter += 1

        if max_entries is not None and counter >= max_entries:
            break

    click.echo('-' * 80)
    click.echo('Submitted {} new workchains'.format(counter))
    click.echo('Stopping on {}'.format(datetime.utcnow().isoformat()))
    click.echo('=' * 80)
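The core of the submission filter above is a two-step QueryBuilder pattern: first collect the pks of `CifData` nodes that already have a work chain in the bookkeeping group, then query the raw group with a negated `'!in'` filter. A condensed sketch, with hypothetical `Group` nodes `group_raw` and `group_wc`:

from aiida import orm

# Step 1: pks of CifData nodes that already got a workchain in group_wc.
done_query = orm.QueryBuilder()
done_query.append(orm.WorkChainNode, tag='workchain')
done_query.append(orm.Group, filters={'id': {'==': group_wc.pk}}, with_node='workchain')
done_query.append(orm.CifData, with_outgoing='workchain', project=['id'])
already_done = {pk for (pk,) in done_query.iterall()}

# Step 2: everything in the raw group that is not in that set; an empty
# set means no filter at all, mirroring the guard in the code above.
filters = {'id': {'!in': already_done}} if already_done else {}
todo_query = orm.QueryBuilder()
todo_query.append(orm.Group, filters={'id': {'==': group_raw.pk}}, tag='group')
todo_query.append(orm.CifData, with_group='group', filters=filters, project=['*'])
todo = [row[0] for row in todo_query.all()]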