Example #1
    def test_function_set_label_description(self):
        """Verify that the label and description can be set for all process function variants."""
        metadata = {'label': CUSTOM_LABEL, 'description': CUSTOM_DESCRIPTION}

        _, node = self.function_args.run_get_node(data_a=orm.Int(DEFAULT_INT),
                                                  metadata=metadata)
        self.assertEqual(node.label, CUSTOM_LABEL)
        self.assertEqual(node.description, CUSTOM_DESCRIPTION)

        _, node = self.function_args_with_default.run_get_node(
            metadata=metadata)
        self.assertEqual(node.label, CUSTOM_LABEL)
        self.assertEqual(node.description, CUSTOM_DESCRIPTION)

        _, node = self.function_kwargs.run_get_node(metadata=metadata)
        self.assertEqual(node.label, CUSTOM_LABEL)
        self.assertEqual(node.description, CUSTOM_DESCRIPTION)

        _, node = self.function_args_and_kwargs.run_get_node(
            data_a=orm.Int(DEFAULT_INT), metadata=metadata)
        self.assertEqual(node.label, CUSTOM_LABEL)
        self.assertEqual(node.description, CUSTOM_DESCRIPTION)

        _, node = self.function_args_and_default.run_get_node(
            data_a=orm.Int(DEFAULT_INT), metadata=metadata)
        self.assertEqual(node.label, CUSTOM_LABEL)
        self.assertEqual(node.description, CUSTOM_DESCRIPTION)
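
A minimal sketch of what the process-function variants exercised above might look like, using AiiDA's `@calcfunction` decorator; `DEFAULT_INT` and the function bodies are illustrative stand-ins, not the test suite's actual fixtures.

from aiida import orm
from aiida.engine import calcfunction

DEFAULT_INT = 256  # hypothetical constant, for illustration only


@calcfunction
def function_args(data_a):
    """Process function with a single positional argument."""
    return orm.Int(data_a.value)


@calcfunction
def function_args_with_default(data_a=orm.Int(DEFAULT_INT)):  # pylint: disable=dangerous-default-value
    """Process function with a single argument that has a default."""
    return orm.Int(data_a.value)


@calcfunction
def function_kwargs(**kwargs):
    """Process function accepting only keyword arguments, echoed as new output nodes."""
    return {key: orm.Int(node.value) for key, node in kwargs.items()}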
Example #2
    def test_input_and_create_links(self, temp_dir):
        """
        Simple test that will verify that INPUT and CREATE links are properly exported and
        correctly recreated upon import.
        """
        node_work = orm.CalculationNode()
        node_input = orm.Int(1).store()
        node_output = orm.Int(2).store()

        node_work.add_incoming(node_input, LinkType.INPUT_CALC, 'input')
        node_work.store()
        node_output.add_incoming(node_work, LinkType.CREATE, 'output')

        node_work.seal()

        export_links = get_all_node_links()
        export_file = os.path.join(temp_dir, 'export.aiida')
        export([node_output], filename=export_file)

        self.clean_db()

        import_data(export_file)
        import_links = get_all_node_links()

        export_set = [tuple(_) for _ in export_links]
        import_set = [tuple(_) for _ in import_links]

        self.assertSetEqual(set(export_set), set(import_set))
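
The `get_all_node_links` helper used above is not shown; one plausible sketch builds it on the `QueryBuilder`, projecting the UUIDs of both link endpoints together with the link label and type (the test suite's actual helper may differ).

def get_all_node_links():
    """Return, for every link in the database, the UUIDs of its endpoints plus the link label and type."""
    builder = orm.QueryBuilder()
    builder.append(orm.Node, project='uuid', tag='input')
    builder.append(orm.Node, project='uuid', with_incoming='input', edge_project=['label', 'type'])
    return builder.all()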
Example #3
    def test_builder_restart_work_chain(self):
        """Verify that nested namespaces imploded into flat link labels can be reconstructed into nested namespaces."""
        caller = orm.WorkChainNode().store()

        node = orm.WorkChainNode(process_type=ExampleWorkChain.build_process_type())
        node.add_incoming(self.inputs['dynamic']['namespace']['alp'], LinkType.INPUT_WORK, 'dynamic__namespace__alp')
        node.add_incoming(self.inputs['name']['spaced'], LinkType.INPUT_WORK, 'name__spaced')
        node.add_incoming(self.inputs['name_spaced'], LinkType.INPUT_WORK, 'name_spaced')
        node.add_incoming(self.inputs['boolean'], LinkType.INPUT_WORK, 'boolean')
        node.add_incoming(orm.Int(DEFAULT_INT).store(), LinkType.INPUT_WORK, 'default')
        node.add_incoming(caller, link_type=LinkType.CALL_WORK, link_label='CALL_WORK')
        node.store()

        builder = node.get_builder_restart()
        self.assertIn('dynamic', builder)
        self.assertIn('namespace', builder.dynamic)
        self.assertIn('alp', builder.dynamic.namespace)
        self.assertIn('name', builder)
        self.assertIn('spaced', builder.name)
        self.assertIn('name_spaced', builder)
        self.assertIn('boolean', builder)
        self.assertIn('default', builder)
        self.assertEqual(builder.dynamic.namespace['alp'], self.inputs['dynamic']['namespace']['alp'])
        self.assertEqual(builder.name.spaced, self.inputs['name']['spaced'])
        self.assertEqual(builder.name_spaced, self.inputs['name_spaced'])
        self.assertEqual(builder.boolean, self.inputs['boolean'])
        self.assertEqual(builder.default, orm.Int(DEFAULT_INT))
Example #4
def test_validate_distances_count(ctx):
    """Test the `validate_distances_count` validator."""
    assert dissociation.validate_distances_count(None, ctx) is None
    assert dissociation.validate_distances_count(orm.Int(3), ctx) is None

    assert dissociation.validate_distances_count(
        orm.Int(1), ctx) == 'need at least 2 distances.'
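
The validator itself is not shown; a minimal sketch of `validate_distances_count` following AiiDA's `(value, ctx)` validator convention, consistent with the assertions above (the plugin's real implementation may differ).

def validate_distances_count(value, _):
    """Return an error message if fewer than two distances are requested, `None` otherwise."""
    if value is not None and value.value < 2:
        return 'need at least 2 distances.'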
Example #5
    def test_calc_job_node_get_builder_restart(self):
        """Test the `CalcJobNode.get_builder_restart` method."""
        original = orm.CalcJobNode(
            computer=self.computer,
            process_type='aiida.calculations:arithmetic.add',
            label='original')
        original.set_option('resources', {
            'num_machines': 1,
            'num_mpiprocs_per_machine': 1
        })
        original.set_option('max_wallclock_seconds', 1800)

        original.add_incoming(orm.Int(1).store(),
                              link_type=LinkType.INPUT_CALC,
                              link_label='x')
        original.add_incoming(orm.Int(2).store(),
                              link_type=LinkType.INPUT_CALC,
                              link_label='y')
        original.store()

        builder = original.get_builder_restart()

        self.assertIn('x', builder)
        self.assertIn('y', builder)
        self.assertIn('metadata', builder)
        self.assertIn('options', builder.metadata)
        self.assertEqual(builder.x, orm.Int(1))
        self.assertEqual(builder.y, orm.Int(2))
        self.assertDictEqual(builder.metadata.options, original.get_options())
Example #6
    def test_output_dictionary(self):
        """Verify that a dictionary can be passed as an output for a namespace."""
        class TestProcess1(Process):
            """Defining a new TestProcess class for testing."""

            _node_class = orm.WorkflowNode

            @classmethod
            def define(cls, spec):
                super().define(spec)
                spec.input_namespace('namespace',
                                     valid_type=orm.Int,
                                     dynamic=True)
                spec.output_namespace('namespace',
                                      valid_type=orm.Int,
                                      dynamic=True)

            def run(self):
                self.out('namespace', self.inputs.namespace)

        results, node = run_get_node(TestProcess1,
                                     namespace={
                                         'alpha': orm.Int(1),
                                         'beta': orm.Int(2)
                                     })

        self.assertTrue(node.is_finished_ok)
        self.assertEqual(results['namespace']['alpha'], orm.Int(1))
        self.assertEqual(results['namespace']['beta'], orm.Int(2))
Example #7
def test_validate_scale_count(ctx):
    """Test the `validate_scale_count` validator."""
    assert eos.validate_scale_count(None, ctx) is None
    assert eos.validate_scale_count(orm.Int(3), ctx) is None

    assert eos.validate_scale_count(orm.Int(2),
                                    ctx) == 'need at least 3 scaling factors.'
Example #8
def test_add_default(fixture_sandbox, aiida_localhost, generate_calc_job):
    """Test a default `ArithmeticAddCalculation`."""
    entry_point_name = 'arithmetic.add'
    inputs = {'x': orm.Int(1), 'y': orm.Int(2), 'code': orm.Code(remote_computer_exec=(aiida_localhost, '/bin/bash'))}

    calc_info = generate_calc_job(fixture_sandbox, entry_point_name, inputs)
    options = ArithmeticAddCalculation.spec().inputs['metadata']['options']

    # Check the attributes of the returned `CalcInfo`
    assert isinstance(calc_info, datastructures.CalcInfo)
    assert sorted(calc_info.retrieve_list) == sorted([options['output_filename'].default])

    codes_info = calc_info.codes_info
    assert isinstance(codes_info, list)
    assert len(codes_info) == 1

    code_info = codes_info[0]
    assert isinstance(code_info, datastructures.CodeInfo)
    assert code_info.code_uuid == inputs['code'].uuid
    assert code_info.stdin_name == options['input_filename'].default
    assert code_info.stdout_name == options['output_filename'].default

    with fixture_sandbox.open(options['input_filename'].default) as handle:
        input_written = handle.read()
        assert input_written == 'echo $(({} + {}))\n'.format(inputs['x'].value, inputs['y'].value)
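
The `generate_calc_job` fixture is defined elsewhere; the usual pattern, sketched under the assumption that it simply instantiates the calculation and runs `prepare_for_submission` in the sandbox folder, looks roughly like this.

import pytest


@pytest.fixture
def generate_calc_job():
    """Return a factory that prepares a `CalcJob` in a sandbox folder and returns its `CalcInfo`."""

    def _generate_calc_job(folder, entry_point_name, inputs=None):
        from aiida.engine.utils import instantiate_process
        from aiida.manage.manager import get_manager
        from aiida.plugins import CalculationFactory

        runner = get_manager().get_runner()
        process_class = CalculationFactory(entry_point_name)
        process = instantiate_process(runner, process_class, **(inputs or {}))

        return process.prepare_for_submission(folder)

    return _generate_calc_job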
Example #9
 def test_hashes_different(self):
     """Test that the hashes generated for identical process functions with different inputs are different."""
     _, node1 = self.function_return_input.run_get_node(data=orm.Int(2))
     _, node2 = self.function_return_input.run_get_node(data=orm.Int(3))
     self.assertEqual(node1.get_hash(), node1.get_extra('_aiida_hash'))
     self.assertEqual(node2.get_hash(), node2.get_extra('_aiida_hash'))
     self.assertNotEqual(node1.get_hash(), node2.get_hash())
Example #10
    def test_workcalculation(self, temp_dir):
        """Test simple master/slave WorkChainNodes"""
        from aiida.common.links import LinkType

        master = orm.WorkChainNode()
        slave = orm.WorkChainNode()

        input_1 = orm.Int(3).store()
        input_2 = orm.Int(5).store()
        output_1 = orm.Int(2).store()

        master.add_incoming(input_1, LinkType.INPUT_WORK, 'input_1')
        slave.add_incoming(master, LinkType.CALL_WORK, 'CALL')
        slave.add_incoming(input_2, LinkType.INPUT_WORK, 'input_2')

        master.store()
        slave.store()

        output_1.add_incoming(master, LinkType.RETURN, 'RETURN')

        master.seal()
        slave.seal()

        uuids_values = [(v.uuid, v.value) for v in (output_1, )]
        filename1 = os.path.join(temp_dir, 'export1.tar.gz')
        export([output_1], outfile=filename1, silent=True)
        self.clean_db()
        self.insert_data()
        import_data(filename1, silent=True)

        for uuid, value in uuids_values:
            self.assertEqual(orm.load_node(uuid).value, value)
Example #11
    def test_port_names_overlapping_mutable_mapping_methods(self):  # pylint: disable=invalid-name
        """Check that port names take precedence over `collections.MutableMapping` methods.

        The `ProcessBuilderNamespace` is a `collections.MutableMapping` but since the port names are made accessible
        as attributes, they can overlap with some of the mapping's built-in methods, e.g. `values()`, `items()` etc.
        The port names should take precedence in this case and if one wants to access the mapping methods one needs to
        cast the builder to a dictionary first."""
        builder = ExampleWorkChain.get_builder()

        # The `values` method is obscured by a port that also happens to be called `values`, so calling it should raise
        with self.assertRaises(TypeError):
            builder.values()  # pylint: disable=not-callable

        # However, we can assign a node to it
        builder.values = orm.Int(2)

        # Calling the attribute `values` will then actually try to call the node, which should raise
        with self.assertRaises(TypeError):
            builder.values()  # pylint: disable=not-callable

        # Casting the builder to a dict, *should* then make `values` callable again
        self.assertIn(orm.Int(2), dict(builder).values())

        # The mapping methods should not be auto-completed, i.e. not in the values returned by calling `dir`
        for method in [method for method in dir(MutableMapping) if method != 'values']:
            self.assertNotIn(method, dir(builder))

        # On the other hand, all the port names *should* be present
        for port_name in ExampleWorkChain.spec().inputs.keys():
            self.assertIn(port_name, dir(builder))

        # The `update` method is implemented, but prefixed with an underscore to not block the name for a port
        builder.update({'boolean': orm.Bool(False)})
        self.assertEqual(builder.boolean, orm.Bool(False))
Example #12
    def test_function_args_with_default(self):
        """Simple process function that defines a single argument with a default."""
        arg = 1

        result = self.function_args_with_default()
        self.assertTrue(isinstance(result, orm.Int))
        self.assertEqual(result, orm.Int(DEFAULT_INT))

        result = self.function_args_with_default(data_a=orm.Int(arg))
        self.assertTrue(isinstance(result, orm.Int))
        self.assertEqual(result, arg)
Example #13
    def get_inputs_and_processclass_from_extras(self, extras_values):
        """Return inputs and process class for the submission of this specific process.

        I just submit an ArithmeticAdd calculation summing the two values stored in the extras:
        ``left_operand + right_operand``.
        """
        inputs = {
            'code': self._code,
            'x': orm.Int(extras_values[0]),
            'y': orm.Int(extras_values[1])
        }
        return inputs, self._process_class
Example #14
def test_leak_local_calcjob(aiida_local_code_factory):
    """Test whether running a local CalcJob leaks memory."""
    inputs = {
        'x': orm.Int(1),
        'y': orm.Int(2),
        'code': aiida_local_code_factory('arithmetic.add', '/bin/bash')
    }
    run_finished_ok(ArithmeticAddCalculation, **inputs)

    # check that no reference to the process is left in memory
    # some delay is necessary in order to allow for all callbacks to finish
    process_instances = get_instances(processes.Process, delay=0.2)
    assert not process_instances, f'Memory leak: process instances remain in memory: {process_instances}'
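
`run_finished_ok` is a small helper from the same test module; a plausible sketch simply runs the process and asserts a clean finish.

from aiida.engine import run_get_node


def run_finished_ok(process_class, **inputs):
    """Run the process and assert that it finished without an error code."""
    _, node = run_get_node(process_class, **inputs)
    assert node.is_finished_ok, node.exit_status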
Example #15
    def test_function_args_and_kwargs_default(self):
        """Simple process function that defines a positional argument and an argument with a default."""
        arg = 1
        args_input_default = (orm.Int(DEFAULT_INT),)
        args_input_explicit = (orm.Int(DEFAULT_INT), orm.Int(arg))

        result = self.function_args_and_default(*args_input_default)
        self.assertTrue(isinstance(result, dict))
        self.assertEqual(result, {'data_a': args_input_default[0], 'data_b': orm.Int(DEFAULT_INT)})

        result = self.function_args_and_default(*args_input_explicit)
        self.assertTrue(isinstance(result, dict))
        self.assertEqual(result, {'data_a': args_input_explicit[0], 'data_b': args_input_explicit[1]})
Example #16
    def test_function_with_none_default(self):
        """Simple process function that defines a keyword with `None` as default value."""
        int_a = orm.Int(1)
        int_b = orm.Int(2)
        int_c = orm.Int(3)

        result = self.function_with_none_default(int_a, int_b)
        self.assertTrue(isinstance(result, orm.Int))
        self.assertEqual(result, orm.Int(3))

        result = self.function_with_none_default(int_a, int_b, int_c)
        self.assertTrue(isinstance(result, orm.Int))
        self.assertEqual(result, orm.Int(6))
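
A sketch of how the `function_with_none_default` fixture could be defined: a calcfunction whose third keyword defaults to `None` and is skipped when not provided (illustrative only, not the suite's actual fixture).

from aiida import orm
from aiida.engine import calcfunction


@calcfunction
def function_with_none_default(int_a, int_b, int_c=None):
    """Sum two or three `Int` nodes, where the third argument defaults to `None`."""
    if int_c is not None:
        return orm.Int(int_a.value + int_b.value + int_c.value)
    return orm.Int(int_a.value + int_b.value)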
Example #17
    def test_launchers(self):
        """Verify that the various launchers are working."""
        result = run(self.function_return_true)
        self.assertTrue(result)

        result, node = run_get_node(self.function_return_true)
        self.assertTrue(result)
        self.assertEqual(result, get_true_node())
        self.assertTrue(isinstance(node, orm.CalcFunctionNode))

        # A process function can be submitted and will be run by a daemon worker as long as the function is importable
        # Note that the actual running is not tested here; that is covered in `.github/system_tests/test_daemon.py`.
        node = submit(add_multiply, x=orm.Int(1), y=orm.Int(2), z=orm.Int(3))
        assert isinstance(node, orm.WorkFunctionNode)
Example #18
 def setUpClass(cls, *args, **kwargs):
     super().setUpClass(*args, **kwargs)
     cls.computer.configure()  # pylint: disable=no-member
     cls.remote_code = orm.Code(remote_computer_exec=(cls.computer,
                                                      '/bin/bash')).store()
     cls.local_code = orm.Code(local_executable='bash',
                               files=['/bin/bash']).store()
     cls.inputs = {
         'x': orm.Int(1),
         'y': orm.Int(2),
         'metadata': {
             'options': {}
         }
     }
Example #19
def get_kpoint_grid_dimensionality(kpt_data):
    """Get the dimensionality of a k-point grid. If it cannot be determined, assume 3D."""
    if not isinstance(kpt_data, orm.KpointsData):
        raise InputValidationError(
            'Invalid type {} for parameter `kpt_data`'.format(type(kpt_data)))

    try:
        mesh = kpt_data.get_kpoints_mesh()[0]
    except AttributeError:
        # No regular mesh has been set for these k-points: fall back to assuming a 3D grid
        return orm.Int(3)

    dim = sum([j != 1 for j in mesh])

    return orm.Int(dim)
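
A short usage sketch: for a 4x4x1 mesh only two directions are sampled with more than one point, so the function returns `Int(2)` (example constructed here for illustration).

kpoints = orm.KpointsData()
kpoints.set_kpoints_mesh([4, 4, 1])

assert get_kpoint_grid_dimensionality(kpoints).value == 2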
Example #20
def test_leak_ssh_calcjob():
    """Test whether running a CalcJob over SSH leaks memory.

    Note: This relies on the 'slurm-ssh' computer being set up.
    """
    code = orm.Code(
        input_plugin_name='arithmetic.add',
        remote_computer_exec=[orm.load_computer('slurm-ssh'), '/bin/bash'])
    inputs = {'x': orm.Int(1), 'y': orm.Int(2), 'code': code}
    run_finished_ok(ArithmeticAddCalculation, **inputs)

    # check that no reference to the process is left in memory
    # some delay is necessary in order to allow for all callbacks to finish
    process_instances = get_instances(processes.Process, delay=0.2)
    assert not process_instances, f'Memory leak: process instances remain in memory: {process_instances}'
Example #21
    def test_exposed_outputs(self):
        """Test the ``Process.exposed_outputs`` method."""
        from aiida.common import AttributeDict
        from aiida.common.links import LinkType
        from aiida.engine.utils import instantiate_process
        from aiida.manage.manager import get_manager

        runner = get_manager().get_runner()

        class ChildProcess(Process):
            """Dummy process with normal output and output namespace."""

            _node_class = orm.WorkflowNode

            @classmethod
            def define(cls, spec):
                super(ChildProcess, cls).define(spec)
                spec.input('input', valid_type=orm.Int)
                spec.output('output', valid_type=orm.Int)
                spec.output('name.space', valid_type=orm.Int)

        class ParentProcess(Process):
            """Dummy process that exposes the outputs of ``ChildProcess``."""

            _node_class = orm.WorkflowNode

            @classmethod
            def define(cls, spec):
                super(ParentProcess, cls).define(spec)
                spec.input('input', valid_type=orm.Int)
                spec.expose_outputs(ChildProcess)

        node_child = orm.WorkflowNode().store()
        node_output = orm.Int(1).store()
        node_output.add_incoming(node_child, link_label='output', link_type=LinkType.RETURN)
        node_name_space = orm.Int(1).store()
        node_name_space.add_incoming(node_child, link_label='name__space', link_type=LinkType.RETURN)

        process = instantiate_process(runner, ParentProcess, input=orm.Int(1))
        exposed_outputs = process.exposed_outputs(node_child, ChildProcess)

        expected = AttributeDict({
            'name': {
                'space': node_name_space,
            },
            'output': node_output,
        })
        self.assertEqual(exposed_outputs, expected)
Example #22
    def test_namespaced_process(self):
        """Test that inputs in nested namespaces are properly validated and the link labels
        are properly formatted by connecting the namespaces with underscores."""
        proc = NameSpacedProcess(
            inputs={'some': {
                'name': {
                    'space': {
                        'a': orm.Int(5)
                    }
                }
            }})

        # Test that the namespaced inputs are AttributesFrozenDicts
        self.assertIsInstance(proc.inputs, AttributesFrozendict)
        self.assertIsInstance(proc.inputs.some, AttributesFrozendict)
        self.assertIsInstance(proc.inputs.some.name, AttributesFrozendict)
        self.assertIsInstance(proc.inputs.some.name.space,
                              AttributesFrozendict)

        # Test that the input node is in the inputs of the process
        input_node = proc.inputs.some.name.space.a
        self.assertTrue(isinstance(input_node, orm.Int))
        self.assertEqual(input_node.value, 5)

        # Check that the link of the process node has the correct link name
        self.assertTrue('some__name__space__a' in
                        proc.node.get_incoming().all_link_labels())
        self.assertEqual(
            proc.node.get_incoming().get_node_by_label('some__name__space__a'),
            5)
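
The `NameSpacedProcess` class is defined elsewhere in the test module; a minimal sketch of a process exposing the deeply nested input port used above, shown only to make the link-label flattening concrete (an assumed definition, not the original).

from aiida import orm
from aiida.engine import Process


class NameSpacedProcess(Process):
    """Process with a single `Int` input in a deeply nested namespace."""

    _node_class = orm.WorkflowNode

    @classmethod
    def define(cls, spec):
        super().define(spec)
        spec.input('some.name.space.a', valid_type=orm.Int)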
Example #23
def generate_inputs_base(
        protocol: t.Dict,
        code: orm.Code,
        structure: orm.StructureData,
        otfg_family: OTFGGroup,
        override: t.Dict[str, t.Any] = None) -> t.Dict[str, t.Any]:
    """Generate the inputs for the `CastepBaseWorkChain` for a given code, structure and pseudo potential family.

    :param protocol: the dictionary with protocol inputs.
    :param code: the code to use.
    :param structure: the input structure.
    :param otfg_family: the pseudo potential family.
    :param override: a dictionary to override specific inputs.
    :return: the fully defined input dictionary.
    """
    merged = recursive_merge(protocol, override or {})

    # Here we pass the base namespace in
    calc_dictionary = generate_inputs_calculation(protocol, code, structure,
                                                  otfg_family, override)
    # Structure and pseudos should be defined at the base level
    calc_dictionary.pop('pseudos')
    # Remove the kpoints input as here we use the spacing directly
    calc_dictionary.pop('kpoints', None)

    dictionary = {
        # Convert to CASTEP convention - no 2pi factor for real/reciprocal space conversion
        # This is the convention that CastepBaseWorkChain uses
        'kpoints_spacing': orm.Float(merged['kpoints_spacing'] / 2 / pi),
        'max_iterations': orm.Int(merged['max_iterations']),
        'pseudos_family': orm.Str(otfg_family.label),
        'calc': calc_dictionary
    }

    return dictionary
Example #24
    def test_input_after_stored(self):
        """Verify that adding an input link after storing a `ProcessNode` will raise because it is illegal."""
        from aiida.common import LinkType
        process = test_processes.DummyProcess()

        with self.assertRaises(ValueError):
            process.node.add_incoming(orm.Int(1), link_type=LinkType.INPUT_WORK, link_label='illegal_link')
Example #25
    def test_builder_inputs(self):
        """Test the `ProcessBuilder._inputs` method to get the inputs with and without `prune` set to True."""
        builder = LazyProcessNamespace.get_builder()

        # When no inputs are explicitly specified, `prune=True` should get rid of completely empty namespaces
        self.assertEqual(builder._inputs(prune=False), {'namespace': {'nested': {}}, 'metadata': {}})
        self.assertEqual(builder._inputs(prune=True), {})

        # With a specific input in `namespace` the case of `prune=True` should now only remove `metadata`
        integer = orm.Int(DEFAULT_INT)
        builder = LazyProcessNamespace.get_builder()
        builder.namespace.a = integer
        self.assertEqual(builder._inputs(prune=False), {'namespace': {'a': integer, 'nested': {}}, 'metadata': {}})
        self.assertEqual(builder._inputs(prune=True), {'namespace': {'a': integer}})

        # A value that is a `Node` instance but also happens to be an "empty mapping" should not be pruned
        empty_node = MappingData()
        builder = LazyProcessNamespace.get_builder()
        builder.namespace.a = empty_node
        self.assertEqual(builder._inputs(prune=False), {'namespace': {'a': empty_node, 'nested': {}}, 'metadata': {}})
        self.assertEqual(builder._inputs(prune=True), {'namespace': {'a': empty_node}})

        # Verify that empty lists are considered as a "value" and are not pruned
        builder = LazyProcessNamespace.get_builder()
        builder.namespace.c = list()
        self.assertEqual(builder._inputs(prune=False), {'namespace': {'c': [], 'nested': {}}, 'metadata': {}})
        self.assertEqual(builder._inputs(prune=True), {'namespace': {'c': []}})

        # Verify that empty lists, even in doubly nested namespace are considered as a "value" and are not pruned
        builder = LazyProcessNamespace.get_builder()
        builder.namespace.nested.bird = list()
        self.assertEqual(builder._inputs(prune=False), {'namespace': {'nested': {'bird': []}}, 'metadata': {}})
        self.assertEqual(builder._inputs(prune=True), {'namespace': {'nested': {'bird': []}}})
Example #26
def generate_inputs_base(protocol: Dict,
                         code: orm.Code,
                         structure: orm.StructureData,
                         otfg_family: OTFGGroup,
                         override: Dict[str, Any] = None) -> Dict[str, Any]:
    """Generate the inputs for the `PwBaseWorkChain` for a given code, structure and pseudo potential family.

    :param protocol: the dictionary with protocol inputs.
    :param code: the code to use.
    :param structure: the input structure.
    :param otfg_family: the pseudo potential family.
    :param override: a dictionary to override specific inputs.
    :return: the fully defined input dictionary.
    """
    merged = recursive_merge(protocol, override or {})

    # Here we pass the base namespace in
    calc_dictionary = generate_inputs_calculation(protocol, code, structure,
                                                  otfg_family,
                                                  (override or {}).get('calc', {}))
    # Structure and pseudos should be defined at the base level
    calc_dictionary.pop('structure')
    calc_dictionary.pop('pseudos')
    # Remove the kpoints input as here we use the spacing directly
    calc_dictionary.pop('kpoints', None)

    dictionary = {
        'kpoints_spacing': orm.Float(merged['kpoints_spacing']),
        'max_iterations': orm.Int(merged['max_iterations']),
        'pseudos_family': orm.Str(otfg_family.label),
        'calc': calc_dictionary
    }

    return dictionary
Example #27
 def define(cls, spec):
     super(BigDFTRelaxWorkChain, cls).define(spec)
     spec.expose_inputs(BigDFTBaseWorkChain,
                        exclude=['parameters',
                                 'extra_retrieved_files'])
     spec.input('parameters', valid_type=BigDFTParameters, required=False,
                default=lambda: orm.Dict(), help='param dictionary')
     spec.input('extra_retrieved_files', valid_type=List, required=False,
                help='', default=lambda: List())
     spec.input('relax.perform', valid_type=orm.Bool, required=False,
                default=lambda: orm.Bool(True), help='perform relaxation')
     spec.input('relax.algo', valid_type=orm.Str,
                default=lambda: orm.Str('FIRE'),
                help='algorithm to use during relaxation')
     spec.input('relax.threshold_forces', valid_type=orm.Float, required=False,
                default=lambda: orm.Float(0.0), help='force threshold, in eV/Ang')
     spec.input('relax.steps', valid_type=orm.Int, required=False,
                default=lambda: orm.Int(50),
                help='number of relaxation steps to perform.')
     spec.outline(
         cls.relax,
         cls.results,
     )
     spec.expose_outputs(BigDFTBaseWorkChain)
     spec.output('relaxed_structure', valid_type=StructureData,
                 required=False)
     spec.output('forces', valid_type=ArrayData, required=False)
     spec.output('total_energy', valid_type=orm.Float, required=False)
     spec.exit_code(101, 'ERROR_FAILED_RELAX',
                    'Subprocess failed for relaxation')
Example #28
 def function_defaults(
     data_a=orm.Int(DEFAULT_INT), metadata={
         'label': DEFAULT_LABEL,
         'description': DEFAULT_DESCRIPTION
     }
 ):  # pylint: disable=unused-argument,dangerous-default-value,missing-docstring
     return data_a
Example #29
 def define(cls, spec):
     super().define(spec)
     spec.input_namespace('dynamic.namespace', dynamic=True)
     spec.input('values', valid_type=orm.Int, help='Port name that overlaps with method of mutable mapping')
     spec.input('name.spaced', valid_type=orm.Int, help='Namespaced port')
     spec.input('name_spaced', valid_type=orm.Str, help='Not actually a namespaced port')
     spec.input('boolean', valid_type=orm.Bool, help='A pointless boolean')
     spec.input('default', valid_type=orm.Int, default=orm.Int(DEFAULT_INT).store())
Example #30
    def test_function_args_and_kwargs(self):
        """Simple process function that defines a positional argument and keyword arguments."""
        arg = 1
        args = (orm.Int(DEFAULT_INT), )
        kwargs = {'data_b': orm.Int(arg)}

        result = self.function_args_and_kwargs(*args)
        self.assertTrue(isinstance(result, dict))
        self.assertEqual(result, {'data_a': args[0]})

        result = self.function_args_and_kwargs(*args, **kwargs)
        self.assertTrue(isinstance(result, dict))
        self.assertEqual(result, {
            'data_a': args[0],
            'data_b': kwargs['data_b']
        })

        # Calling with more positional arguments than defined in the signature should raise
        with self.assertRaises(TypeError):
            self.function_kwargs.run_get_node(orm.Int(1), orm.Int(2))

        with self.assertRaises(TypeError):
            self.function_kwargs.run_get_node(orm.Int(1),
                                              orm.Int(2),
                                              b=orm.Int(2))