Exemplo n.º 1
0
    def get_builder(self):
        """Create and return a new `ProcessBuilder` for the `CalcJob` class of the plugin configured for this code.

        The configured calculation plugin class is defined by the `get_input_plugin_name` method.

        .. note:: it also sets the ``builder.code`` value.

        :return: a `ProcessBuilder` instance with the `code` input already populated with ourselves
        :raise aiida.common.EntryPointError: if the specified plugin does not exist.
        :raise ValueError: if no default plugin was specified.
        """
        # Local imports to avoid circular imports at module load time. `EntryPointError`
        # must be imported here as well: the original code referenced it without any
        # import in scope, which would raise a `NameError` instead of the documented error.
        from aiida.common import EntryPointError
        from aiida.plugins import CalculationFactory

        plugin_name = self.get_input_plugin_name()

        if plugin_name is None:
            raise ValueError(
                'no default calculation input plugin specified for this code')

        try:
            process_class = CalculationFactory(plugin_name)
        except EntryPointError as exception:
            # Chain the original exception so the root cause stays in the traceback.
            raise EntryPointError(
                'the calculation entry point `{}` could not be loaded'.format(
                    plugin_name)) from exception

        builder = process_class.get_builder()
        # Pre-populate the `code` input with this code node itself.
        builder.code = self

        return builder
Exemplo n.º 2
0
def launch_inpgen(structure, inpgen, calc_parameters, settings, daemon):
    """
    Launch an inpgen calcjob on given input

    If no code is given it queries the DB for inpgen codes and uses the one with
    the newest creation time.

    Either structure or anysource_structure can be specified.
    Default structure is Si bulk.
    """
    process_class = CalculationFactory('fleur.inpgen')

    # inpgen is always executed serially, hence the fixed resources.
    options = {
        'withmpi': False,
        'max_wallclock_seconds': 6000,
        'resources': {
            'num_machines': 1,
            'num_mpiprocs_per_machine': 1,
        },
    }
    # Drop any inputs the user did not provide before handing them to the builder.
    inputs = clean_nones({
        'code': inpgen,
        'structure': structure,
        'parameters': calc_parameters,
        'settings': settings,
        'metadata': {
            'options': options,
        },
    })

    builder = process_class.get_builder()
    builder.update(inputs)
    launch_process(builder, daemon)
Exemplo n.º 3
0
def launch_fleur(fleurinp, fleur, parent_folder, settings, daemon,
                 max_num_machines, max_wallclock_seconds,
                 num_mpiprocs_per_machine, option_node, with_mpi, launch_base):
    """
    Launch a base_fleur workchain.
    If launch_base is False launch a single fleur calcjob instead.

    """
    # Resolve both process classes up front, mirroring the eager entry-point lookup.
    process_class = CalculationFactory('fleur.fleur')
    workchain_class = WorkflowFactory('fleur.base')

    resources = {
        'num_machines': max_num_machines,
        'num_mpiprocs_per_machine': num_mpiprocs_per_machine,
    }
    scheduler_options = {
        'withmpi': with_mpi,
        'max_wallclock_seconds': max_wallclock_seconds,
        'resources': resources,
    }

    if not launch_base:
        # Plain calcjob: scheduler options go through the metadata namespace.
        calc_inputs = clean_nones({
            'code': fleur,
            'fleurinpdata': fleurinp,
            'parent_folder': parent_folder,
            'settings': settings,
            'metadata': {
                'options': scheduler_options,
            },
        })
        builder = process_class.get_builder()
        builder.update(calc_inputs)
    else:
        # The base workchain expects the scheduler options wrapped in a Dict node.
        if option_node is None:
            option_node = Dict(dict=scheduler_options)

        base_inputs = clean_nones({
            'code': fleur,
            'fleurinpdata': fleurinp,
            'parent_folder': parent_folder,
            'settings': settings,
            'options': option_node,
        })
        builder = workchain_class.get_builder()
        builder.update(**base_inputs)

    launch_process(builder, daemon)
Exemplo n.º 4
0
def silicon_builder(db_test_app):
    """Prepare a mock - ready calculation for silicon"""
    r_unit = 2.6954645

    # Primitive fcc cell of silicon with a two-atom basis.
    silicon = orm.StructureData()
    silicon.set_cell(np.array([[1, 1, 0], [1, 0, 1], [0, 1, 1]]) * r_unit)
    silicon.append_atom(symbols=["Si"], position=[0, 0, 0])
    silicon.append_atom(symbols=["Si"], position=[r_unit * 0.5] * 3)
    silicon.label = "Si"
    silicon.description = "A silicon structure"

    # Keywords are grouped into two sub-dictionaries, just like you would do
    # when preparing the inputs by hand; wrap them in a Dict node.
    param = orm.Dict(dict={
        "CELL": {
            "symmetry_generate": True,
            "snap_to_symmetry": True,
            # Pass a list of string to set a BLOCK inputs
            #"cell_constraints":
            #["0 0 0", "0 0 0"]
        },
        "PARAM": {
            "task": "singlepoint",
            "basis_precision": "medium",
            "fix_occupancy": True,  # Use bool type to make it easy for querying
            "opt_strategy": "memory",
            "num_dump_cycles": 0,
            "write_formatted_density": True
        }
    })

    # 4x4x4 gamma-centred Monkhorst-Pack mesh.
    kpoints = orm.KpointsData()
    kpoints.set_kpoints_mesh((4, 4, 4), offset=(0, 0, 0))

    c9 = OTFGData(otfg_entry="C9")

    # Locate the mock executable on PATH and register it as a Code node.
    code_path = check_output(['which', 'castep.mock'],
                             universal_newlines=True).strip()
    castep_mock = orm.Code((db_test_app.localhost, code_path),
                           input_plugin_name='castep.castep')

    builder = CalculationFactory('castep.castep').get_builder()
    builder.structure = silicon
    builder.parameters = param
    builder.kpoints = kpoints
    builder.code = castep_mock
    builder.pseudos = {'Si': c9}
    builder.metadata.options.withmpi = False
    builder.metadata.options.resources = {
        'num_machines': 1,
        'tot_num_mpiprocs': 2
    }
    builder.metadata.options.max_wallclock_seconds = 600
    builder.metadata.label = "Si SINGLEPOINT"
    builder.metadata.description = 'A Example CASTEP calculation for silicon'
    return builder
Exemplo n.º 5
0
def get_inputs_inpgen(structure, inpgencode, options, label='', description='', settings=None, params=None, **kwargs):
    '''
    Assembles the input dictionary for Fleur Calculation.

    :param structure: input structure of StructureData type
    :param inpgencode: inpgen code of Code type
    :param options: calculation options that will be stored in metadata
    :param label: a string setting a label of the CalcJob in the DB
    :param description: a string setting a description of the CalcJob in the DB
    :param params: input parameters for inpgen code of Dict type

    Example of use::

        inputs_build = get_inputs_inpgen(structure, inpgencode, options, label,
                                         description, params=params)
        future = self.submit(inputs_build)

    '''

    FleurinpProcess = CalculationFactory('fleur.inpgen')
    inputs = FleurinpProcess.get_builder()

    # Only set inputs that were actually provided.
    if structure:
        inputs.structure = structure
    if inpgencode:
        inputs.code = inpgencode
    if params:
        inputs.parameters = params
    if settings:
        inputs.settings = settings

    inputs.metadata.description = description if description else ''
    inputs.metadata.label = label if label else ''

    # Copy the options so the caller's dictionary is not mutated in place
    # (the original implementation wrote into the passed-in dict).
    options = dict(options) if options else {}
    # inpgen run always serial
    options['withmpi'] = False
    options['resources'] = {'num_machines': 1, 'num_mpiprocs_per_machine': 1}
    # `options` is always non-empty here, so it is set unconditionally
    # (the old `if options:` guard was dead code).
    inputs.metadata.options = options

    # Currently this does not work, find out howto...
    # for key, val in kwargs.items():
    #    inputs[key] = val

    return inputs
Exemplo n.º 6
0
    def builder(self):
        """Assemble and return a CP2K calculation builder from the machine settings."""
        machine = self.machine

        builder = CalculationFactory('cp2k').get_builder()
        builder.structure = self.structure
        builder.parameters = self.parameters
        builder.code = Code.get_from_string(machine['code@computer'])

        # Scheduler options taken from the LSF-style machine description.
        builder.metadata.options.resources = {'tot_num_mpiprocs': machine['n']}
        builder.metadata.options.max_wallclock_seconds = machine['W']
        builder.metadata.options.queue_name = machine['q']
        bsub_resource = machine['R']
        builder.metadata.options.custom_scheduler_commands = f'#BSUB -R \"{bsub_resource}\"'
        return builder
Exemplo n.º 7
0
def main(
    parameters: orm.Dict,
    structure: orm.StructureData,
    potential: LammpsPotentialData,
    options: AttributeDict,
    code: orm.Code,
) -> orm.Node:
    """
    Submission of the calculation for an MD run in ``LAMMPS``.

    :param parameters: calculation parameters to control the ``LAMMPS`` calculation
    :type parameters: orm.Dict
    :param structure: structure to be used in the calculation
    :type structure: orm.StructureData
    :param potential: potential to be used in the calculation
    :type potential: LammpsPotentialData
    :param options: options to control the submission parameters
    :type options: AttributeDict
    :param code: code describing the ``LAMMPS`` calculation
    :type code: orm.Code
    :return: node containing the ``LAMMPS`` calculation
    :rtype: orm.Node
    """
    calculation = CalculationFactory('lammps.base')

    # Collect all inputs on a builder, then submit them to the daemon.
    builder = calculation.get_builder()
    builder.code = code
    builder.structure = structure
    builder.parameters = parameters
    builder.potential = potential
    builder.metadata.options = options

    return submit(calculation, **builder)
Exemplo n.º 8
0
calc.description = "A much longer description"

# Attach the code and all input nodes (legacy `use_*` calculation API).
calc.use_code(code)

calc.use_structure(structure)
calc.use_parameters(Dict(dict=dynaphopy_parameters))
calc.use_force_constants(force_constants)
calc.use_trajectory(trajectory)

# Store the calculation together with all of its inputs, then submit it.
calc.store_all()

calc.submit()
print("submitted calculation with PK={}".format(calc.dbnode.pk))

LammpsOptimizeCalculation = CalculationFactory('lammps.optimize')
inputs = LammpsOptimizeCalculation.get_builder()

# Computer options
options = AttributeDict()
options.account = ''
options.qos = ''
options.resources = {
    'num_machines': 1,
    'num_mpiprocs_per_machine': 1,
    'parallel_env': 'localmpi',
    'tot_num_mpiprocs': 1
}
#options.queue_name = 'iqtc04.q'
options.max_wallclock_seconds = 3600
inputs.metadata.options = options
Exemplo n.º 9
0
class TestProcessBuilder(AiidaTestCase):
    """Test the `ProcessBuilder` class and the builders returned by processes and nodes."""
    def setUp(self):
        super().setUp()
        # No process may be running when a test starts.
        self.assertIsNone(Process.current())
        self.process_class = CalculationFactory('templatereplacer')
        self.builder = self.process_class.get_builder()
        self.builder_workchain = ExampleWorkChain.get_builder()
        # Reference inputs reused by the dynamic setter/getter and restart tests;
        # nodes are stored so they can be used as incoming links later on.
        self.inputs = {
            'dynamic': {
                'namespace': {
                    'alp': orm.Int(1).store()
                }
            },
            'name': {
                'spaced': orm.Int(1).store(),
            },
            'name_spaced': orm.Str('underscored').store(),
            'boolean': orm.Bool(True).store(),
            'metadata': {}
        }

    def tearDown(self):
        super().tearDown()
        # Each test must leave no process running behind.
        self.assertIsNone(Process.current())

    def test_builder_inputs(self):
        """Test the `ProcessBuilder._inputs` method to get the inputs with and without `prune` set to True."""
        builder = LazyProcessNamespace.get_builder()

        # When no inputs are specified specifically, `prune=True` should get rid of completely empty namespaces
        self.assertEqual(builder._inputs(prune=False), {
            'namespace': {
                'nested': {}
            },
            'metadata': {}
        })
        self.assertEqual(builder._inputs(prune=True), {})

        # With a specific input in `namespace` the case of `prune=True` should now only remove `metadata`
        integer = orm.Int(DEFAULT_INT)
        builder = LazyProcessNamespace.get_builder()
        builder.namespace.a = integer
        self.assertEqual(builder._inputs(prune=False), {
            'namespace': {
                'a': integer,
                'nested': {}
            },
            'metadata': {}
        })
        self.assertEqual(builder._inputs(prune=True),
                         {'namespace': {
                             'a': integer
                         }})

        # A value that is a `Node` instance but also happens to be an "empty mapping" should not be pruned
        empty_node = MappingData()
        builder = LazyProcessNamespace.get_builder()
        builder.namespace.a = empty_node
        self.assertEqual(builder._inputs(prune=False), {
            'namespace': {
                'a': empty_node,
                'nested': {}
            },
            'metadata': {}
        })
        self.assertEqual(builder._inputs(prune=True),
                         {'namespace': {
                             'a': empty_node
                         }})

        # Verify that empty lists are considered as a "value" and are not pruned
        builder = LazyProcessNamespace.get_builder()
        builder.namespace.c = list()
        self.assertEqual(builder._inputs(prune=False), {
            'namespace': {
                'c': [],
                'nested': {}
            },
            'metadata': {}
        })
        self.assertEqual(builder._inputs(prune=True), {'namespace': {'c': []}})

        # Verify that empty lists, even in doubly nested namespace are considered as a "value" and are not pruned
        builder = LazyProcessNamespace.get_builder()
        builder.namespace.nested.bird = list()
        self.assertEqual(builder._inputs(prune=False), {
            'namespace': {
                'nested': {
                    'bird': []
                }
            },
            'metadata': {}
        })
        self.assertEqual(builder._inputs(prune=True),
                         {'namespace': {
                             'nested': {
                                 'bird': []
                             }
                         }})

    def test_process_builder_attributes(self):
        """Check that the builder has all the input ports of the process class as attributes."""
        for name, _ in self.process_class.spec().inputs.items():
            self.assertTrue(hasattr(self.builder, name))

    def test_process_builder_set_attributes(self):
        """Verify that setting attributes in builder works."""
        label = 'Test label'
        description = 'Test description'

        self.builder.metadata.label = label
        self.builder.metadata.description = description

        self.assertEqual(self.builder.metadata.label, label)
        self.assertEqual(self.builder.metadata.description, description)

    def test_dynamic_setters(self):
        """Verify that the attributes of the DummyWorkChain can be set but defaults are not there."""
        self.builder_workchain.dynamic.namespace = self.inputs['dynamic'][
            'namespace']
        self.builder_workchain.name.spaced = self.inputs['name']['spaced']
        self.builder_workchain.name_spaced = self.inputs['name_spaced']
        self.builder_workchain.boolean = self.inputs['boolean']
        self.assertEqual(self.builder_workchain, self.inputs)

    def test_dynamic_getters_value(self):
        """Verify that getters will return the actual value."""
        self.builder_workchain.dynamic.namespace = self.inputs['dynamic'][
            'namespace']
        self.builder_workchain.name.spaced = self.inputs['name']['spaced']
        self.builder_workchain.name_spaced = self.inputs['name_spaced']
        self.builder_workchain.boolean = self.inputs['boolean']

        # Verify that the correct type is returned by the getter
        self.assertTrue(
            isinstance(self.builder_workchain.dynamic.namespace, dict))
        self.assertTrue(isinstance(self.builder_workchain.name.spaced,
                                   orm.Int))
        self.assertTrue(isinstance(self.builder_workchain.name_spaced,
                                   orm.Str))
        self.assertTrue(isinstance(self.builder_workchain.boolean, orm.Bool))

        # Verify that the correct value is returned by the getter
        self.assertEqual(self.builder_workchain.dynamic.namespace,
                         self.inputs['dynamic']['namespace'])
        self.assertEqual(self.builder_workchain.name.spaced,
                         self.inputs['name']['spaced'])
        self.assertEqual(self.builder_workchain.name_spaced,
                         self.inputs['name_spaced'])
        self.assertEqual(self.builder_workchain.boolean,
                         self.inputs['boolean'])

    def test_dynamic_getters_doc_string(self):
        """Verify that getters have the correct docstring."""
        builder = ExampleWorkChain.get_builder()
        self.assertEqual(builder.__class__.name_spaced.__doc__,
                         str(ExampleWorkChain.spec().inputs['name_spaced']))
        self.assertEqual(builder.__class__.boolean.__doc__,
                         str(ExampleWorkChain.spec().inputs['boolean']))

    def test_builder_restart_work_chain(self):
        """Verify that nested namespaces imploded into flat link labels can be reconstructed into nested namespaces."""
        caller = orm.WorkChainNode().store()

        # Build a stored node whose incoming links use the flat `a__b__c` label convention.
        node = orm.WorkChainNode(
            process_type=ExampleWorkChain.build_process_type())
        node.add_incoming(self.inputs['dynamic']['namespace']['alp'],
                          LinkType.INPUT_WORK, 'dynamic__namespace__alp')
        node.add_incoming(self.inputs['name']['spaced'], LinkType.INPUT_WORK,
                          'name__spaced')
        node.add_incoming(self.inputs['name_spaced'], LinkType.INPUT_WORK,
                          'name_spaced')
        node.add_incoming(self.inputs['boolean'], LinkType.INPUT_WORK,
                          'boolean')
        node.add_incoming(
            orm.Int(DEFAULT_INT).store(), LinkType.INPUT_WORK, 'default')
        node.add_incoming(caller,
                          link_type=LinkType.CALL_WORK,
                          link_label='CALL_WORK')
        node.store()

        # The restart builder should expose the links as nested namespaces again.
        builder = node.get_builder_restart()
        self.assertIn('dynamic', builder)
        self.assertIn('namespace', builder.dynamic)
        self.assertIn('alp', builder.dynamic.namespace)
        self.assertIn('name', builder)
        self.assertIn('spaced', builder.name)
        self.assertIn('name_spaced', builder)
        self.assertIn('boolean', builder)
        self.assertIn('default', builder)
        self.assertEqual(builder.dynamic.namespace['alp'],
                         self.inputs['dynamic']['namespace']['alp'])
        self.assertEqual(builder.name.spaced, self.inputs['name']['spaced'])
        self.assertEqual(builder.name_spaced, self.inputs['name_spaced'])
        self.assertEqual(builder.boolean, self.inputs['boolean'])
        self.assertEqual(builder.default, orm.Int(DEFAULT_INT))

    def test_port_names_overlapping_mutable_mapping_methods(self):  # pylint: disable=invalid-name
        """Check that port names take precedence over `collections.MutableMapping` methods.

        The `ProcessBuilderNamespace` is a `collections.MutableMapping` but since the port names are made accessible
        as attributes, they can overlap with some of the mappings builtin methods, e.g. `values()`, `items()` etc.
        The port names should take precendence in this case and if one wants to access the mapping methods one needs to
        cast the builder to a dictionary first."""
        builder = ExampleWorkChain.get_builder()

        # The `values` method is obscured by a port that also happens to be called `values`, so calling it should raise
        with self.assertRaises(TypeError):
            builder.values()  # pylint: disable=not-callable

        # However, we can assign a node to it
        builder.values = orm.Int(2)

        # Calling the attribute `values` will then actually try to call the node, which should raise
        with self.assertRaises(TypeError):
            builder.values()  # pylint: disable=not-callable

        # Casting the builder to a dict, *should* then make `values` callable again
        self.assertIn(orm.Int(2), dict(builder).values())

        # The mapping methods should not be auto-completed, i.e. not in the values returned by calling `dir`
        for method in [
                method for method in dir(MutableMapping) if method != 'values'
        ]:
            self.assertNotIn(method, dir(builder))

        # On the other hand, all the port names *should* be present
        for port_name in ExampleWorkChain.spec().inputs.keys():
            self.assertIn(port_name, dir(builder))

        # The `update` method is implemented, but prefixed with an underscore to not block the name for a port
        builder.update({'boolean': orm.Bool(False)})
        self.assertEqual(builder.boolean, orm.Bool(False))

    def test_calc_job_node_get_builder_restart(self):
        """Test the `CalcJobNode.get_builder_restart` method."""
        original = orm.CalcJobNode(
            computer=self.computer,
            process_type='aiida.calculations:arithmetic.add',
            label='original')
        original.set_option('resources', {
            'num_machines': 1,
            'num_mpiprocs_per_machine': 1
        })
        original.set_option('max_wallclock_seconds', 1800)

        original.add_incoming(orm.Int(1).store(),
                              link_type=LinkType.INPUT_CALC,
                              link_label='x')
        original.add_incoming(orm.Int(2).store(),
                              link_type=LinkType.INPUT_CALC,
                              link_label='y')
        original.store()

        # The restart builder must reproduce both the inputs and the options.
        builder = original.get_builder_restart()

        self.assertIn('x', builder)
        self.assertIn('y', builder)
        self.assertIn('metadata', builder)
        self.assertIn('options', builder.metadata)
        self.assertEqual(builder.x, orm.Int(1))
        self.assertEqual(builder.y, orm.Int(2))
        self.assertDictEqual(builder.metadata.options, original.get_options())

    def test_code_get_builder(self):
        """Test that the `Code.get_builder` method returns a builder where the code is already set."""
        code = orm.Code()
        code.set_remote_computer_exec((self.computer, '/bin/true'))
        code.label = 'test_code'
        code.set_input_plugin_name('templatereplacer')
        code.store()

        # Check that I can get a builder
        builder = code.get_builder()
        self.assertEqual(builder.code.pk, code.pk)

        # Check that I can set the parameters
        builder.parameters = orm.Dict(dict={})

        # Check that it complains for an unknown input
        with self.assertRaises(AttributeError):
            builder.unknown_parameter = 3

        # Check that it complains if the type is not the correct one (for the templatereplacer, it should be a Dict)
        with self.assertRaises(ValueError):
            builder.parameters = orm.Int(3)
        38
    )  # Loads node that contains the harmonic force constants (Array data)

    machine = {"num_machines": 1, "parallel_env": "mpi*", "tot_num_mpiprocs": 16}

    # Parameters controlling the molecular dynamics run.
    parameters_md = {
        "timestep": 0.001,
        "temperature": 300,
        "thermostat_variable": 0.5,
        "equilibrium_steps": 2000,
        "total_steps": 2000,
        "dump_rate": 1,
    }

    CombinateCalculation = CalculationFactory("lammps.force")
    inputs = CombinateCalculation.get_builder()

    # Computer options
    options = AttributeDict()
    options.account = ""
    options.qos = ""
    options.resources = {
        "num_machines": 1,
        "num_mpiprocs_per_machine": 1,
        "parallel_env": "localmpi",
        "tot_num_mpiprocs": 1,
    }
    # options.queue_name = 'iqtc04.q'
    options.max_wallclock_seconds = 3600
    inputs.metadata.options = options
Exemplo n.º 11
0
your_cell = [20, 20, 20]  # cell size, i.e. the ``cell`` entry in input.inp
your_pbc = [True, True, True]  # periodic boundary conditions; leave as-is for now
your_code = 'cp2k@chenglab51'


class ExampleInputSets(InputSetsFromFile):
    """Example input sets bound to the bundled JSON config and the DZVP kind sections."""
    def __init__(self, structure, config=your_json, kind_section_config='DZVPSets'):
        super(ExampleInputSets, self).__init__(structure, config, kind_section_config)


# Generate the cp2k input.inp (not cp2k.lsf)
input_structure = path2structure(your_structure, your_cell, your_pbc)
eis = ExampleInputSets(input_structure)
total_input_sets = eis.generate_cp2k_input_file()
print(total_input_sets)

# Set the parameters and prepare the submission to the server
Cp2kCalculation = CalculationFactory('cp2k')
builder = Cp2kCalculation.get_builder()
builder.structure = StructureData(ase=input_structure)
builder.parameters = Dict(dict=eis.input_sets)
builder.code = Code.get_from_string(your_code)
builder.metadata.options.resources = {'tot_num_mpiprocs': 28}
builder.metadata.options.max_wallclock_seconds = 20 * 60
builder.metadata.options.queue_name = 'small'

# Dry run: only generate the test submission files instead of actually
# submitting to the server, so no compute resources are used
builder.metadata.dry_run = True
print("Submitted calculation...")
run(builder)
Exemplo n.º 12
0
            'temp': [300, 300, 0.5]
        }
    },
    "neighbor": [0.3, "bin"],
    "neigh_modify": {
        "every": 1,
        "delay": 0,
        "check": False
    },
    'equilibrium_steps': 100,
    'total_steps': 2000,
    'dump_rate': 1
}

# Build the calcjob builder for a LAMMPS molecular dynamics run.
LammpsMDCalculation = CalculationFactory('lammps.md')
inputs = LammpsMDCalculation.get_builder()

# Computer options
options = AttributeDict()
options.account = ''
options.qos = ''
options.resources = {
    'num_machines': 1,
    'num_mpiprocs_per_machine': 1,
    'parallel_env': 'localmpi',
    'tot_num_mpiprocs': 1
}
#options.queue_name = 'iqtc04.q'
options.max_wallclock_seconds = 3600
inputs.metadata.options = options
Exemplo n.º 13
0
def create_builder_from_file(input_folder, input_file_name, code, metadata, pseudo_folder_path=None, use_first=False):
    """Create a populated process builder for a `PwCalculation` from a standard QE input file and pseudo (upf) files

    :param input_folder: the folder containing the input file
    :type input_folder: aiida.common.folders.Folder or str
    :param input_file_name: the name of the input file
    :type input_file_name: str
    :param code: the code associated with the calculation
    :type code: aiida.orm.Code or str
    :param metadata: metadata values for the calculation (e.g. resources)
    :type metadata: dict
    :param pseudo_folder_path: the folder containing the upf files (if None, then input_folder is used)
    :type pseudo_folder_path: aiida.common.folders.Folder or str or None
    :param use_first: passed to UpfData.get_or_create
    :type use_first: bool
    :raises NotImplementedError: if the structure is not ibrav=0
    :return: a builder instance for PwCalculation
    """
    PwCalculation = CalculationFactory('quantumespresso.pw')

    builder = PwCalculation.get_builder()
    builder.metadata = metadata
    builder.code = Code.get_from_string(code) if isinstance(code, six.string_types) else code

    # Parse the QE input file into structure, k-points and namelists.
    if isinstance(input_folder, six.string_types):
        input_folder = Folder(input_folder)
    with input_folder.open(input_file_name) as handle:
        parsed_file = PwInputFile(handle)

    builder.structure = parsed_file.get_structuredata()
    builder.kpoints = parsed_file.get_kpointsdata()

    if parsed_file.namelists['SYSTEM']['ibrav'] != 0:
        raise NotImplementedError('Found ibrav != 0: `aiida-quantumespresso` currently only supports ibrav = 0.')

    # Then, strip the namelist items that the plugin doesn't allow or sets later.
    # NOTE: If any of the position or cell units are in alat or crystal
    # units, that will be taken care of by the input parsing tools, and
    # we are safe to fake that they were never there in the first place.
    parameters = copy.deepcopy(parsed_file.namelists)
    for namelist, blocked in PwCalculation._blocked_keywords:  # pylint: disable=protected-access
        section = parameters[namelist]
        # take into account that celldm and celldm(*) must be blocked
        matching = [key for key in section if re.sub('[(0-9)]', '', key) == blocked]
        for key in matching:
            section.pop(key, None)
    builder.parameters = Dict(dict=parameters)

    # Get or create a UpfData node per pseudopotential file, reusing nodes for repeated file names.
    if pseudo_folder_path is None:
        pseudo_folder_path = input_folder
    if isinstance(pseudo_folder_path, six.string_types):
        pseudo_folder_path = Folder(pseudo_folder_path)
    species = parsed_file.atomic_species
    pseudo_file_map = {}
    pseudos_map = {}
    for name, fname in zip(species['names'], species['pseudo_file_names']):
        if fname not in pseudo_file_map:
            local_path = pseudo_folder_path.get_abs_path(fname)
            upf_node, _ = UpfData.get_or_create(local_path, use_first=use_first, store_upf=False)
            pseudo_file_map[fname] = upf_node
        pseudos_map[name] = pseudo_file_map[fname]
    builder.pseudos = pseudos_map

    settings_dict = {}
    if parsed_file.k_points['type'] == 'gamma':
        settings_dict['gamma_only'] = True

    # If there are any fixed coordinates (i.e. force modification) present in the input file, specify in settings
    fixed_coords = parsed_file.atomic_positions['fixed_coords']
    # Function ``any()`` only works for 1-dimensional lists so we have to call it twice manually.
    if any(any(fc_xyz) for fc_xyz in fixed_coords):
        settings_dict['FIXED_COORDS'] = fixed_coords

    if settings_dict:
        builder.settings = settings_dict

    return builder
Exemplo n.º 14
0
    '1.0 0.001632 0.000 65.20700 2.82100 -0.518000 1.0 1.0 2.63906 3864.27 2.90 0.20 2.93516 6136.44',
    'N  Ga N ':
    '1.0 0.766120 0.000 0.178493 0.20172 -0.045238 1.0 0.0 0.00000 0.00000 2.20 0.20 0.00000 0.00000',
    'N  N  Ga':
    '1.0 0.001632 0.000 65.20700 2.82100 -0.518000 1.0 0.0 0.00000 0.00000 2.90 0.20 0.00000 0.00000',
    'Ga N  Ga':
    '1.0 0.007874 1.846 1.918000 0.75000 -0.301300 1.0 0.0 0.00000 0.00000 2.87 0.15 0.00000 0.00000'
}

# Silicon(C) Tersoff
# tersoff_si = {'Si  Si  Si ': '3.0 1.0 1.7322 1.0039e5 16.218 -0.59826 0.78734 1.0999e-6  1.7322  471.18  2.85  0.15  2.4799  1830.8'}

# Assemble the Tersoff potential definition consumed by the LAMMPS plugin.
potential = {'pair_style': 'tersoff', 'data': tersoff_gan}

LammpsForceCalculation = CalculationFactory('lammps.force')
inputs = LammpsForceCalculation.get_builder()

# Computer options
options = AttributeDict()
options.account = ''
options.qos = ''
options.resources = {
    'num_machines': 1,
    'num_mpiprocs_per_machine': 1,
    'parallel_env': 'localmpi',
    'tot_num_mpiprocs': 1
}
#options.queue_name = 'iqtc04.q'
options.max_wallclock_seconds = 3600
inputs.metadata.options = options
Exemplo n.º 15
0
def create_restart(inputs,
                   entry_point='castep.castep',
                   calcjob=None,
                   param_update=None,
                   param_delete=None,
                   restart_mode='restart',
                   use_castep_bin=False,
                   parent_folder=None,
                   reuse=False):
    """
    Create a new input builder for restarting a calculation.

    :param inputs: a ``ProcessBuilder`` or a nested dictionary of inputs
    :param entry_point: name of the calculation entry point; only used when
        ``inputs`` is a plain dictionary
    :param calcjob: unused; kept for backward compatibility of the signature
    :param param_update: dictionary of parameters to add or update
    :param param_delete: a list of parameter keys to be deleted
    :param restart_mode: mode of the restart: ``'continuation'``, ``'restart'``
        or ``None`` (remove any continuation/reuse tags)
    :param use_castep_bin: use the 'castep_bin' file instead of the check file
    :param parent_folder: remote folder to be used for the restart
    :param reuse: use the reuse mode (only meaningful with
        ``restart_mode='restart'``)
    :raises TypeError: if ``inputs`` is neither a dict nor a ``ProcessBuilder``
    :raises RuntimeError: if ``restart_mode`` is not recognised
    :return: a new ``ProcessBuilder`` with the updated inputs
    """
    from aiida.plugins import CalculationFactory
    from aiida.engine import ProcessBuilder

    # Always work on a fresh builder so the caller's object is not mutated.
    if isinstance(inputs, dict):
        processclass = CalculationFactory(entry_point)
        builder = processclass.get_builder()
    elif isinstance(inputs, ProcessBuilder):
        builder = inputs._process_class.get_builder()
    else:
        # Previously this fell through and crashed later with a NameError on
        # ``builder``; fail fast with an explicit message instead.
        raise TypeError(
            'inputs must be a dict or a ProcessBuilder, got {}'.format(
                type(inputs)))

    builder._update(inputs)

    # Parameters to be added/updated and parameter keys to be removed
    update = {}
    delete = []

    # Set the restart tag pointing at the parent calculation's restart file
    suffix = '.check' if not use_castep_bin else '.castep_bin'
    if restart_mode == 'continuation':
        update['continuation'] = 'parent/' + builder.metadata.seedname + suffix
        delete.append('reuse')
    elif restart_mode == 'restart' and reuse:
        update['reuse'] = 'parent/' + builder.metadata.seedname + suffix
        delete.append('continuation')
    elif restart_mode is None:
        # Drop any restart-related tags entirely
        delete.extend(['continuation', 'reuse'])
    elif restart_mode != 'restart':
        raise RuntimeError('Unknown restart mode: ' + restart_mode)

    if param_update:
        update.update(param_update)
    if param_delete:
        delete.extend(param_delete)

    new_builder = update_parameters(builder,
                                    force=True,
                                    delete=delete,
                                    **update)

    # Attach the remote folder of the parent calculation, if given
    if parent_folder is not None:
        new_builder[INPUT_LINKNAMES['parent_calc_folder']] = parent_folder

    return new_builder
Exemplo n.º 16
0
def test_get_builder(db_test_app, entry_point):
    """Smoke test: the calculation class for ``entry_point`` can hand out a builder."""
    from aiida.plugins import CalculationFactory
    calc_class = CalculationFactory(entry_point)
    calc_class.get_builder()
Exemplo n.º 17
0
    def _generate_calc_job_node(
        entry_point_name,
        results_folder,
        inputs=None,
        computer=None,
        outputs=None,
        outfile_override=None,
    ):
        """
        Generate a stored ``CalcJobNode`` with a fake ``retrieved`` folder
        populated from ``tests/data/<results_folder>``.

        :param entry_point_name: entry point name of the calculation class
        :param results_folder: name of the folder under ``tests/data`` whose
            contents become the retrieved output
        :param inputs: optional dict of inputs to monkey-patch onto the node;
            ``Node`` values are stored and linked as INPUT_CALC (pseudos are
            not handled here)
        :param computer: computer to use; defaults to ``db_test_app.localhost``
        :param outputs: optional dict of output nodes linked with CREATE links
        :param outfile_override: mapping of retrieved file name to replacement
            content; a ``None`` value deletes the file instead
        :return: the stored ``CalcJobNode``
        """

        calc_class = CalculationFactory(entry_point_name)
        entry_point = format_entry_point_string('aiida.calculations',
                                                entry_point_name)
        builder = calc_class.get_builder()

        if not computer:
            computer = db_test_app.localhost
        node = CalcJobNode(computer=computer, process_type=entry_point)

        # Monkeypatch the inputs so parsers can look them up on the node
        if inputs is not None:
            inputs = AttributeDict(inputs)
            node.__dict__['inputs'] = inputs
            # Add direct inputs, pseudos are omitted
            for k, v in inputs.items():
                if isinstance(v, Node):
                    if not v.is_stored:
                        v.store()
                    node.add_incoming(v,
                                      link_type=LinkType.INPUT_CALC,
                                      link_label=k)

        # Start from the builder's default options and only merge the caller's
        # options when inputs were actually supplied.  The unguarded
        # ``inputs.metadata.options`` previously raised AttributeError for
        # ``inputs=None``.
        options = builder.metadata.options
        if inputs is not None:
            options.update(inputs.metadata.options)
        node.set_attribute('input_filename', options.input_filename)
        node.set_attribute('seedname', options.seedname)
        node.set_attribute('output_filename', options.output_filename)
        node.set_attribute('error_filename', 'aiida.err')
        node.set_option('resources', {
            'num_machines': 1,
            'num_mpiprocs_per_machine': 1
        })
        node.set_option('max_wallclock_seconds', 1800)
        node.store()

        # Populate the fake retrieved folder from the test data directory
        filepath = this_folder.parent / 'data' / results_folder
        retrieved = FolderData()
        retrieved.put_object_from_tree(str(filepath.resolve()))

        # Apply overriding output files
        if outfile_override is not None:
            for key, content in outfile_override.items():
                if content is None:
                    retrieved.delete_object(key)
                    continue
                buf = BytesIO(content.encode())
                retrieved.put_object_from_filelike(buf, key)

        retrieved.add_incoming(node,
                               link_type=LinkType.CREATE,
                               link_label='retrieved')
        retrieved.store()

        # Attach any additional fake outputs requested by the test
        if outputs is not None:
            for label, out_node in outputs.items():
                out_node.add_incoming(node,
                                      link_type=LinkType.CREATE,
                                      link_label=label)
                if not out_node.is_stored:
                    out_node.store()

        return node