    def run_create(self):
        """
        Launch the first, "create" sub-process.
        """
        self.report(f"Running create process '{self.inputs.create_process.value}'")

        create_process_class = load_object(self.inputs.create_process.value)

        return ToContext(
            create_process=self.run_or_submit(create_process_class, **self.inputs.create)
        )
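
    # run_or_submit is not defined in these snippets. A minimal sketch of what it
    # could look like, assuming the same dispatch used in launch_evaluations further
    # down (process functions are run in-line, anything else is submitted); the
    # actual helper may differ:
    def run_or_submit(self, process, **inputs):
        """Run a process function in-line, or submit any other process class."""
        if is_process_function(process):
            _, node = run_get_node(process, **inputs)
            return node
        return self.submit(process, **inputs)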

    def fp_run(self):
        """
        Run the first-principles calculation workflows.
        """
        self.report('Submitting reference_bands workflow.')
        reference_bands = self.submit(
            load_object(self.inputs.reference_bands_workflow),
            **ChainMap(
                self.inputs['reference_bands'],
                self.exposed_inputs(ReferenceBandsBase, namespace='reference_bands')
            )
        )
        self.report('Submitting wannier_input workflow.')
        wannier_input = self.submit(
            load_object(self.inputs.wannier_input_workflow),
            **ChainMap(
                self.inputs['wannier_input'],
                self.exposed_inputs(WannierInputBase, namespace='wannier_input')
            )
        )
        return ToContext(reference_bands=reference_bands, wannier_input=wannier_input)
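
# ChainMap merges the explicitly provided namespace inputs with the inputs exposed
# from the base workchain class; lookups take the first mapping that contains the
# key, so explicit values shadow the exposed ones. A standalone illustration with
# plain dicts and hypothetical keys:
from collections import ChainMap

exposed = {'kpoints_mesh': [8, 8, 8], 'code_label': 'pw-6.8'}
explicit = {'kpoints_mesh': [12, 12, 12]}
merged = ChainMap(explicit, exposed)
assert merged['kpoints_mesh'] == [12, 12, 12]  # the explicit input wins
assert merged['code_label'] == 'pw-6.8'  # missing keys fall through to the exposed inputs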

    def fp_run(self):
        """
        Runs the first-principles calculation workflow.
        """
        self.report("Starting DFT workflows.")
        return ToContext(fp_run=self.submit(
            load_object(self.inputs.fp_run_workflow),
            **ChainMap(
                self.inputs.fp_run,
                self.exposed_inputs(FirstPrinciplesRunBase, namespace='fp_run'),
            )
        ))

    def evaluate_bands(self):
        """
        Add the tight-binding model to the outputs and run the evaluation workflow.
        """
        self.report("Adding tight-binding model to output.")
        tb_model = self.ctx.tbextraction_calc.outputs.tb_model
        self.out('tb_model', tb_model)
        self.report("Running model evaluation.")
        return ToContext(model_evaluation_wf=self.submit(
            load_object(self.inputs.model_evaluation_workflow),
            tb_model=tb_model,
            **ChainMap(
                self.inputs.model_evaluation,
                self.exposed_inputs(ModelEvaluationBase),
            )
        ))

    def launch_evaluations(self):
        """
        Create evaluations for the current iteration step.
        """
        self.report('Launching pending evaluations.')
        with self.optimizer() as opt:
            evals = {}
            evaluate_process = load_object(self.inputs.evaluate_process.value)
            for idx, inputs in opt.create_inputs().items():
                self.report('Launching evaluation {}'.format(idx))
                inputs_merged = ChainMap(inputs, self.inputs.get('evaluate', {}))
                if is_process_function(evaluate_process):
                    _, node = run_get_node(evaluate_process, **inputs_merged)
                else:
                    node = self.submit(evaluate_process, **inputs_merged)
                evals[self.eval_key(idx)] = node
                self.indices_to_retrieve.append(idx)
        return self.to_context(**evals)
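
    # eval_key is not shown in these snippets; it presumably maps an evaluation index
    # to a unique context key so that to_context can store each evaluation node
    # separately. A hypothetical one-liner, purely for illustration:
    def eval_key(self, index):
        """Return the context key under which evaluation 'index' is stored."""
        return 'eval_{}'.format(index)
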
    def run_evaluate(self):
        """
        Retrieve outputs of the "create" sub-process, and launch the
        "evaluate" sub-process.
        """
        create_process_outputs = get_outputs_dict(self.ctx.create_process)
        self.out('create', create_process_outputs)
        if not self.ctx.create_process.is_finished_ok:
            return self.exit_codes.ERROR_CREATE_PROCESS_FAILED

        self.report(f"Running evaluate process '{self.inputs.evaluate_process.value}'")
        evaluate_process_class = load_object(self.inputs.evaluate_process.value)

        output_input_mapping = self.inputs.output_input_mapping.get_dict()
        created_inputs = {
            in_key: create_process_outputs[out_key]
            for out_key, in_key in output_input_mapping.items() if out_key in create_process_outputs
        }
        return ToContext(
            evaluate_process=self.run_or_submit(
                evaluate_process_class, **self.inputs.evaluate, **created_inputs
            )
        )
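
# output_input_mapping is an orm.Dict mapping output labels of the "create" process
# to input labels of the "evaluate" process; only outputs that are actually present
# are forwarded. A standalone illustration with hypothetical labels and placeholder
# values:
output_input_mapping = {'structure': 'structure', 'remote_folder': 'parent_folder'}
create_process_outputs = {'structure': '<StructureData>', 'retrieved': '<FolderData>'}
created_inputs = {
    in_key: create_process_outputs[out_key]
    for out_key, in_key in output_input_mapping.items()
    if out_key in create_process_outputs
}
assert created_inputs == {'structure': '<StructureData>'}  # 'remote_folder' has no source output
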
    def run_evaluate(self):
        """
        Runs the model evaluation workflow.
        """
        tb_model = self.ctx.tbextraction_calc.outputs.tb_model
        self.report("Adding tight-binding model to output.")
        self.out('tb_model', tb_model)

        # slice reference bands if necessary
        reference_bands = self.ctx.fp_run.outputs.bands
        slice_reference_bands = self.inputs.get('slice_reference_bands', None)
        if slice_reference_bands is not None:
            reference_bands = slice_bands_inline(
                bands=reference_bands, slice_idx=slice_reference_bands)
        self.report('Starting model evaluation workflow.')
        return ToContext(model_evaluation_wf=self.submit(
            load_object(self.inputs.model_evaluation_workflow),
            tb_model=tb_model,
            reference_bands=reference_bands,
            **ChainMap(
                self.inputs.model_evaluation,
                self.exposed_inputs(ModelEvaluationBase),
            )))
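
# slice_bands_inline is not part of these snippets. A minimal sketch of what such a
# calcfunction could look like, assuming slice_idx is an orm.List of band indices to
# keep and the bands array is shaped (num_kpoints, num_bands); the actual
# implementation may differ:
from aiida.engine import calcfunction

@calcfunction
def slice_bands_inline(bands, slice_idx):
    """Return a copy of 'bands' keeping only the bands selected by 'slice_idx'."""
    sliced = bands.clone()
    sliced.set_bands(bands.get_bands()[:, slice_idx.get_list()])
    return sliced
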
    def run_process(self):
        """
        Merge the inputs namespace and added inputs, and launch the
        sub-process.
        """
        self.report("Merging inputs for the sub-process.")

        if isinstance(self.inputs.added_input_keys, orm.Str):
            added_input_keys = [self.inputs.added_input_keys.value]
            if not isinstance(self.inputs.added_input_values, orm.BaseType):
                raise InputValidationError(
                    "When 'added_input_keys' is given as 'Str', 'added_input_values'"
                    " must be a 'BaseType' instance.")
            added_input_values = [self.inputs.added_input_values.value]
        else:
            added_input_keys = self.inputs.added_input_keys.get_list()
            if not isinstance(self.inputs.added_input_values, orm.List):
                raise InputValidationError(
                    "When 'added_input_keys' is given as 'List', 'added_input_values'"
                    " must also be a 'List'.")
            added_input_values = self.inputs.added_input_values.get_list()

        if len(added_input_values) != len(added_input_keys):
            raise InputValidationError(
                "Lengths of 'added_input_values' and 'added_input_keys' do not match."
            )

        inputs = AttributeDict(self.inputs.inputs)

        def _get_or_create_sub_dict(in_dict, name):
            try:
                return in_dict[name]
            except KeyError:
                res = {}
                in_dict[name] = res
                return res

        def _get_or_create_port(in_attr_dict, name):
            try:
                return getattr(in_attr_dict, name)
            except AttributeError:
                res = AttributeDict()
                setattr(in_attr_dict, name, res)
                return res

        for key, value in zip(added_input_keys, added_input_values):
            full_port_path, *full_attr_path = key.split(':')
            *port_path, port_name = full_port_path.split('.')
            namespace = reduce(_get_or_create_port, port_path, inputs)
            if not full_attr_path:
                res_value = to_aiida_type(value)
            else:
                assert len(full_attr_path) == 1
                # Get or create the top-level dictionary.
                try:
                    res_dict = getattr(namespace, port_name).get_dict()
                except AttributeError:
                    res_dict = {}

                *sub_dict_path, attr_name = full_attr_path[0].split('.')
                sub_dict = reduce(_get_or_create_sub_dict, sub_dict_path,
                                  res_dict)
                sub_dict[attr_name] = value
                res_value = orm.Dict(dict=res_dict).store()

            setattr(namespace, port_name, res_value)

        self.report("Launching the sub-process.")
        return ToContext(sub_process=self.run_or_submit(
            load_object(self.inputs.sub_process.value), **inputs))
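
# The added-input keys parsed above use a small path syntax: dots select (possibly
# nested) input ports, and an optional ':' suffix addresses a key inside an orm.Dict
# port, again with dots for nesting. A standalone illustration with a hypothetical key:
key = 'sub_workchain.parameters:system.ecutwfc'
full_port_path, *full_attr_path = key.split(':')
*port_path, port_name = full_port_path.split('.')
assert port_path == ['sub_workchain']
assert port_name == 'parameters'
*sub_dict_path, attr_name = full_attr_path[0].split('.')
assert sub_dict_path == ['system'] and attr_name == 'ecutwfc'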

    def engine(self):
        """
        Return the engine class loaded from the 'engine' input.
        """
        return load_object(self.inputs.engine.value)