def start(self):
    """
    Retrieve and initialize the parameters of the WorkChain and validate the inputs.

    Initializes the context bookkeeping containers, merges the user-supplied
    ``wf_parameters`` with the workflow defaults, rejects unknown parameter
    keys, and — when a final SCF run is requested — assembles its inputs from
    the ``scf`` and ``final_scf`` namespaces.

    :return: an exit code on invalid input, otherwise ``None``
    """
    self.report('INFO: Started structure relaxation workflow version {}\n'.format(self._workflowversion))

    self.ctx.info = []  # Collects Hints
    self.ctx.warnings = []  # Collects Warnings
    self.ctx.errors = []  # Collects Errors

    # Pre-initialization of some variables
    self.ctx.loop_count = 0  # Counts relax restarts
    self.ctx.forces = []  # Collects forces
    self.ctx.final_cell = None  # The relaxed Bravais matrix
    self.ctx.final_atom_positions = None  # Relaxed atom positions
    self.ctx.pbc = None  # Boundary conditions
    self.ctx.reached_relax = False  # Bool if is relaxed
    self.ctx.switch_bfgs = False  # Bool if BFGS should be switched on
    self.ctx.scf_res = None  # Last scf results
    self.ctx.final_structure = None  # The optimized structure
    self.ctx.total_magnetic_moment = None

    # initialize the dictionary using defaults if no wf parameters are given
    wf_default = copy.deepcopy(self._default_wf_para)
    if 'wf_parameters' in self.inputs:
        wf_dict = self.inputs.wf_parameters.get_dict()
    else:
        wf_dict = wf_default

    # Reject any keys that are not known workflow parameters
    extra_keys = [key for key in wf_dict if key not in wf_default]
    if extra_keys:
        error = 'ERROR: input wf_parameters for Relax contains extra keys: {}'.format(extra_keys)
        self.report(error)
        return self.exit_codes.ERROR_INVALID_INPUT_PARAM

    # extend wf parameters given by user using defaults
    for key, val in wf_default.items():
        wf_dict.setdefault(key, val)
    self.ctx.wf_dict = wf_dict

    if '49' in wf_dict['atoms_off']:
        error = '"49" label for atoms_off is reserved for internal use'
        self.report(error)
        return self.exit_codes.ERROR_INVALID_INPUT_PARAM

    # Check if final scf can be run
    run_final = wf_dict.get('run_final_scf', False)
    if run_final:
        # We need inpgen to be there
        input_scf = AttributeDict(self.exposed_inputs(FleurScfWorkChain, namespace='scf'))

        # policy, reuse as much as possible from scf namespace
        # NOTE(review): this aliases input_scf, so the deletions below also
        # remove those keys from input_scf itself — confirm this is intended.
        input_final_scf = input_scf

        # These inputs must not be forwarded to the final scf run
        for forbidden in ('remote_data', 'structure', 'fleurinp', 'wf_parameters'):
            if forbidden in input_final_scf:
                del input_final_scf[forbidden]

        if 'final_scf' in self.inputs:
            # Will defaults of namespace override other given options?
            input_final_scf_given = AttributeDict(self.exposed_inputs(FleurScfWorkChain, namespace='final_scf'))
            for key, val in input_final_scf_given.items():
                input_final_scf[key] = val

        self.ctx.input_final_scf = input_final_scf

        if 'inpgen' not in input_scf and 'inpgen' not in input_final_scf:
            self.report('Error: Wrong input: inpgen missing for final scf.')
            return self.exit_codes.ERROR_INPGEN_MISSING
def _generate_calc_job_node(
    entry_point_name,
    results_folder,
    inputs=None,
    computer=None,
    outputs=None,
    outfile_override=None,
):
    """
    Build a fake, stored ``CalcJobNode`` whose ``retrieved`` ``FolderData``
    is populated from a folder under ``tests/data``.

    :param entry_point_name: name of the calculation entry point
    :param results_folder: sub-folder of ``tests/data`` holding the
        retrieved output files
    :param inputs: optional mapping of input nodes/metadata to attach
    :param computer: computer for the node; defaults to
        ``db_test_app.localhost``
    :param outputs: optional mapping of output nodes to link as created
    :param outfile_override: mapping of file name to replacement content;
        a ``None`` value deletes that file from the retrieved folder
    :return: the stored ``CalcJobNode``
    """
    calculation_cls = CalculationFactory(entry_point_name)
    process_type = format_entry_point_string('aiida.calculations', entry_point_name)
    builder = calculation_cls.get_builder()

    target_computer = computer if computer else db_test_app.localhost
    calc_node = CalcJobNode(computer=target_computer, process_type=process_type)

    # Monkypatch the inputs
    if inputs is not None:
        inputs = AttributeDict(inputs)
        calc_node.__dict__['inputs'] = inputs
        # Add direct inputs, pseudos are omitted
        for label, input_node in inputs.items():
            if isinstance(input_node, Node):
                if not input_node.is_stored:
                    input_node.store()
                calc_node.add_incoming(input_node, link_type=LinkType.INPUT_CALC, link_label=label)

    options = builder.metadata.options
    options.update(inputs.metadata.options)
    calc_node.set_attribute('input_filename', options.input_filename)
    calc_node.set_attribute('seedname', options.seedname)
    calc_node.set_attribute('output_filename', options.output_filename)
    calc_node.set_attribute('error_filename', 'aiida.err')
    calc_node.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1})
    calc_node.set_option('max_wallclock_seconds', 1800)
    calc_node.store()

    results_path = this_folder.parent / 'data' / results_folder
    retrieved = FolderData()
    retrieved.put_object_from_tree(str(results_path.resolve()))

    # Apply overriding output files
    if outfile_override is not None:
        for fname, content in outfile_override.items():
            if content is None:
                retrieved.delete_object(fname)
                continue
            retrieved.put_object_from_filelike(BytesIO(content.encode()), fname)

    retrieved.add_incoming(calc_node, link_type=LinkType.CREATE, link_label='retrieved')
    retrieved.store()

    if outputs is not None:
        for link_label, output_node in outputs.items():
            output_node.add_incoming(calc_node, link_type=LinkType.CREATE, link_label=link_label)
            if not output_node.is_stored:
                output_node.store()

    return calc_node