Example #1
class SQSJob(AtomisticGenericJob):
    def __init__(self, project, job_name):
        super(SQSJob, self).__init__(project, job_name)
        self.input = GenericParameters(table_name="input")
        self.input['mole_fractions'] = dict()
        self.input['weights'] = None
        self.input['objective'] = 0.0
        self.input['iterations'] = 1e6
        self.input['output_structures'] = 1
        self._python_only_job = True
        self._lst_of_struct = []

    @property
    def list_of_structures(self):
        return self._lst_of_struct

    def list_structures(self):
        if self.status.finished:
            return self._lst_of_struct
        else:
            return []

    # This function is executed when the job is run: it generates the SQS
    # structures and stores them in the job's HDF5 output.
    def run_static(self):
        self._lst_of_struct, decmp, iterations, cycle_time = get_sqs_structures(
            structure=self.structure,
            mole_fractions=self.input['mole_fractions'],
            weights=self.input['weights'],
            objective=self.input['objective'],
            iterations=self.input['iterations'],
            output_structures=self.input['output_structures'],
            num_threads=self.server.cores)
        for i, structure in enumerate(self._lst_of_struct):
            with self.project_hdf5.open("output/structures/structure_" +
                                        str(i)) as h5:
                structure.to_hdf(h5)
        with self.project_hdf5.open("output") as h5:
            h5["decmp"] = decmp
            h5["cycle_time"] = cycle_time
            h5["iterations"] = iterations
        self.status.finished = True
        self.project.db.item_update(self._runtime(), self.job_id)

    def to_hdf(self, hdf=None, group_name=None):
        super().to_hdf(hdf=hdf, group_name=group_name)
        with self.project_hdf5.open("input") as h5in:
            self.input.to_hdf(h5in)

    def from_hdf(self, hdf=None, group_name=None):
        super().from_hdf(hdf=hdf, group_name=group_name)
        with self.project_hdf5.open("input") as h5in:
            self.input.from_hdf(h5in)
        with self.project_hdf5.open("output/structures") as hdf5_output:
            structure_names = hdf5_output.list_groups()
        for group in structure_names:
            with self.project_hdf5.open("output/structures/" +
                                        group) as hdf5_output:
                self._lst_of_struct.append(Atoms().from_hdf(hdf5_output))
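A minimal usage sketch (illustrative only, not from the source; it assumes SQSJob is registered as a pyiron job type and uses the older create_ase_bulk structure API - project name and composition are placeholders):

# Illustrative sketch: create and run an SQSJob inside a pyiron Project.
from pyiron import Project

pr = Project("sqs_example")
job = pr.create_job(SQSJob, "sqs_al_ni")
job.structure = pr.create_ase_bulk("Al", cubic=True).repeat(2)
job.input["mole_fractions"] = {"Al": 0.75, "Ni": 0.25}
job.input["iterations"] = 1e5
job.run()  # python-only job: run() dispatches to run_static()
structures = job.list_of_structures  # filled from output/structures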
Example #2
class MetadynInput(Input):
    def __init__(self):
        super(MetadynInput, self).__init__()
        self.iconst = GenericParameters(input_file_name=None,
                                        table_name="iconst",
                                        val_only=True,
                                        comment_char="!")
        self.penaltypot = GenericParameters(
            input_file_name=None,
            table_name="penaltypot",
            val_only=True,
            comment_char="!",
        )
        self._constraint_dict = dict()
        self._complex_constraints = dict()

    def write(self, structure, modified_elements, directory=None):
        """
        Writes all the input files to a specified directory

        Args:
            structure (atomistics.structure.atoms.Atoms instance): Structure to be written
            modified_elements (dict): Elements with user-modified potentials, passed on to the base class
            directory (str): The working directory for the VASP run
        """
        # Writing the constraints, increments, and penalty potentials
        super(MetadynInput, self).write(structure, modified_elements,
                                        directory)
        self.iconst.write_file(file_name="ICONST", cwd=directory)
        self.penaltypot.write_file(file_name="PENALTYPOT", cwd=directory)

    def to_hdf(self, hdf):
        super(MetadynInput, self).to_hdf(hdf)
        with hdf.open("input") as hdf5_input:
            self.iconst.to_hdf(hdf5_input)
            self.penaltypot.to_hdf(hdf5_input)
            hdf5_input["constraint_dict"] = self._constraint_dict
            hdf5_input["complex_constraint_dict"] = self._complex_constraints

    def from_hdf(self, hdf):
        super(MetadynInput, self).from_hdf(hdf)
        with hdf.open("input") as hdf5_input:
            self.iconst.from_hdf(hdf5_input)
            self.penaltypot.from_hdf(hdf5_input)
            if "constraint_dict" in hdf5_input.list_nodes():
                self._constraint_dict = hdf5_input["constraint_dict"]
            if "complex_constraint_dict" in hdf5_input.list_nodes():
                self._complex_constraints = hdf5_input[
                    "complex_constraint_dict"]
Example #3
    def _backwards_compatible_input_from_hdf(self):
        if "HDF_VERSION" in self.project_hdf5.list_nodes():
            version = self.project_hdf5["HDF_VERSION"]
        else:
            version = "0.1.0"

        if version == "0.1.0":
            with self.project_hdf5.open("input") as hdf5_input:
                gp = GenericParameters(table_name="custom_dict")
                gp.from_hdf(hdf5_input)
                for k in gp.keys():
                    self.input[k] = gp[k]
        elif version == "0.2.0":
            with self.project_hdf5.open("input") as hdf5_input:
                self.input.from_hdf(hdf5_input)
        else:
            raise ValueError("Cannot handle hdf version: {}".format(version))
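For context, the matching write side of this pattern stores the version node so that later reads can dispatch on it; a sketch consistent with the example above:

    # Sketch of the write side: persist HDF_VERSION so that
    # _backwards_compatible_input_from_hdf() can pick the right reader.
    def to_hdf(self, hdf=None, group_name=None):
        super().to_hdf(hdf=hdf, group_name=group_name)
        self.project_hdf5["HDF_VERSION"] = "0.2.0"  # current input format
        with self.project_hdf5.open("input") as hdf5_input:
            self.input.to_hdf(hdf5_input)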
Example #4
class SxHarmPotTst(AtomisticGenericJob):
    def __init__(self, project, job_name):
        super(SxHarmPotTst, self).__init__(project, job_name)
        self.__version__ = "0.1"
        self.__name__ = "SxHarmPotTst"
        self.input = GenericParameters(table_name="interaction")
        self.input["interaction_radius"] = 4.0
        self.input["maximum_noise"] = 0.26
        self._positions_lst = []
        self._forces_lst = []
        self._md_job_id = None
        self._md_job = None

    @property
    def md_job(self):
        if self._md_job is None and self._md_job_id is not None:
            self._md_job = self.project.load(self._md_job_id)
        return self._md_job

    @md_job.setter
    def md_job(self, job):
        if job.status == "finished":
            self._md_job_id = job.job_id
            self._md_job = job
            self._positions_lst = job["output/generic/positions"]
            self._forces_lst = job["output/generic/forces"]
        else:
            raise ValueError("Job not finished!")

    def set_input_to_read_only(self):
        """
        This function enforces read-only mode for the input classes, but it has to be implemented in the
        individual classes.
        """
        super(SxHarmPotTst, self).set_input_to_read_only()
        self.input.read_only = True

    def write_harmpot(self, cwd, file_name="harmpot.sx"):
        harm_pot_str = (
            "format harmpot;\n\n"
            + "valenceCharge=0;\n"
            + "harmonicPotential {\n"
            + '   //include "refSym.sx";\n'
            + '   //include "equivalence.sx";\n'
            + "   maxDist="
            + str(float(self.input["maximum_noise"]) / BOHR_TO_ANGSTROM)
            + ";\n"
            + '   include "shells.sx";\n'
            + "}\n"
            + 'include "structure_wrapper.sx";'
        )
        if cwd is not None:
            file_name = os.path.join(cwd, file_name)
        with open(file_name, "w") as f:
            f.write(harm_pot_str)

    def write_structure(self, cwd, file_name="structure_wrapper.sx"):
        structure_file_name = "structure.sx"
        iw = InputWriter()
        iw.structure = self._md_job.structure
        iw.write_structure(file_name=structure_file_name, cwd=cwd)
        with open(os.path.join(cwd, file_name), "w") as f:
            f.writelines(["structure { include <" + structure_file_name + ">; }"])

    def validate_ready_to_run(self):
        if len(self._positions_lst) == 0 or len(self._forces_lst) == 0:
            raise ValueError(
                "No positions/forces available - assign a finished MD job via md_job first."
            )

    def write_input(self):
        self.write_structure(
            cwd=self.working_directory, file_name="structure_wrapper.sx"
        )
        self.write_harmpot(cwd=self.working_directory, file_name="harmpot.sx")
        pos_force_mat = np.concatenate((self._positions_lst, self._forces_lst), axis=2)
        cont_pos_force_mat = pos_force_mat.reshape(-1, pos_force_mat.shape[-1])
        np.savetxt(
            os.path.join(self.working_directory, "POSITIONs"), cont_pos_force_mat
        )
        lines = [
            "#!/bin/bash\n",
            "sxstructparam -i structure_wrapper.sx -c "
            + str(float(self.input["interaction_radius"]) / BOHR_TO_ANGSTROM)
            + " --printReduced=shells.sx --log\n",
            "sxharmpottst --param=POSITIONs --vasp --printHesse HesseMatrix_sphinx -i harmpot.sx --log --svd\n",
        ]
        with open(os.path.join(self.working_directory, "sxharmpottst.sh"), "w") as f:
            f.writelines(lines)

    def get_hesse_matrix(self):
        if "output" in self.project_hdf5.list_groups():
            return self.project_hdf5["output/hesse"]

    def collect_output(self):
        with self.project_hdf5.open("output") as hdf_out:
            hdf_out["hesse"] = np.loadtxt(
                os.path.join(self.working_directory, "HesseMatrix_sphinx")
            )

    def to_hdf(self, hdf=None, group_name=None):
        """
        Store the SxHarmPotTst object in the HDF5 File

        Args:
            hdf (ProjectHDFio): HDF5 group object - optional
            group_name (str): HDF5 subgroup name - optional
        """
        super(SxHarmPotTst, self).to_hdf(hdf=hdf, group_name=group_name)
        with self.project_hdf5.open("input") as hdf5_input:
            self.input.to_hdf(hdf5_input)
            if len(self._positions_lst) != 0:
                hdf5_input["positions"] = self._positions_lst
            if len(self._forces_lst) != 0:
                hdf5_input["forces"] = self._forces_lst
            if self._md_job_id is not None:
                hdf5_input["md_job_id"] = self._md_job_id

    def from_hdf(self, hdf=None, group_name=None):
        """
        Restore the SxHarmPotTst object from the HDF5 File

        Args:
            hdf (ProjectHDFio): HDF5 group object - optional
            group_name (str): HDF5 subgroup name - optional
        """
        super(SxHarmPotTst, self).from_hdf(hdf=hdf, group_name=group_name)
        with self.project_hdf5.open("input") as hdf5_input:
            self.input.from_hdf(hdf5_input)
            if "positions" in hdf5_input.list_nodes():
                self._positions_lst = hdf5_input["positions"]
            if "forces" in hdf5_input.list_nodes():
                self._forces_lst = hdf5_input["forces"]
            if "md_job_id" in hdf5_input.list_nodes():
                self._md_job_id = hdf5_input["md_job_id"]
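A typical workflow sketch (illustrative; job names are placeholders and the MD job must be finished before it can be assigned):

# Illustrative sketch: fit a harmonic potential from a finished MD job.
from pyiron import Project

pr = Project("harmpot_example")
md = pr.load("lammps_md")               # hypothetical finished MD job
harm = pr.create_job(SxHarmPotTst, "harmpot")
harm.md_job = md                        # copies positions/forces, raises otherwise
harm.input["interaction_radius"] = 5.0  # Angstrom, converted to Bohr on write
harm.run()                              # writes harmpot.sx, POSITIONs, sxharmpottst.sh
hesse = harm.get_hesse_matrix()         # available once collect_output() has run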
Example #5
class SxUniqDispl(AtomisticGenericJob):
    def __init__(self, project, job_name):
        super(SxUniqDispl, self).__init__(project, job_name)
        self.__version__ = "0.1"
        self.__name__ = "SxUniqDispl"
        self.input = GenericParameters(table_name="displacement")
        self.input["displacement"] = 0.01
        self.structure_lst = []
        self._id_pyi_to_spx = []
        self._id_spx_to_pyi = []

    @property
    def id_spx_to_pyi(self):
        if self.structure is None:
            return None
        if len(self._id_spx_to_pyi) == 0:
            self._initialize_order()
        return self._id_spx_to_pyi

    @property
    def id_pyi_to_spx(self):
        if self.structure is None:
            return None
        if len(self._id_pyi_to_spx) == 0:
            self._initialize_order()
        return self._id_pyi_to_spx

    def _initialize_order(self):
        for elm_species in self.structure.get_species_objects():
            self._id_pyi_to_spx.append(
                np.arange(len(self.structure))[
                    self.structure.get_chemical_symbols() == elm_species.Abbreviation
                ]
            )
        self._id_pyi_to_spx = np.array(
            [ooo for oo in self._id_pyi_to_spx for ooo in oo]
        )
        self._id_spx_to_pyi = np.array([0] * len(self._id_pyi_to_spx))
        for i, p in enumerate(self._id_pyi_to_spx):
            self._id_spx_to_pyi[p] = i

    def set_input_to_read_only(self):
        """
        This function enforces read-only mode for the input classes, but it has to be implemented in the
        individual classes.
        """
        super(SxUniqDispl, self).set_input_to_read_only()
        self.input.read_only = True

    def list_structures(self):
        if self.status.finished:
            return self.structure_lst
        else:
            return []

    def write_structure(self, cwd, file_name="structure_wrapper.sx"):
        structure_file_name = "structure.sx"
        iw = InputWriter()
        iw.structure = self.structure
        iw.write_structure(file_name=structure_file_name, cwd=cwd)
        with open(os.path.join(cwd, file_name), "w") as f:
            f.writelines(["structure { include <" + structure_file_name + ">; }"])

    def extract_structure(self, working_directory):
        structure_lst = [self.structure]
        parser = Output(self)
        for f in os.listdir(working_directory):
            if "input-disp" in f:
                structure_template = self.structure.copy()
                parser.collect_relaxed_hist(file_name=f, cwd=working_directory)
                structure_template.cell = parser._parse_dict["cell"][0]
                structure_template.positions = parser._parse_dict["positions"][0]
                structure_lst.append(structure_template)
        return structure_lst

    def write_input(self):
        self.write_structure(
            cwd=self.working_directory, file_name="structure_wrapper.sx"
        )
        lines = [
            "#!/bin/bash\n",
            "sxuniqdispl --log -d "
            + str(float(self.input["displacement"]) / BOHR_TO_ANGSTROM)
            + " -i structure_wrapper.sx\n",
        ]
        with open(os.path.join(self.working_directory, "sxuniqdispl.sh"), "w") as f:
            f.writelines(lines)

    def collect_output(self):
        self.structure_lst = self.extract_structure(
            working_directory=self.working_directory
        )
        with self.project_hdf5.open("output") as hdf_out:
            for ind, struct in enumerate(self.structure_lst):
                struct.to_hdf(hdf=hdf_out, group_name="structure_" + str(ind))

    def to_hdf(self, hdf=None, group_name=None):
        """
        Store the SxUniqDispl object in the HDF5 File

        Args:
            hdf (ProjectHDFio): HDF5 group object - optional
            group_name (str): HDF5 subgroup name - optional
        """
        super(SxUniqDispl, self).to_hdf(hdf=hdf, group_name=group_name)
        with self.project_hdf5.open("input") as hdf5_input:
            self.input.to_hdf(hdf5_input)

    def from_hdf(self, hdf=None, group_name=None):
        """
        Restore the SxUniqDispl object from the HDF5 File

        Args:
            hdf (ProjectHDFio): HDF5 group object - optional
            group_name (str): HDF5 subgroup name - optional
        """
        super(SxUniqDispl, self).from_hdf(hdf=hdf, group_name=group_name)
        with self.project_hdf5.open("input") as hdf5_input:
            self.input.from_hdf(hdf5_input)
        if "output" in self.project_hdf5.list_groups():
            with self.project_hdf5.open("output") as hdf5_output:
                self.structure_lst = [
                    Atoms().from_hdf(hdf5_output, group_name)
                    for group_name in hdf5_output.list_groups()
                ]
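A usage sketch for the displacement generator (illustrative; project and structure names are placeholders):

# Illustrative sketch: generate the symmetry-unique displaced structures
# that a downstream Hessian/phonon workflow would evaluate.
from pyiron import Project

pr = Project("displ_example")
job = pr.create_job(SxUniqDispl, "uniq_displ")
job.structure = pr.create_ase_bulk("Fe", cubic=True)
job.input["displacement"] = 0.02  # Angstrom, converted to Bohr in write_input()
job.run()
structures = job.list_structures()  # reference structure plus displaced copies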
class InteractiveWrapper(GenericMaster):
    def __init__(self, project, job_name):
        super(InteractiveWrapper, self).__init__(project, job_name)
        self._ref_job = None
        self.input = GenericParameters("parameters")

    @property
    def structure(self):
        if self.ref_job:
            return self._ref_job.structure
        else:
            return None

    @structure.setter
    def structure(self, basis):
        if self.ref_job:
            self._ref_job.structure = basis
        else:
            raise ValueError(
                "A structure can only be set after a start job has been assinged."
            )

    @property
    def ref_job(self):
        """
        Get the reference job template from which all jobs within the ParallelMaster are generated.

        Returns:
            GenericJob: reference job
        """
        if self._ref_job is not None:
            return self._ref_job
        try:
            if isinstance(self[0], GenericJob):
                self._ref_job = self[0]
                return self._ref_job
            else:
                return None
        except IndexError:
            return None

    @ref_job.setter
    def ref_job(self, ref_job):
        """
        Set the reference job template from which all jobs within the ParallelMaster are generated.

        Args:
            ref_job (GenericJob): reference job
        """
        if not ref_job.server.run_mode.interactive:
            warnings.warn(
                "Run mode of the reference job not set to interactive")
        self.append(ref_job)

    def set_input_to_read_only(self):
        """
        This function enforces read-only mode for the input classes, but it has to be implemented in the
        individual classes.
        """
        self.input.read_only = True

    def validate_ready_to_run(self):
        """
        Validate that the calculation is ready to be executed. By default no generic checks are performed, but one could
        check that the input information is complete or validate the consistency of the input at this point.
        """
        self.ref_job.validate_ready_to_run()

    def check_setup(self):
        """
        Checks whether certain parameters (such as the plane-wave cutoff radius in DFT) are changed from the pyiron
        standard values to allow for physically meaningful results. This function is called manually, or automatically
        when the job is submitted to the queueing system.
        """
        try:
            self.ref_job.check_setup()
        except AttributeError:
            pass

    def ref_job_initialize(self):
        """

        """
        if len(self._job_name_lst) > 0:
            self._ref_job = self.pop(-1)
            if self._job_id is not None and self._ref_job._master_id is None:
                self._ref_job.master_id = self.job_id
                self._ref_job.server.cores = self.server.cores

    def get_final_structure(self):
        """

        Returns:

        """
        warnings.warn(
            "get_final_structure() is deprecated - please use get_structure() instead.",
            DeprecationWarning,
        )
        if self.ref_job:
            return self._ref_job.get_structure(iteration_step=-1)
        else:
            return None

    def to_hdf(self, hdf=None, group_name=None):
        """
        Store the InteractiveWrapper in an HDF5 file

        Args:
            hdf (ProjectHDFio): HDF5 group object - optional
            group_name (str): HDF5 subgroup name - optional
        """
        if self._ref_job is not None and self._ref_job.job_id is None:
            self.append(self._ref_job)
        super(InteractiveWrapper, self).to_hdf(hdf=hdf, group_name=group_name)
        with self.project_hdf5.open("input") as hdf5_input:
            self.input.to_hdf(hdf5_input)

    def from_hdf(self, hdf=None, group_name=None):
        """
        Restore the InteractiveWrapper from an HDF5 file

        Args:
            hdf (ProjectHDFio): HDF5 group object - optional
            group_name (str): HDF5 subgroup name - optional
        """
        super(InteractiveWrapper, self).from_hdf(hdf=hdf,
                                                 group_name=group_name)
        with self.project_hdf5.open("input") as hdf5_input:
            self.input.from_hdf(hdf5_input)

    def collect_output(self):
        pass

    def collect_logfiles(self):
        pass

    def db_entry(self):
        """
        Generate the initial database entry

        Returns:
            (dict): db_dict
        """
        db_dict = super(InteractiveWrapper, self).db_entry()
        if self.structure:
            if isinstance(self.structure, PAtoms):
                parent_structure = self.structure.get_parent_basis()
            else:
                parent_structure = ase_to_pyiron(
                    self.structure).get_parent_basis()
            db_dict["ChemicalFormula"] = parent_structure.get_chemical_formula(
            )
        return db_dict

    def _db_entry_update_run_time(self):
        """

        Returns:

        """
        job_id = self.get_job_id()
        db_dict = {}
        start_time = self.project.db.get_item_by_id(job_id)["timestart"]
        db_dict["timestop"] = datetime.now()
        db_dict["totalcputime"] = (db_dict["timestop"] - start_time).seconds
        self.project.db.item_update(db_dict, job_id)

    def _finish_job(self):
        """

        Returns:

        """
        self.status.finished = True
        self._db_entry_update_run_time()
        self._logger.info("{}, status: {}, interactive wrapper".format(
            self.job_info_str, self.status))
        self._calculate_successor()
        self.send_to_database()
        self.update_master()

    def __getitem__(self, item):
        """
        Get/ read data from the GenericMaster

        Args:
            item (str, slice): path to the data or key of the data object

        Returns:
            dict, list, float, int: data or data object
        """
        child_id_lst = self.child_ids
        child_name_lst = [
            self.project.db.get_item_by_id(child_id)["job"]
            for child_id in self.child_ids
        ]
        if isinstance(item, int):
            total_lst = child_name_lst + self._job_name_lst
            item = total_lst[item]
        return self._get_item_when_str(item=item,
                                       child_id_lst=child_id_lst,
                                       child_name_lst=child_name_lst)
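InteractiveWrapper is meant to be subclassed; a usage sketch (illustrative - SomeWrapper stands in for a concrete subclass, and "Lammps" is an example reference job type):

# Illustrative sketch: attach an interactive reference job to a wrapper master.
from pyiron import Project

pr = Project("wrapper_example")
ref = pr.create_job("Lammps", "lmp_ref")
ref.structure = pr.create_ase_bulk("Cu", cubic=True)
ref.server.run_mode.interactive = True  # otherwise the ref_job setter warns
master = pr.create_job(SomeWrapper, "wrapper")
master.ref_job = ref       # appended as a child job via self.append(ref_job)
print(master.structure)    # delegated to the reference job's structure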