Example #1
    def from_hdf(self, hdf=None, group_name=None):
        """
        Restore the pyiron table job from HDF5

        Args:
            hdf (ProjectHDFio): HDF5 group object - optional
            group_name (str): HDF5 subgroup name - optional
        """
        super(TableJob, self).from_hdf(hdf=hdf, group_name=group_name)
        with self.project_hdf5.open("input") as hdf5_input:
            if 'project' in hdf5_input.list_nodes():
                project_dict = hdf5_input['project']
                project = self.project.__class__(
                    path=project_dict['path'],
                    user=project_dict['user'],
                    sql_query=project_dict['sql_query'])
                project._filter = project_dict['filter']
                project._inspect_mode = project_dict['inspect_mode']
                self.analysis_project = project
            if 'filter' in hdf5_input.list_nodes():
                self._filter_function_str = hdf5_input['filter']
                self.pyiron_table.filter_function = get_function_from_string(
                    hdf5_input['filter'])
            bool_dict = hdf5_input['bool_dict']
            self._enforce_update = bool_dict['enforce_update']
            self._pyiron_table.convert_to_object = bool_dict['convert_to_object']
            self._pyiron_table.add._from_hdf(hdf5_input)
        pyiron_table = os.path.join(self.working_directory, 'pyirontable.csv')
        if os.path.exists(pyiron_table):
            try:
                self._pyiron_table._df = pandas.read_csv(pyiron_table)
                self._pyiron_table._csv_file = pyiron_table
            except EmptyDataError:
                pass
        else:
            with self.project_hdf5.open("output") as hdf5_output:
                if 'table' in hdf5_output.list_nodes():
                    self._pyiron_table._df = pandas.DataFrame(
                        json.loads(hdf5_output['table']))
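
All of the examples on this page rebuild callables from stored source text via get_function_from_string. As a rough idea of what that entails, here is a minimal sketch under the assumption that the stored string holds a single top-level def; function_from_source and its name-parsing shortcut are illustrative, not the pyiron implementation:

import textwrap


def function_from_source(function_str):
    # Dedent so source copied out of a class body still parses.
    source = textwrap.dedent(function_str)
    namespace = {}
    exec(source, namespace)  # defines the function inside `namespace`
    # Read the function name back out of "def <name>(...)" on the first line.
    name = source.split("def ", 1)[1].split("(", 1)[0].strip()
    return namespace[name]


# Round-trip a filter function through its source representation:
filter_src = "def job_filter(job):\n    return job.status == 'finished'"
job_filter = function_from_source(filter_src)
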
Example #2
    def from_hdf(self, hdf=None, group_name=None):
        """
        Restore pyiron table job from HDF5

        Args:
            hdf (ProjectHDFio): HDF5 group object - optional
            group_name (str): HDF5 subgroup name - optional
        """
        super(TableJob, self).from_hdf(hdf=hdf, group_name=group_name)
        with self.project_hdf5.open("input") as hdf5_input:
            if "project" in hdf5_input.list_nodes():
                project_dict = hdf5_input["project"]
                project = self.project.__class__(
                    path=project_dict["path"],
                    user=project_dict["user"],
                    sql_query=project_dict["sql_query"],
                )
                project._filter = project_dict["filter"]
                project._inspect_mode = project_dict["inspect_mode"]
                self.analysis_project = project
            if "filter" in hdf5_input.list_nodes():
                self.pyiron_table._filter_function_str = hdf5_input["filter"]
                self.pyiron_table.filter_function = get_function_from_string(
                    hdf5_input["filter"]
                )
            if "db_filter" in hdf5_input.list_nodes():
                self.pyiron_table._db_filter_function_str = hdf5_input["db_filter"]
                self.pyiron_table.db_filter_function = get_function_from_string(
                    hdf5_input["db_filter"]
                )
            bool_dict = hdf5_input["bool_dict"]
            self._enforce_update = bool_dict["enforce_update"]
            self._pyiron_table.convert_to_object = bool_dict["convert_to_object"]
            self._pyiron_table.add._from_hdf(hdf5_input)
        pyiron_table = os.path.join(self.working_directory, "pyirontable.csv")
        if os.path.exists(pyiron_table):
            try:
                self._pyiron_table._df = pandas.read_csv(pyiron_table)
                self._pyiron_table._csv_file = pyiron_table
            except EmptyDataError:
                pass
        else:
            with self.project_hdf5.open("output") as hdf5_output:
                if "table" in hdf5_output.list_nodes():
                    self._pyiron_table._df = pandas.DataFrame(
                        json.loads(hdf5_output["table"])
                    )
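
Example #2 adds a separate db_filter branch but otherwise mirrors Example #1. For context, a hedged sketch of the kind of filter function these branches restore; the criterion and the commented-out assignment are illustrative, while pyiron_table.filter_function is the attribute used in the code above:

def finished_jobs_only(job):
    # Illustrative criterion: keep only jobs that completed successfully.
    return job.status == "finished"


# Assuming `table` is a TableJob instance created elsewhere:
# table.pyiron_table.filter_function = finished_jobs_only
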
Example #3
    def from_hdf(self, hdf=None, group_name=None):
        """
        Restore the SerialMaster from an HDF5 file

        Args:
            hdf (ProjectHDFio): HDF5 group object - optional
            group_name (str): HDF5 subgroup name - optional
        """
        super(SerialMasterBase, self).from_hdf(hdf=hdf, group_name=group_name)
        with self.project_hdf5.open("input") as hdf5_input:
            self._input.from_hdf(hdf5_input)
            convergence_goal_str = hdf5_input["convergence_goal"]
            if convergence_goal_str == "None":
                self._convergence_goal = None
            else:
                self._convergence_goal_str = convergence_goal_str
                self._convergence_goal = get_function_from_string(convergence_goal_str)
                self._convergence_goal_qwargs = hdf5_input["convergence_goal_qwargs"]
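
The comparison against the literal string "None" suggests the value stored in HDF5 is plain text: presumably str(None) when no goal is set, and the goal's source code otherwise, so that get_function_from_string can rebuild it. A minimal sketch of that assumed write side; serialize_goal is a hypothetical helper, not the pyiron API:

import inspect


def serialize_goal(convergence_goal):
    # Hypothetical write-side counterpart: store the text "None" when no
    # goal is set, otherwise store the function's source so it can be
    # rebuilt with get_function_from_string on restore.
    if convergence_goal is None:
        return "None"
    return inspect.getsource(convergence_goal)
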
Example #4
    def from_hdf(self, hdf=None, group_name=None):
        """
        Restore the ParameterMaster from an HDF5 file

        Args:
            hdf (ProjectHDFio): HDF5 group object - optional
            group_name (str): HDF5 subgroup name - optional
        """
        super(MapMaster, self).from_hdf(hdf=hdf, group_name=group_name)
        with self.project_hdf5.open("input") as hdf5_input:
            if "structures" in hdf5_input.list_groups():
                with hdf5_input.open("structures") as hdf5_input_str:
                    self.parameter_list = [
                        Atoms().from_hdf(hdf5_input_str, group_name)
                        for group_name in sorted(hdf5_input_str.list_groups())
                    ]
            else:
                self.parameter_list = hdf5_input["parameters_list"]
            function_str = hdf5_input["map_function"]
            if function_str == "None":
                self._map_function = None
            else:
                self._map_function = get_function_from_string(function_str)
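
The restore path above supports two layouts for parameter_list: a "structures" group holding one Atoms object per subgroup, or a plain "parameters_list" node. A minimal sketch of an assumed write side that would produce either layout; store_parameter_list and the zero-padded group names are hypothetical, while to_hdf and the node/group names come from the code above:

def store_parameter_list(hdf5_input, parameter_list):
    # Hypothetical helper: write Atoms objects as one subgroup each so that
    # from_hdf can rebuild them with Atoms().from_hdf; otherwise store the
    # raw values as a single node.
    if all(hasattr(p, "to_hdf") for p in parameter_list):
        with hdf5_input.open("structures") as hdf5_str:
            for i, structure in enumerate(parameter_list):
                # Zero-padded names keep sorted(list_groups()) in write order.
                structure.to_hdf(hdf5_str, group_name="structure_{:07d}".format(i))
    else:
        hdf5_input["parameters_list"] = parameter_list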