def __init__(
        self,
        tree_level=None,
        level_entries=None,
        skip_dirs_lst=None,
        indiv_dir_lst=None,  # <-----------------------------------------------
        indiv_job_lst=None,
        root_dir=".",
        working_dir=".",
        update_job_state=False,
        load_dataframe=True,
    ):
        """Initialize Jobs_Manager instance.

        Args:
            tree_level:
            level_entries:
            skip_dirs_lst:
            working_dir:
            update_job_state:
            load_dataframe:

        TEMP TEMP
        """
        #| - __init__

        DFT_Jobs_Analysis.__init__(
            self,
            # # system=system,
            # tree_level=tree_level,
            # level_entries=level_entries,
            # working_dir=working_dir,
            # update_job_state=update_job_state,
            # load_dataframe=load_dataframe,
            tree_level=tree_level,
            level_entries=level_entries,
            skip_dirs_lst=skip_dirs_lst,
            indiv_dir_lst=indiv_dir_lst,
            indiv_job_lst=indiv_job_lst,
            root_dir=root_dir,
            working_dir=working_dir,
            update_job_state=update_job_state,
            load_dataframe=load_dataframe,

            # dataframe_dir=None,
            # job_type_class=None,
            # folders_exist=None,
        )
# --- Example #2 ---
    def __create_jobs_an__(self):
        """Create one DFT_Jobs_Analysis instance per workflow step.

        Iterates over ``self.step_dir_names`` and, for each step, builds
        an analysis object from that step's tree labels/values and
        individual dir/job lists. Each instance is created with
        ``update_job_state=False`` and ``load_dataframe=False`` so no
        state refresh or cached-dataframe load happens here.

        Returns:
            list: The DFT_Jobs_Analysis instances, one per step, in
            step order.
        """
        # | - __create_jobs_an__
        master_root_dir = self.root_dir

        jobs_inst_list = []
        # enumerate from 1 only for the human-readable progress message
        for step_idx, step_dir_name in enumerate(self.step_dir_names):
            print("Initializing Job Instance: " + str(step_idx + 1))  # PERM_PRINT

            jobs_an = DFT_Jobs_Analysis(
                tree_level=self.tree_level_labels_list[step_idx],
                level_entries=self.tree_level_values_list[step_idx],
                indiv_dir_lst=self.indiv_dir_lst_list[step_idx],
                indiv_job_lst=self.indiv_job_lst_list[step_idx],

                root_dir=master_root_dir,
                working_dir=step_dir_name,
                update_job_state=False,
                load_dataframe=False,
                )

            jobs_inst_list.append(jobs_an)

        return jobs_inst_list
        # __|
# Build a parser helper and run a full (non-cached) analysis over an
# explicit list of job directories.
# NOTE(review): `DFT_Methods`, `dir_list`, and `DFT_Jobs_Analysis` are
# defined elsewhere in the project; this snippet assumes they are in scope.
dft_inst = DFT_Methods(
    # Parser methods to apply to each job directory.
    methods_to_run=[
        "elec_energy",
        "init_atoms",
        "atoms_object",
        "dft_params",
        # "incar",
        # "outcar"
    ],
    DFT_code="QE",  # presumably Quantum ESPRESSO — TODO confirm
)

Jobs = DFT_Jobs_Analysis(
    indiv_dir_lst=dir_list,  # explicit job directories (no tree traversal)
    working_dir=".",
    folders_exist=True,  # assumes job folders already exist — TODO confirm
    load_dataframe=False,  # re-parse jobs instead of loading a cached dataframe
    job_type_class=dft_inst,
)
#__|

df_all = Jobs.data_frame
# presumably keeps only the latest revision of each job — TODO confirm
df_m = Jobs.filter_early_revisions(Jobs.data_frame)

#| - Job Maintenance
print(25 * "*")

# Running tally of job states, filled in by the loop below (body appears
# truncated at this snippet boundary).
tally = {"successes": 0, "failures": 0, "running": 0, "pending": 0}

for Job_i in Jobs.Job_list:
    path_i = Job_i.full_path
import sys
import os

import pickle

import pandas as pd
# Show all columns and rows when displaying dataframes interactively.
pd.set_option("display.max_columns", None)
pd.set_option('display.max_rows', None)
# -

from dft_job_automat.job_analysis import DFT_Jobs_Analysis

# Load a previously saved analysis dataframe (load_dataframe=True), so no
# job directories are re-parsed here.
Jobs = DFT_Jobs_Analysis(
    update_job_state=False,
    job_type_class=None,
    load_dataframe=True,
    root_dir='/mnt/c/Users/raul_desktop/Dropbox/01_norskov/04_comp_clusters/02_DATA/04_IrOx_surfaces_OER',
    working_dir='/mnt/c/Users/raul_desktop/Dropbox/01_norskov/04_comp_clusters/02_DATA/04_IrOx_surfaces_OER',
    dataframe_dir='/mnt/c/Users/raul_desktop/Dropbox/01_norskov/04_comp_clusters/02_DATA/04_IrOx_surfaces_OER/181226_new_job_df',
    )

# presumably keeps only the latest revision of each job — TODO confirm
df = Jobs.filter_early_revisions(Jobs.data_frame)

# Interactive inspection of the IrO3_battery rows (results displayed, not
# assigned).
df[df["bulk_system"] == "IrO3_battery"]

df[
    (df["bulk_system"] == "IrO3_battery") & \
    (df["surface_type"] == "a")

    ]

# +
# --- Example #5 ---
    def __init__(
        self,
        tree_level=None,
        level_entries=None,
        skip_dirs_lst=None,
        indiv_dir_lst=None,
        indiv_job_lst=None,
        root_dir=".",
        working_dir=".",
        update_job_state=False,
        load_dataframe=True,
        dataframe_dir=None,
        job_type_class=None,
        methods_to_run=None,
        folders_exist=None,
        parse_all_revisions=True,
    ):
        """Initialize Jobs_Manager instance.

        Thin wrapper: every argument is forwarded unchanged to
        DFT_Jobs_Analysis.__init__.

        Args:
            tree_level: Forwarded as-is; presumably the labels of each
                level of the job directory tree — TODO confirm.
            level_entries: Forwarded as-is; presumably the values
                populating each tree level — TODO confirm.
            skip_dirs_lst: Forwarded as-is; directories to skip.
            indiv_dir_lst: Forwarded as-is; explicit job directories.
            indiv_job_lst: Forwarded as-is; explicit job entries.
            root_dir: Forwarded as-is (default ".").
            working_dir: Forwarded as-is (default ".").
            update_job_state: Forwarded as-is (default False).
            load_dataframe: Forwarded as-is (default True).
            dataframe_dir: Forwarded as-is; where the cached dataframe
                lives — TODO confirm.
            job_type_class: Forwarded as-is; parser/methods helper object.
            methods_to_run: Forwarded as-is; parser callables to apply.
            folders_exist: Forwarded as-is.
            parse_all_revisions: Forwarded as-is (default True).
        """
        # | - __init__
        DFT_Jobs_Analysis.__init__(
            self,
            tree_level=tree_level,
            level_entries=level_entries,
            skip_dirs_lst=skip_dirs_lst,
            indiv_dir_lst=indiv_dir_lst,
            indiv_job_lst=indiv_job_lst,
            root_dir=root_dir,
            working_dir=working_dir,
            update_job_state=update_job_state,
            load_dataframe=load_dataframe,
            dataframe_dir=dataframe_dir,
            job_type_class=job_type_class,
            methods_to_run=methods_to_run,
            folders_exist=folders_exist,
            parse_all_revisions=parse_all_revisions,
        )
        # __|
    else:
        out = None

    return(out)
    #__|

#__|

# Parse an explicit job-dict list with custom parser callables; only the
# latest revision of each job is parsed (parse_all_revisions=False).
# NOTE(review): `master_job_list`, `parse_info`, and
# `parse_out_for_mismatch` are defined elsewhere; assumed in scope.
Jobs = DFT_Jobs_Analysis(
    indiv_job_dict_lst=master_job_list,
    working_dir="..",
    folders_exist=True,
    load_dataframe=False,  # re-parse instead of loading a cached dataframe
    # job_type_class=dft_inst,
    job_type_class=None,
    parse_all_revisions=False,
    methods_to_run=[
        parse_info,
        parse_out_for_mismatch,
        ]
    )

df_all = Jobs.data_frame
# presumably keeps only the latest revision of each job — TODO confirm
df_m = Jobs.filter_early_revisions(Jobs.data_frame)
#__|


#| - __old__
# print(40 * "#")
#