# #########################################################
# Load the slab-structures dataframe (project helper; schema not visible here)
df_slab = get_df_slab()

# #########################################################
# Load the dataframe mapping jobs to their on-disk paths
df_jobs_paths = get_df_jobs_paths()

# #########################################################
# Load per-job data (outputs parsed from the DFT job directories)
df_jobs_data = get_df_jobs_data()

# #########################################################
# Load job-analysis dataframe and keep only jobs flagged as completely done
df_jobs_anal = get_df_jobs_anal()
df_jobs_anal_completed = df_jobs_anal[df_jobs_anal.job_completely_done == True]

# #########################################################
# Load the initial (unrelaxed) slab structures dataframe
df_init_slabs = get_df_init_slabs()
# -

# # Removing rows that don't have the necessary files present locally
#
# Might need to download them with rclone

# Job indices (compenv, slab_id, ads, active_site, att_num) whose files are
# not present locally (may need to be pulled down with rclone).
# NOTE(review): the original list literal was never closed (SyntaxError); the
# closing bracket — and, presumably, a follow-up step that drops these rows —
# appears to have been lost. Restored the bracket; confirm the filtering step.
indices_tmp = [
    ('sherlock', 'ripirefu_15', 'bare', 62.0, 1),
    ('sherlock', 'ripirefu_15', 'bare', 66.0, 1),
    ('sherlock', 'ripirefu_15', 'bare', 67.0, 1),
    ('sherlock', 'ripirefu_15', 'oh', 49.0, 0),
    ('sherlock', 'ripirefu_15', 'oh', 49.0, 2),
    ('sherlock', 'ripirefu_15', 'oh', 49.0, 3),
    ('sherlock', 'ripirefu_15', 'oh', 62.0, 0),
    ('sherlock', 'ripirefu_15', 'oh', 62.0, 1),
    ]
# ### Save data to pickle

# Pickling data ###########################################
# Persist df_init_slabs under $PROJ_irox_oer/.../out_data for later reloading
# via get_df_init_slabs()
directory = os.path.join(os.environ["PROJ_irox_oer"],
                         "dft_workflow/job_analysis/get_init_slabs_bare_oh",
                         "out_data")
# exist_ok=True replaces the racy `if not os.path.exists(...)` check-then-create
os.makedirs(directory, exist_ok=True)
with open(os.path.join(directory, "df_init_slabs.pickle"), "wb") as fle:
    pickle.dump(df_init_slabs, fle)
# #########################################################

# +
# Sanity check: re-read the just-pickled dataframe through the project
# accessor and preview the first rows
from methods import get_df_init_slabs

df_init_slabs_tmp = get_df_init_slabs()
df_init_slabs_tmp.head()
# -

# #########################################################
# Final status banner: elapsed wall time in minutes since `ti` was recorded
banner = 20 * "# # "
print(banner)
print("All done!")
print("Run time:", np.round((time.time() - ti) / 60, 3), "min")
print("get_init_slabs_bare_oh.ipynb")
print(banner)
# #########################################################

# + active=""
#
#
#