コード例 #1
0
def load_tb(force=0):
    """Load eval/* tensorboard metrics, using the pickle cache when available.

    A truthy *force* bypasses the cache and re-reads the raw logs.
    """
    try:
        if force:
            raise FileNotFoundError
        df = pd.read_pickle(cache_path)
    except FileNotFoundError:
        frames = []
        for run_dir in glob.glob(f"{tensorboard_path}/*/model*/PPO_1"):
            print(f"Loading {run_dir}")
            if not os.path.exists(run_dir):
                continue
            frame = tflogs2pandas.tflog2pandas(run_dir)
            frame = frame[frame["metric"].str.startswith("eval/")]
            # Runs restored from a "best" checkpoint get their own label.
            frame["label"] = "#1" if "best" in run_dir else "#2"
            frames.append(frame)
        df = pd.concat(frames)
        df.to_pickle(cache_path)
    return df
コード例 #2
0
def load_tb(force=0):
    """Return cached eval/* tensorboard data, rebuilding the cache on demand."""
    try:
        if force:
            raise FileNotFoundError
        df = pd.read_pickle(cache_path)
    except FileNotFoundError:
        frames = []
        for job in all_jobs.values():
            tb_path = f"output_data/tensorboard_vanilla4/model-399-499-599-699-CustomAlignWrapper-md{job['str_md5']}-sd{job['run_seed']}/PPO_1"
            print(f"Loading {tb_path}")
            if not os.path.exists(tb_path):
                continue
            frame = tflogs2pandas.tflog2pandas(tb_path)
            frame = frame[frame["metric"].str.startswith("eval/")]
            # Attach job metadata so runs can be grouped later.
            frame["alignment_id"] = job["seed"]
            frame["custom_alignment"] = job["custom_alignment"]
            frame["str_md5"] = job["str_md5"]
            frame["vacc_run_seed"] = job['run_seed']
            frames.append(frame)
        df = pd.concat(frames)
        df.to_pickle(cache_path)
    return df
コード例 #3
0
def oracles():
    """Load the per-body oracle eval curves from the cached pickle.

    NOTE(review): the rebuild branch is disabled (`if False:`); only the cached
    pickle is read.  The rebuild code is kept runnable for when it is
    re-enabled: `DataFrame.append` was removed in pandas 2.0, so rows are
    accumulated in a list and concatenated once.
    """
    seeds = list(range(2))
    body_types = [300, 400, 500, 600]
    num_tested_bodies = 16
    if False:
        frames = []
        for body_type in body_types:
            for tested in range(num_tested_bodies):
                body = body_type + tested
                for seed in seeds:
                    path = f"output_data/tensorboard_oracles/model-{body}-sd{seed}/PPO_1"
                    print(f"Loading {path}")
                    df = tflog2pandas(path)
                    df_body = df[df["metric"] ==
                                 f"eval/{body}_mean_reward"].copy()
                    # 62 rows == a complete run; skip partial logs.
                    if df_body.shape[0] != 62:
                        print(df_body.shape)
                        continue
                    df_body["body"] = template(body)
                    df_body["seed"] = seed
                    frames.append(df_body)
        df_oracle = pd.concat(frames) if frames else pd.DataFrame()
        df_oracle["method"] = "oracle"
        df_oracle.to_pickle("output_data/tmp/df_oracle")
    else:
        df_oracle = pd.read_pickle("output_data/tmp/df_oracle")
    print(df_oracle)
    return df_oracle
コード例 #4
0
def same_topology_get_oracles(tensorboard_path, read_cache=False):
    """Collect oracle eval reward curves for the same-topology experiment.

    Reads the pickle cache when *read_cache* is True; otherwise parses the
    tensorboard logs under *tensorboard_path* and rebuilds the cache.

    Raises:
        Exception: if a run's log is incomplete (fewer than 312 rows).
    """
    method = "oracles"
    seeds = list(range(2))
    body_types = [
        300,400,500,600
    ]
    num_tested_bodies = 16
    try:
        if not read_cache:
            raise Exception("not_read_cache")
        df_results = pd.read_pickle(f"output_data/tmp/same_topology_{method}")
    except Exception as e:
        if str(e)!="not_read_cache" and not isinstance(e, FileNotFoundError):
            raise  # bare raise preserves the original traceback
        frames = []
        for body_type in body_types:
            for tested in range(num_tested_bodies):
                body = body_type + tested
                for seed in seeds:
                    path = f"{tensorboard_path}/model-{body}-sd{seed}/PPO_1"
                    print(f"Loading {path}")
                    df = tflog2pandas(path)
                    df_body = df[df["metric"]==f"eval/{body}_mean_reward"].copy()
                    if df_body.shape[0]!=312:
                        print(df_body.shape)
                        raise Exception("Data is not complete.")
                    df_body["body"] = template(body)
                    df_body["seed"] = seed
                    frames.append(df_body)
        # DataFrame.append was removed in pandas 2.0; accumulate and concat.
        df_results = pd.concat(frames) if frames else pd.DataFrame()
        df_results["method"] = "oracle"
        df_results["num_bodies"] = 1
        print(f"Saving file output_data/tmp/same_topology_{method}...")
        df_results.to_pickle(f"output_data/tmp/same_topology_{method}")
    return df_results
コード例 #5
0
 def read_df(body):
     """Summarize the eval reward curve of *body* across all seeds.

     Returns a one-row-per-seed DataFrame with max/final values, or None
     when every seed's log is incomplete.
     """
     per_seed = []
     for seed in seeds:
         folder = f"{tb_folder}/model-{body}-sd{seed}/PPO_1"
         print(f"Loading {folder} ...")
         raw = tflog2pandas(folder)
         if raw.shape[0] < 2697:
             print(f"Data not complete! Skip!")
             continue
         curve = raw[raw["metric"] == f"eval/{body}_mean_reward"]
         per_seed.append(
             pd.DataFrame(
                 {
                     "body": template(body),
                     "body_id": body,
                     "max_value": curve["value"].max(),
                     "final_value": curve.iloc[-1, curve.columns.get_loc("value")],
                     "seed": seed,
                 },
                 index=[body]))
     return pd.concat(per_seed) if per_seed else None
コード例 #6
0
def load_tb(force=0):
    """Load eval/* tensorboard metrics for the 9xx experiments, with caching."""
    cache_path = "output_data/tmp/9xx_tb_results.pandas"
    try:
        if force:
            raise FileNotFoundError
        df = pd.read_pickle(cache_path)
    except FileNotFoundError:
        seen_md5 = set()
        frames = []
        for job in all_jobs:
            # Run seeds were not recorded when the jobs were generated, so
            # discover runs on disk via glob; visit each alignment hash once.
            if job['str_md5'] in seen_md5:
                continue
            seen_md5.add(job['str_md5'])

            pattern = f"output_data/tensorboard_9xx/9xx_mutate_{job['num_mutate']}/model-900-901-902-903-904-905-906-907-CustomAlignWrapper-md{job['str_md5']}-sd*/PPO_1"
            for tb_path in glob.glob(pattern):
                # Recover the run seed from the folder name ("...-sd<seed>/...").
                vacc_run_seed = tb_path.split("sd")[-1].split("/")[0]
                print(f"Loading {tb_path}")
                if not os.path.exists(tb_path):
                    continue
                frame = tflogs2pandas.tflog2pandas(tb_path)
                frame = frame[frame["metric"].str.startswith("eval/")]
                frame["num_mutate"] = job["num_mutate"]
                frame["body_seed"] = job["body_seed"]
                frame["custom_alignment"] = job["custom_alignment"]
                frame["str_md5"] = job["str_md5"]
                frame["vacc_run_seed"] = vacc_run_seed
                frames.append(frame)
        df = pd.concat(frames)
        print(df)
        df.to_pickle(cache_path)
    return df
コード例 #7
0
def same_topology_get_results(tensorboard_path, body_arr, seed, method, read_cache=False):
    """Collect eval reward curves for a same-topology experiment.

    Reads the pickle cache when *read_cache* is True; otherwise parses the
    tensorboard logs and rebuilds the cache.  Incomplete runs are recorded
    in `need_retrain` instead of aborting.
    """
    str_body_arr = '-'.join(str(x) for x in body_arr)
    stacked_training_bodies = np.array(body_arr)
    exp = [
        (stacked_training_bodies + 300).tolist(),
        (stacked_training_bodies + 400).tolist(),
        (stacked_training_bodies + 500).tolist(),
        (stacked_training_bodies + 600).tolist(),
    ]
    seeds = [seed]
    need_retrain = []
    # Folder-name suffix for each known method; unknown methods fall back
    # to "-<method>".
    method_suffixes = {
        "joints_only": "-ra-ph-pfc",
        "aligned": "-",
        "general_joints_feetcontact": "-ra",
        "joints_feetcontact": "-ra-ph",
    }
    try:
        if not read_cache:
            raise Exception("not_read_cache")
        df_results = pd.read_pickle(f"output_data/tmp/same_topology_{str_body_arr}_sd{seed}_{method}")
    except Exception as e:
        if str(e)!="not_read_cache" and not isinstance(e, FileNotFoundError):
            raise  # bare raise preserves the original traceback
        frames = []
        for bodies in exp:
            for seed in seeds:
                str_method = method_suffixes.get(method, f"-{method}")
                path = f"{tensorboard_path}/model-(unknown){str_method}-sd{seed}/PPO_1"
                if not os.path.exists(path):
                    # Older runs used a different naming scheme; try it too.
                    path = f"{tensorboard_path}/model-(unknown)-{method}-sd{seed}/PPO_1"
                    if not os.path.exists(path):
                        raise Exception(f"Path not found. {path}")
                print(f"Loading {path}")
                df = tflog2pandas(path)
                for body in bodies:
                    df_body = df[df["metric"]==f"eval/{body}_mean_reward"].copy()
                    if df_body.shape[0]<62:
                        print(df_body.shape)
                        need_retrain.append({
                            "bodies": bodies,
                            "seed": seed,
                            "method": method,
                        })
                        # raise Exception("Data is not complete.")
                    df_body["body"] = template(body)
                    df_body["seed"] = seed
                    df_body["method"] = method
                    df_body["num_bodies"] = len(body_arr)
                    frames.append(df_body)

        # DataFrame.append was removed in pandas 2.0; accumulate and concat.
        df_results = pd.concat(frames) if frames else pd.DataFrame()
        df_results.to_pickle(f"output_data/tmp/same_topology_{str_body_arr}_sd{seed}_{method}")

    return df_results
コード例 #8
0
def load_tb(force=0):
    """Load eval/* tensorboard metrics, preferring the pickle cache.

    When the job pickle cannot be read, runs are discovered on disk with
    glob instead; in that case the per-job metadata columns are not
    attached.  A truthy *force* bypasses the cache.
    """
    try:
        if force:
            raise FileNotFoundError
        df = pd.read_pickle(cache_path)
    except FileNotFoundError:
        dfs = []
        all_jobs = []
        use_glob = False
        try:
            with open(f"output_data/jobs/{exp_name}.pickle", "rb") as f:
                all_jobs = pickle.load(f)
        except Exception:
            # Deliberate best-effort fallback.  (Was a bare `except:`, which
            # also swallowed KeyboardInterrupt/SystemExit.)
            use_glob = True
        if use_glob:
            all_jobs = glob.glob(f"{tensorboard_path}/*/PPO_1")

        for job in all_jobs:
            if use_glob:
                tb_path = job
            else:
                print(job)
                train_on_bodies = job["train_on_bodies"]
                str_bodies = "-".join([str(x) for x in train_on_bodies])
                custom_alignment = job["custom_alignment"]
                if custom_alignment == "":
                    method = "aligned"
                else:
                    method = "randomized"
                job['label'] = method
                num_bodies = len(train_on_bodies)
                tb_path = f"{tensorboard_path}/{num_bodies}_{method}/model-{str_bodies}-CustomAlignWrapper-md{job['str_md5']}-sd{job['run_seed']}/PPO_1"
            print(f"Loading {tb_path}")
            if not os.path.exists(tb_path):
                continue
            df = tflogs2pandas.tflog2pandas(tb_path)
            df = df[df["metric"].str.startswith("eval/")]
            if not use_glob:
                df["alignment_id"] = job["seed"]
                df["custom_alignment"] = job["custom_alignment"]
                df["str_md5"] = job["str_md5"]
                df["vacc_run_seed"] = job['run_seed']
                df["str_bodies"] = str_bodies
                df["label"] = method
                df["num_bodies"] = num_bodies
            dfs.append(df)
        df = pd.concat(dfs)
        df.to_pickle(cache_path)
    # Derive robot-name columns from the metric string, e.g. "eval/300_...".
    df["robot_id"] = df["metric"].str.slice(start=5, stop=8)
    df["robot"] = df["robot_id"].apply(
        lambda x: common.gym_interface.template(int(x)).capitalize())
    df["Learnability"] = df["value"]
    return df
コード例 #9
0
def diff_topologies_get_results(tensorboard_path,
                                body_arr,
                                seed,
                                case_id,
                                read_cache=False):
    """Collect eval reward curves for the different-topologies experiment.

    Reads the pickle cache when *read_cache* is True; otherwise parses the
    tensorboard logs and rebuilds the cache.  Incomplete runs are recorded
    in `need_retrain` instead of aborting.
    """
    str_body_arr = '-'.join(str(x) for x in body_arr)
    exp = [body_arr]
    seeds = [seed]
    need_retrain = []
    try:
        if not read_cache:
            raise Exception("not_read_cache")
        df_results = pd.read_pickle(
            f"output_data/tmp/diff_topology_{str_body_arr}_sd{seed}_case{case_id}"
        )
    except Exception as e:
        if str(e) != "not_read_cache" and not isinstance(e, FileNotFoundError):
            raise  # bare raise preserves the original traceback
        frames = []
        for bodies in exp:
            for seed in seeds:
                str_method = f"-Walker2DHopperCase{case_id}"
                path = f"{tensorboard_path}/model-(unknown){str_method}-sd{seed}/PPO_1"
                if not os.path.exists(path):
                    raise Exception(f"Path not found. {path}")
                print(f"Loading {path}")
                df = tflog2pandas(path)
                for body in bodies:
                    df_body = df[df["metric"] ==
                                 f"eval/{body}_mean_reward"].copy()
                    if df_body.shape[0] < 12:
                        print(df_body.shape)
                        need_retrain.append({
                            "bodies": bodies,
                            "seed": seed,
                            "case": case_id,
                        })
                        # raise Exception("Data is not complete.")
                    df_body["body"] = template(body)
                    df_body["seed"] = seed
                    df_body["case"] = case_id
                    df_body["num_bodies"] = len(body_arr)
                    frames.append(df_body)

        # DataFrame.append was removed in pandas 2.0; accumulate and concat.
        df_results = pd.concat(frames) if frames else pd.DataFrame()
        df_results.to_pickle(
            f"output_data/tmp/diff_topology_{str_body_arr}_sd{seed}_case{case_id}"
        )

    return df_results
コード例 #10
0
def ph_values():
    """Load eval reward curves for the ph ablation from the cached pickle.

    NOTE(review): the rebuild branch is disabled (`if False:`); only the
    cached pickle is read.  The rebuild code is kept current:
    `DataFrame.append` was removed in pandas 2.0, so rows are accumulated
    in a list and concatenated once.
    """
    stacked_training_bodies = np.arange(start=0, stop=16)
    exp = [
        (stacked_training_bodies + 300).tolist(),
        (stacked_training_bodies + 400).tolist(),
        (stacked_training_bodies + 500).tolist(),
        (stacked_training_bodies + 600).tolist(),
    ]
    print(exp)
    seeds = list(range(10))
    methods = ["joints_feet", "joints"]
    need_retrain = []
    if False:
        frames = []
        for bodies in exp:
            for method in methods:
                for seed in seeds:
                    if method == "joints":
                        str_method = "-ra-ph-pfc"
                    elif method == "joints_feet":
                        str_method = "-ra-ph"
                    path = f"output_data/tensorboard/model-(unknown){str_method}-sd{seed}/PPO_1"
                    print(f"Loading {path}")
                    df = tflog2pandas(path)
                    for body in bodies:
                        df_body = df[df["metric"] ==
                                     f"eval/{body}_mean_reward"].copy()
                        # 62 rows == a complete run; flag and skip partial logs.
                        if df_body.shape[0] != 62:
                            print(df_body.shape)
                            need_retrain.append({
                                "bodies": bodies,
                                "seed": seed,
                                "method": method,
                            })
                            break

                        df_body["body"] = template(body)
                        df_body["seed"] = seed
                        df_body["method"] = method
                        frames.append(df_body)

        df_ph_values = pd.concat(frames) if frames else pd.DataFrame()
        df_ph_values.to_pickle("output_data/tmp/df_ph_values")
    else:
        df_ph_values = pd.read_pickle("output_data/tmp/df_ph_values")
    print(df_ph_values)
    return df_ph_values
コード例 #11
0
 def read_df(body):
     """Summarize oracle eval rewards for *body* over seeds 0-2.

     Returns a one-row-per-seed DataFrame with max and final eval values.
     """
     summaries = []
     for seed in [0, 1, 2]:
         folder = f"output_data/tensorboard_oracle/model-{body}-caseWalker2DHopperWrapper-sd{seed}/PPO_1"
         print(f"Loading {folder} ...")
         curve = tflog2pandas(folder)
         curve = curve[curve["metric"] == f"eval/{body}_mean_reward"]
         summaries.append(
             pd.DataFrame(
                 {
                     "body": template(body),
                     "body_id": body,
                     "max_value": curve["value"].max(),
                     "final_value": curve.iloc[-1, curve.columns.get_loc("value")],
                     "seed": seed,
                 },
                 index=[body]))
     return pd.concat(summaries)
コード例 #12
0
 def read_df(body):
     """Summarize eval rewards for *body*; return None if any run is short."""
     summaries = []
     for seed in [0, 1]:
         folder = f"output_data/tensorboard_oracle_random_bodies/model-{body}-sd{seed}/PPO_1"
         print(f"Loading {folder} ...")
         curve = tflog2pandas(folder)
         if curve.shape[0] != 1353:
             return None  # Fly-away bug causes the job to abort
         curve = curve[curve["metric"] == f"eval/{body}_mean_reward"]
         summaries.append(
             pd.DataFrame(
                 {
                     "body": template(body),
                     "body_id": body,
                     "max_value": curve["value"].max(),
                     "final_value": curve.iloc[-1, curve.columns.get_loc("value")],
                     "seed": seed,
                 },
                 index=[body]))
     return pd.concat(summaries)
コード例 #13
0
def load_tb(force=0):
    """Load eval/* metrics for the 9xx confirmation runs, with caching."""
    try:
        if force:
            raise FileNotFoundError
        df = pd.read_pickle(cache_path)
    except FileNotFoundError:
        frames = []
        for name, str_md5 in all_str_md5.items():
            for run_seed in range(10):
                tb_path = f"output_data/tensorboard_9xx_confirm/9xx_mutate_confirm/model-900-901-902-903-904-905-906-907-CustomAlignWrapper-md{str_md5}-sd{run_seed}/PPO_1"
                print(f"Loading {tb_path}")
                frame = tflogs2pandas.tflog2pandas(tb_path)
                frame = frame[frame["metric"].str.startswith("eval/")]
                # The experiment name encodes "<alignment>_..._<exp>".
                parts = name.split("_")
                frame["exp"] = parts[-1]
                frame["alignment"] = parts[0]
                frame["name"] = name
                frame["str_md5"] = str_md5
                frame["run_seed"] = run_seed
                frames.append(frame)
        df = pd.concat(frames)
        df.to_pickle(cache_path)
    return df
コード例 #14
0
def load_tb(force=0):
    """Return cached eval/* tensorboard data, rebuilding the cache on demand."""
    try:
        if force:
            raise FileNotFoundError
        df = pd.read_pickle(cache_path)
    except FileNotFoundError:
        frames = []
        for job in all_jobs.values():
            tb_path = f"{tensorboard_path}/model-399-499-599-699-CustomAlignWrapper-md{job['str_md5']}-sd{job['run_seed']}/PPO_1"
            print(f"Loading {tb_path}")
            if not os.path.exists(tb_path):
                continue
            frame = tflogs2pandas.tflog2pandas(tb_path)
            frame = frame[frame["metric"].str.startswith("eval/")]
            # Attach job metadata so runs can be grouped later.
            frame["alignment_id"] = job["seed"]
            frame["custom_alignment"] = job["custom_alignment"]
            frame["str_md5"] = job["str_md5"]
            frame["vacc_run_seed"] = job['run_seed']
            frame["label"] = job['label']
            frames.append(frame)
        df = pd.concat(frames)
        df.to_pickle(cache_path)
    return df
コード例 #15
0
def all_values():
    """Load all eval reward curves for the align/random methods (cached).

    NOTE(review): the rebuild branch is disabled (`if False:`); only the
    cached pickle is read.  The rebuild code is kept current:
    `DataFrame.append` was removed in pandas 2.0, so both the max-value rows
    and the per-body frames are accumulated in lists first.
    """
    stacked_training_bodies = np.arange(start=0, stop=16)
    exp = [
        (stacked_training_bodies + 300).tolist(),
        (stacked_training_bodies + 400).tolist(),
        (stacked_training_bodies + 500).tolist(),
        (stacked_training_bodies + 600).tolist(),
    ]
    print(exp)
    seeds = list(range(10))
    methods = ["align", "random"]
    need_retrain = []
    if False:
        max_rows = []
        frames = []
        for bodies in exp:
            filename = "-".join([str(x) for x in bodies])
            for method in methods:
                for seed in seeds:
                    if method == "align":
                        str_method = ""
                    elif method == "misalign":
                        str_method = "-mis"
                    elif method == "random":
                        str_method = "-ra"
                    path = f"output_data/tensorboard/model-(unknown){str_method}-sd{seed}/PPO_1"
                    print(f"Loading {path}")
                    df = tflog2pandas(path)
                    for body in bodies:
                        df_body = df[df["metric"] ==
                                     f"eval/{body}_mean_reward"].copy()
                        # 62 rows == a complete run; flag and skip partial logs.
                        if df_body.shape[0] != 62:
                            print(df_body.shape)
                            need_retrain.append({
                                "bodies": bodies,
                                "seed": seed,
                                "method": method,
                            })
                            break

                        max_rows.append({
                            "body": template(body),
                            "seed": seed,
                            "method": method,
                            "max_value": df_body["value"].max(),
                            "filename": filename,
                        })

                        df_body["body"] = template(body)
                        df_body["seed"] = seed
                        df_body["method"] = method
                        df_body["filename"] = filename
                        frames.append(df_body)

        # Built for parity with the original code; not returned or saved.
        df_max_values = pd.DataFrame(max_rows)
        df_all_values = pd.concat(frames) if frames else pd.DataFrame()
        df_all_values.to_pickle("output_data/tmp/df_all_values")
    else:
        df_all_values = pd.read_pickle("output_data/tmp/df_all_values")
    print(df_all_values)  #[68448 rows x 7 columns]
    return df_all_values
コード例 #16
0
import pandas as pd
from common.tflogs2pandas import tflog2pandas

import glob

# Scan every model's tensorboard log and record its FPS statistics so slow
# compute nodes can be identified.
rows = []
filenames = glob.glob("output_data/tensorboard/model-*/PPO_1")
for filename in filenames:
    print(filename)
    df = tflog2pandas(filename)
    df = df[df["metric"] == "time/fps"]
    average_fps = df["value"].mean()
    min_fps = df["value"].min()
    print(
        "average_fps: ",
        average_fps,
        ", min_fps: ",
        min_fps,
    )
    rows.append({
        "path": filename,
        "average_fps": average_fps,
        "min_fps": min_fps,
    })

# DataFrame.append was removed in pandas 2.0; build the frame in one go.
df_results = pd.DataFrame(rows)
df_results.to_pickle("output_data/tmp/which_nodes_are_slow")
コード例 #17
0
ファイル: 4.plot_1.py プロジェクト: liusida/thesis-bodies
# Load cached results if possible; otherwise parse the tensorboard logs and
# rebuild the cache.
try:
    df = pd.read_pickle(cache_filename)
except Exception:
    # Deliberate best-effort: any failure to read the cache (missing file,
    # stale pickle) triggers a rebuild.  (Was a bare `except:`, which also
    # swallowed KeyboardInterrupt/SystemExit.)
    dfs = []
    for body in bodies:
        for seed in all_seeds:
            for stackframe in all_stackframe:
                path = f"output_data/tensorboard/model-{body}"
                if stackframe > 0:
                    path += f"-stack{stackframe}"
                path += f"-sd{seed}/SAC_1"
                print(f"Loading {path}")
                if not os.path.exists(path):
                    continue
                df = tflog2pandas(path)
                df["body"] = body
                df["seed"] = seed
                df["stackframe"] = stackframe
                df = df[df["metric"] == f"eval/{body}_mean_reward"]
                print(df.shape)
                print(df.head())
                dfs.append(df)

    df = pd.concat(dfs)
    df.to_pickle(cache_filename)
    print(df.shape)
# df = df[::100]
print(df[df["seed"] == 0].head())
print(df[df["seed"] == 1].head())
print(df[df["seed"] == 2].head())
コード例 #18
0
#!/usr/bin/env python3

import glob
import os
import pprint
import traceback

import click
import pandas as pd
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator
from common.tflogs2pandas import tflog2pandas

# Smoke test: convert one run's tensorboard log to a DataFrame and show it.
df = tflog2pandas("output_data/tensorboard/model-400-stack4-sd4/PPO_1")
print(df)
コード例 #19
0
def load_tb(force=0):
    """Load eval/* tensorboard metrics for the 9xx mutation experiments.

    Prefers the pickle cache; a truthy *force* bypasses it.  When the job
    pickle cannot be read, runs are discovered on disk with glob and their
    metadata is parsed back out of the folder names.
    """
    try:
        if force:
            raise FileNotFoundError
        df = pd.read_pickle(cache_path)
    except FileNotFoundError:
        import re  # hoisted out of the loop; was re-imported per iteration
        dfs = []
        all_jobs = []
        count_cache = [None] * 100
        str_md5_cache = [None] * 100
        use_glob = False
        try:
            with open(f"output_data/jobs/{exp_name}.pickle", "rb") as f:
                all_jobs = pickle.load(f)
        except Exception:
            # Deliberate best-effort fallback.  (Was a bare `except:`, which
            # also swallowed KeyboardInterrupt/SystemExit.)
            use_glob = True
        if use_glob:
            all_jobs = glob.glob(f"{tensorboard_path}/*/*/PPO_1")

        for job in all_jobs:
            if use_glob:
                tb_path = job
                match = re.findall(
                    r'9xx_mutate_([0-9]+)\/model-([0-9\-]+)-CustomAlignWrapper-md([0-9a-z]+)-sd([0-9]+)',
                    tb_path)
                if len(match) == 0:
                    print("Error")
                    exit(1)
                match = match[0]
                num_mutate = int(match[0])
                str_bodies = match[1]
                str_md5 = match[2]
                run_seed = match[3]
            else:
                print(job)
                num_mutate = job["num_mutate"]
                # np.int was removed in NumPy 1.24; plain int is equivalent.
                str_bodies = "-".join([
                    str(x)
                    for x in np.arange(start=900, stop=908, dtype=int)
                ])
                custom_alignment = job["custom_alignment"]
                if custom_alignment == "":
                    method = "aligned"
                else:
                    method = "randomized"
                job['label'] = method
                tb_path = f"{tensorboard_path}/9xx_mutate_{num_mutate}/model-{str_bodies}-CustomAlignWrapper-md{job['str_md5']}-sd{job['run_seed']}/PPO_1"
            print(f"Loading {tb_path}")
            if not os.path.exists(tb_path):
                continue
            df = tflogs2pandas.tflog2pandas(tb_path)
            df = df[df["metric"].str.startswith("eval/")]
            if use_glob:
                # Give each distinct md5 within a mutation level a small
                # sequential id, used as the display label.
                if count_cache[num_mutate] is None:
                    count_cache[num_mutate] = 0
                    str_md5_cache[num_mutate] = {}
                if not str_md5 in str_md5_cache[num_mutate]:
                    count_cache[num_mutate] += 1
                    str_md5_cache[num_mutate][str_md5] = count_cache[
                        num_mutate]
                df["num_mutate"] = num_mutate
                df["str_md5"] = str_md5
                df["run_seed"] = run_seed
                df["label"] = f"# {str_md5_cache[num_mutate][str_md5]}"
            else:
                df["num_mutate"] = job["num_mutate"]
                df["alignment_id"] = job["seed"]
                df["custom_alignment"] = job["custom_alignment"]
                df["str_md5"] = job["str_md5"]
                df["vacc_run_seed"] = job['run_seed']

            dfs.append(df)
        df = pd.concat(dfs)
        df.to_pickle(cache_path)
    # Derive robot-name columns from the metric string, e.g. "eval/900_...".
    df["robot_id"] = df["metric"].str.slice(start=5, stop=8)
    df["robot"] = df["robot_id"].apply(
        lambda x: common.gym_interface.template(int(x)).capitalize())
    df["Learnability"] = df["value"]
    return df
コード例 #20
0
# If launched as "<wrapper> python <script> ...", strip the wrapper tokens so
# common.args parses the real command line.
if sys.argv[1] == "python":
    sys.argv = sys.argv[2:]
from common import common
args = common.args



# The md5 of the alignment string identifies this experiment configuration
# in the tensorboard folder name.
str_md5 = hashlib.md5(args.custom_alignment.encode()).hexdigest()

folder = f"output_data/{args.tensorboard}/model-{args.train_bodies_str}-CustomAlignWrapper-md{str_md5}-sd{args.seed}/PPO_1"
if not os.path.exists(folder):
    print("results don't exists! tell manager, please!")
    print(folder)
    exit(1)

df = tflog2pandas(folder)
results = []
for body in args.train_bodies:
    # Take the best eval reward each training body ever reached.
    df_one_body = df[df["metric"] == f"eval/{body}_mean_reward"]
    if df_one_body.shape[0]==0:
        print("null result! tell manager, please!")
        print(folder)
        exit(1)
    # doesn't impose weighted sum here
    results.append(df_one_body["value"].max())
print(results)
# Fitness is the unweighted mean of the per-body best rewards.
fitness = np.mean(results)

# To avoid infinite loop submitting many jobs
time.sleep(2)