Example #1
    def run(self):
        df_list = []
        for directory in self.configuration.log_folders:
            list_log_folders = os.listdir(directory)
            dir_name = os.path.basename(os.path.dirname(directory))
            list_log_folders = [
                d for d in list_log_folders
                if os.path.isdir(os.path.join(directory, d))
            ]

            for tb_output_folder in list_log_folders:
                print("working on:", tb_output_folder)
                # Load every event file in this run directory; the return value
                # of FirstEventTimestamp() is not used here.
                x = EventAccumulator(
                    path=os.path.join(directory, tb_output_folder))
                x.Reload()
                x.FirstEventTimestamp()
                keys = self.configuration.properties
                print_out_dict = {}

                # One column per scalar tag, named <parent_dir>_<tag>_<run_folder>.
                for key in keys:
                    print_out_dict[f"{dir_name}_{key}_{tb_output_folder}"] = [
                        e.value for e in x.Scalars(key)
                    ]

                df = pd.DataFrame(data=print_out_dict)
                df_list.append(df)

        complete_df = pd.concat(df_list, axis=1)
        complete_df.to_csv(
            os.path.join(self.configuration.output_path,
                         self.configuration.output_name))
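
For context, the method above pulls everything it needs from a configuration object that the example does not show, and the enclosing module is assumed to import os, pandas as pd and EventAccumulator. A minimal, hypothetical stand-in (only the attribute names come from the code above; the values are placeholders) might look like:

from types import SimpleNamespace

# Hypothetical stand-in for self.configuration; the attribute names come from
# the snippet above, the values are placeholders.
configuration = SimpleNamespace(
    log_folders=["./logs/experiment_a/", "./logs/experiment_b/"],  # parent dirs of TensorBoard runs
    properties=["train/loss", "val/accuracy"],                     # scalar tags to export
    output_path="./exports",
    output_name="scalars.csv",
)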
Example #2
import logging
from pathlib import Path

import pandas as pd
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator


def load_one_dir(path: Path):
    x = EventAccumulator(path=str(path))
    x.Reload()
    x.FirstEventTimestamp()
    keys = {
        'train/running_return': "Return",
        'debug/expected_q_values': "Q-values"
    }
    df = None
    for k, v in keys.items():
        try:
            time_steps = x.Scalars(k)
        except KeyError:
            logging.warning("Did not find key %s in %s", k, path)
            continue
        wall_time, steps, values = list(zip(*time_steps))
        df_new = pd.DataFrame(data={"Epoch": steps, v: values})
        if df is None:
            df = df_new
        else:
            df = df.merge(df_new, on="Epoch")

    if df is None:
        # None of the requested tags were present in this run.
        return None

    # The layout .../<experiment>/<dataset>/<method>/<seed> is assumed here.
    experiment_name, data, method, seed = str(path).split("/")[-4:]
    n = len(df)

    df["Method"] = [method] * n
    df["Experiment"] = [experiment_name] * n
    df["Seed"] = [seed] * n
    df["Dataset"] = [data] * n
    return df
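
A caller could aggregate many runs into a single frame along these lines; the root directory and the .../<experiment>/<dataset>/<method>/<seed> layout are assumptions inferred from the path split above:

# Hypothetical aggregation over many run directories.
runs = [p for p in Path("./runs").glob("*/*/*/*") if p.is_dir()]
frames = [df for df in (load_one_dir(p) for p in runs) if df is not None]
all_runs = pd.concat(frames, ignore_index=True)
all_runs.to_csv("all_runs.csv", index=False)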
Example #3
import glob
import json
import os

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator


def export_tensorboard(check_only=False):
    root_dir = os.path.expanduser("~/Documents/master-thesis")

    tb_dir = os.path.join(root_dir, "logs/tblogs")

    log_dirs = sorted(glob.glob(os.path.join(tb_dir, "*")))

    tags = ["train_train_loss", "val_acc", "val_mse"]

    train_data = {"tags": tags}

    for tb_output_folder in log_dirs:
        x = EventAccumulator(path=tb_output_folder)
        x.Reload()
        x.FirstEventTimestamp()

        # Only export runs that actually logged the first tag; x.Tags()['scalars']
        # is the public equivalent of the internal x.scalars.Keys().
        if tags[0] in x.scalars.Keys():
            name = os.path.basename(tb_output_folder)
            train_data[name] = {}

            for tag in tags:
                train_data[name][tag] = []

                for e in x.Scalars(tag):
                    train_data[name][tag].append({
                        "step": e.step,
                        "value": e.value,
                        "wall_time": e.wall_time,
                    })

                if check_only:
                    break

            print(tb_output_folder, "-> OK")
        else:
            print(tb_output_folder, "-> NO")

    if not check_only:
        with open(os.path.join(root_dir, "logs/tblogs.json"), "w") as f:
            json.dump(train_data, f, indent=4)
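
Reading the export back in is symmetric; a minimal sketch that reuses the path and the train_train_loss tag from above:

# Minimal sketch: load the exported JSON and inspect one tag per run.
with open(os.path.expanduser("~/Documents/master-thesis/logs/tblogs.json")) as f:
    train_data = json.load(f)

for name, run in train_data.items():
    if name == "tags":  # the tag list itself, not a run
        continue
    losses = [point["value"] for point in run["train_train_loss"]]
    print(name, "final train loss:", losses[-1])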
Example #4
import os
from glob import glob

import numpy as np
import pandas as pd
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# BASEDIR is assumed to be defined elsewhere as the root of the experiment folders.


def extract_data(path, is_concat=True):
    runs = glob(os.path.join(BASEDIR, path, 'runs', '*/events.*'))

    if not len(runs):
        raise FileNotFoundError(f"{path} does not contain any event files.")
    df_list = []

    for exp in runs:
        x = EventAccumulator(path=exp)
        x.Reload()
        x.FirstEventTimestamp()
        tags = x.Tags()['scalars']  # ['Loss/Train', 'Loss/Val', 'Acc/Top1']

        steps = [e.step for e in x.Scalars(tags[0])]
        # wall_time = [e.wall_time for e in x.Scalars(tags[0])]
        # index = [e.index for e in x.Scalars(tags[0])]
        # count = [e.count for e in x.Scalars(tags[0])]
        n_steps = len(steps)

        # One column per tag; every tag is assumed to be logged for the same steps.
        data_dict = {
            tag: np.array([e.value for e in x.Scalars(tag)], dtype=np.float64)
            for tag in tags
        }

        exp_name = os.path.basename(os.path.dirname(exp))
        data_dict['Name'] = [exp_name] * n_steps

        # Use the recorded global steps as the index so the 'step' column in the
        # CSV holds real step values rather than row numbers.
        _df = pd.DataFrame(data=data_dict, index=steps)

        if is_concat:
            df_list.append(_df)
        else:
            _df.to_csv(exp_name + '.csv', index_label='step')

    if is_concat:
        df = pd.concat(df_list)
        df.to_csv('Output.csv', index_label='step')
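
A hypothetical call, assuming BASEDIR points at a folder whose subfolders follow the <experiment>/runs/<run>/events.* layout used above ('resnet_baseline' is a placeholder name):

extract_data('resnet_baseline', is_concat=True)   # all runs concatenated into Output.csv
extract_data('resnet_baseline', is_concat=False)  # one CSV per run, named after the run folder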
Example #5
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator


# PPO is assumed to be a module-level boolean flag that selects the PPO tag names.
def tensorboard_loaddata(dir, ppo=PPO):
    if ppo:
        keys = ['batch/reward_mean', 'eval/reward_sum']  # ppo
    else:
        keys = ['reward/batch', 'eval/reward_sum']  # td3

    x = EventAccumulator(path=dir)
    x.Reload()
    x.FirstEventTimestamp()

    steps = []
    wall_time = []
    index = []
    count = []
    data = []

    for k in keys:
        steps.append([e.step for e in x.Scalars(k)])
        wall_time.append([e.wall_time for e in x.Scalars(k)])
        # index.append([e.index for e in x.Scalars(k)])
        # count.append([e.count for e in x.Scalars(k)])
        data.append([e.value for e in x.Scalars(k)])
    return wall_time, steps, data
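
A call might look like this (the run directory and the ppo flag value are placeholders):

# Hypothetical usage: load the PPO tags from a single run directory.
wall_time, steps, data = tensorboard_loaddata('./runs/ppo/seed_0', ppo=True)
print(len(steps[0]), 'points logged for', 'batch/reward_mean')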
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator
import matplotlib.pyplot as plt

folder = './evals/preliminary/action_space/polar/1/'

keys = ['batch/reward_mean', 'eval/reward_sum']

x = EventAccumulator(path=folder)
x.Reload()
x.FirstEventTimestamp()

steps = []
wall_time = []
data = []

for k in keys:
    # ScalarEvent only exposes wall_time, step and value, so only those
    # fields are collected here.
    steps.append([e.step for e in x.Scalars(k)])
    wall_time.append([e.wall_time for e in x.Scalars(k)])
    data.append([e.value for e in x.Scalars(k)])

plt.subplot(1, 2, 1)
plt.plot(steps[0], data[0])
plt.subplot(1, 2, 2)
plt.plot(steps[1], data[1])
plt.show()
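
If more tags are added later, deriving the layout from keys and titling each panel keeps the figure readable; a small variant of the plotting block above:

# Variant of the plotting block above: one panel per tag, titled with its key.
fig, axes = plt.subplots(1, len(keys), figsize=(6 * len(keys), 4))
for ax, k, s, d in zip(axes, keys, steps, data):
    ax.plot(s, d)
    ax.set_title(k)
    ax.set_xlabel('step')
plt.show()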