Example #1
def plot_contour(train_dir):
    ant_train_points = Path(train_dir)
    env_name = os.path.basename(ant_train_points)
    generated_dirs_dir = "generated_dirs/_temp_dir/"

    if os.path.exists(generated_dirs_dir):
        shutil.rmtree(generated_dirs_dir)
    # Keep only entries whose names end in '0' (checkpoints are named by training step, e.g. '0040000')
    checkpoint_names = [checkpoint for checkpoint in os.listdir(ant_train_points) if checkpoint.endswith('0')]
    checkpoint_names.sort()

    # Flatten every checkpoint's parameters into a single vector so PCA can be run over them
    vectors = []
    for checkpoint in checkpoint_names:
        check_param_fname = ant_train_points / checkpoint / "parameters.th"
        params = torch.load(check_param_fname, map_location=torch.device('cpu')).values()
        flat_params = [v.detach().flatten() for v in params]
        vector = torch.cat(flat_params, dim=0).numpy()
        vectors.append(vector)

    # Project the parameter trajectory onto its top two principal components
    decomposer = sklearn.decomposition.PCA(2)
    coords = decomposer.fit_transform(vectors)
    # Center points at the final coordinate, i.e. the center of the plot
    coords -= coords[-1]
    dir1, dir2 = decomposer.components_
    # Split the flat PCA directions back into per-parameter arrays, using the last loaded params as the shape template
    dir1 = npvec_to_nplist(dir1, params)
    dir2 = npvec_to_nplist(dir2, params)
    # Scale the plane so the trajectory fits with a 15% margin
    scale1, scale2 = np.max(np.abs(coords), axis=0) * 1.15

    dir1 = [d*scale1 for d in dir1]
    dir2 = [d*scale2 for d in dir2]

    num_episodes = 25
    train_info = json.load(open(ant_train_points/"info.json"))
    base_checkpoint = str(ant_train_points / checkpoint_names[-1])
    generate_plane_data(base_checkpoint, generated_dirs_dir, dir1, dir2, train_info, grid_size=15, num_episodes=num_episodes)
    run_job_list(generated_dirs_dir+"jobs.sh")
    job_results_to_csv(generated_dirs_dir)
    plot_plane(generated_dirs_dir+"results.csv",
        outname="contours/"+env_name,
        plot_type="contour",
        dir1_name="component 1",
        dir2_name="component 2",
        dir1_scale=scale1,
        dir2_scale=scale2,
        points=coords,
    )
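
# A minimal way to call this helper; the directory below is a placeholder and
# would need to contain checkpoint folders (each with a parameters.th file)
# plus an info.json at the top level, as the code above assumes.
plot_contour("train_results/bullet/hopper")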
Example #2
def replot(csv_fname,
           outname=None,
           env_name=None,
           key_name="episode_rewards",
           title=None,
           plot_type="mesh",
           file_type="png",
           logscale=False):
    default_outname = "vis/" + "".join(
        [c for c in csv_fname if re.match(r'\w', c)]) + key_name
    outname = outname if outname is not None else default_outname
    datafname = csv_fname
    plot_plane(datafname,
               outname,
               key_name=key_name,
               plot_type=plot_type,
               file_type=file_type,
               env_name=env_name,
               logscale=logscale)
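
# Example use: re-render a previously saved grid evaluation as a mesh image.
# The path below is a placeholder for whatever results.csv a run produced.
replot("generated_dirs/test_script/results.csv", plot_type="mesh")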
Example #3
    parser.add_argument('--dir1_name',
                        type=str,
                        help="Name of the x axis in the plot")
    parser.add_argument('--dir2_name',
                        type=str,
                        help="Name of the y axis in the plot")
    parser.add_argument(
        '--show',
        action='store_true',
        help="Shows plot instead of saving it (does not work for vtp output)")
    parser.add_argument(
        '--logscale',
        default="auto",
        help=
        "Plot in log scale rather than the default linear scale. One of 'auto', 'on', or 'off'"
    )

    args = parser.parse_args()

    plot_plane(args.datafname,
               args.outname,
               env_name=args.env_name,
               key_name=args.key,
               plot_type=args.type,
               dir1_name=args.dir1_name,
               dir2_name=args.dir2_name,
               dir1_scale=args.dir1_scale,
               dir2_scale=args.dir2_scale,
               show=args.show,
               logscale=args.logscale)
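
# The call above also reads args.datafname, args.outname, args.env_name, args.key,
# args.type, args.dir1_scale and args.dir2_scale, so the full script presumably
# registers those flags before this snippet. A sketch of what those definitions
# might look like (names, types, and defaults here are assumptions, not the actual source):
#     parser.add_argument('datafname', type=str, help="CSV file produced by the grid evaluation")
#     parser.add_argument('outname', type=str, help="Output file name for the plot")
#     parser.add_argument('--env_name', type=str, help="Environment name shown in the plot title")
#     parser.add_argument('--key', type=str, default="episode_rewards", help="CSV column to plot")
#     parser.add_argument('--type', type=str, default="mesh", help="Plot type, e.g. 'mesh' or 'contour'")
#     parser.add_argument('--dir1_scale', type=float, help="Scale of the x direction")
#     parser.add_argument('--dir2_scale', type=float, help="Scale of the y direction")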
Example #4
from reward_surfaces.experiments import generate_plane_data
from reward_surfaces.plotting import plot_plane
from reward_surfaces.runners.run_jobs_multiproc import run_job_list
from reward_surfaces.utils.job_results_to_csv import job_results_to_csv
from reward_surfaces.utils.surface_utils import readz
from reward_surfaces.agents.make_agent import make_agent

import json
import os
import shutil

trained_checkpoint = "train_results/bullet/hopper/0040000/"
generated_dirs_dir = "generated_dirs/test_script/"

dir1_fname = "generated_dirs/hopper_eig_vecs/results/0040000/mineigvec.npz"
dir2_fname = "generated_dirs/hopper_eig_vecs/results/0040000/maxeigvec.npz"

dir1 = readz(dir1_fname)
dir2 = readz(dir2_fname)

# info.json lives one level above the checkpoint directory
with open(trained_checkpoint + "../info.json") as f:
    train_info = json.load(f)

# Start from a clean output directory for this run
if os.path.exists(generated_dirs_dir):
    shutil.rmtree(generated_dirs_dir)
    print("removed existing", generated_dirs_dir)

generate_plane_data(trained_checkpoint, generated_dirs_dir, dir1, dir2, train_info, num_steps=1000)
run_job_list(generated_dirs_dir+"jobs.sh")
job_results_to_csv(generated_dirs_dir)
plot_plane(generated_dirs_dir+"results.csv")
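
# Optional sanity check of the generated grid before plotting it elsewhere.
# This assumes results.csv contains an "episode_rewards" column, which matches
# the default key name used by the replot helper in Example #2.
import pandas

results = pandas.read_csv(generated_dirs_dir + "results.csv")
print(results["episode_rewards"].describe())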
Example #5
        job_results_to_csv(gif_data_folder / checkpoint)

    # Shared color-scale maximum across frames: base_mag only grows (geometrically,
    # by `factor`) so consecutive frames of the gif stay visually comparable.
    base_mag = None
    factor = 2
    frame_idx = 0
    for checkpoint in checkpoints:
        csv_path = gif_data_folder / checkpoint / "results.csv"
        row_dat = pandas.read_csv(csv_path)[args.key]
        m = max(0.1, np.max(row_dat))
        mini = min(0, np.min(row_dat))
        print("max value for", checkpoint, ":", m)
        if base_mag is None:
            base_mag = m
        else:
            while m > base_mag:
                base_mag = base_mag * factor

        fname = plot_plane(str(csv_path),
                           str(frames_dir + checkpoint),
                           env_name=args.env_name,
                           key_name=args.key,
                           plot_type=args.type,
                           vmin=mini,
                           vmax=base_mag,
                           show=False,
                           logscale="off")
        if not fname:
            continue
        os.rename(fname, f"{frames_dir}{frame_idx:05}.png")
        frame_idx += 1
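
    # The numbered frames written above can then be assembled into an animation.
    # A minimal sketch using imageio (an assumption; any gif or ffmpeg tool would do):
    import glob
    import imageio

    frame_files = sorted(glob.glob(f"{frames_dir}*.png"))
    imageio.mimsave(f"{frames_dir}training.gif", [imageio.imread(f) for f in frame_files])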