# Example 1 (scrape-artifact separator; original marker: "Beispiel #1" / "0")
        # Redirect the log path from the user's home navrep directory into
        # /tmp/navrep.  NOTE(review): this fragment starts mid-function; the
        # condition guarding this branch lies outside the visible chunk —
        # confirm upstream before relying on when the redirect happens.
        log_path = log_path.replace(os.path.expanduser("~/navrep"),
                                    "/tmp/navrep")

    # Ensure the parent directories for the model file and the log exist.
    make_dir_if_not_exists(os.path.dirname(model_save_path))
    make_dir_if_not_exists(os.path.dirname(log_path))

    # create network (fresh graph, training mode)
    reset_graph()
    vae = ConvVAE(z_size=_Z,
                  batch_size=batch_size,
                  is_training=True,
                  reuse=False)
    vae.print_trainable_params()

    # create training dataset; abort early if the archive yielded nothing
    dataset = archive_to_lidar_dataset(dataset_dir)
    if len(dataset) == 0:
        raise ValueError("no scans found, exiting")
    print(len(dataset), "scans in dataset.")

    # split into batches: np.floor drops the trailing partial batch
    total_length = len(dataset)
    num_batches = int(np.floor(total_length / batch_size))

    # rings converter
    # NOTE(review): presumably converts 1D lidar scans into a 64x64 ring
    # representation — confirm against generate_rings' definition.
    rings_def = generate_rings(64, 64)

    # train loop:
    print("train", "step", "loss", "recon_loss", "kl_loss")
    values_logs = None  # per-step log accumulator, created on first use
    start = time.time()  # wall-clock reference for timing the run
# Example 2 (scrape-artifact separator; original marker: "Beispiel #2" / "0")
# Evaluation-script constants.
NUM_EPOCH = 100
DATA_DIR = "record"
HOME = os.path.expanduser("~")
MAX_LIDAR_DIST = 25.0

siren_model_path = os.path.expanduser("~/navrep/models/V/siren1d.json")

# Build the SIREN 1D network in inference mode and restore its weights
# from the saved JSON checkpoint.
reset_graph()
siren = SIREN1D(batch_size=1080, is_training=False)
siren.load_json(siren_model_path)

# Load the evaluation scans, then thin the set by keeping every 100th
# scan of (at most) the first 500000.
dataset = archive_to_lidar_dataset("~/navrep/datasets/V/ian", limit=180)
if not len(dataset):
    raise ValueError("no scans found, exiting")
print(len(dataset), "scans in dataset.")
dataset = dataset[:500000:100]

# Number of full batches that fit in the thinned dataset (remainder dropped).
total_length = len(dataset)
num_batches = int(np.floor(total_length / batch_size))

# Template LaserScan message reused when converting scans downstream.
dummy_msg = LaserScan()
dummy_msg.range_max = 100.0
dummy_msg.ranges = range(1080)

# One slot per example; -1 marks "not yet evaluated".
ring_accuracy_per_example = np.full((len(dataset),), -1.0)
for idx in range(num_batches):
# Example 3 (scrape-artifact separator; original marker: "Beispiel #3" / "0")
# Training-script constants.
NUM_EPOCH = 10
DATA_DIR = "record"
HOME = os.path.expanduser("~")
MAX_LIDAR_DIST = 25.0

# Where the trained SIREN 1D weights will be written.
model_save_dir = HOME + "/navrep/models/V"
model_save_path = os.path.join(model_save_dir, "siren1d.json")
# exist_ok=True is race-free (the old exists()-then-makedirs pattern could
# raise if the directory appeared between the two calls).
os.makedirs(model_save_dir, exist_ok=True)

# create network (fresh graph, training mode)
reset_graph()
siren = SIREN1D(batch_size=batch_size, is_training=True, reuse=False)

# create training dataset; abort early if the archive yielded nothing
dataset = archive_to_lidar_dataset(limit=1)
if len(dataset) == 0:
    raise ValueError("no scans found, exiting")
print(len(dataset), "scans in dataset.")
n_scans = len(dataset)

# Normalize ranges by the max lidar distance.  `np.float` was removed in
# NumPy 1.24; the builtin `float` is the exact equivalent (float64).
# NOTE(review): clipping to [0, MAX_LIDAR_DIST] AFTER dividing by
# MAX_LIDAR_DIST looks off — the upper bound was probably meant to be 1.0.
# Kept as-is to preserve behavior; confirm the intended range.
all_obs = np.clip(
    dataset.astype(float) / MAX_LIDAR_DIST, 0.0, MAX_LIDAR_DIST)
# (n_scans, 1, 1080, 1): full scan conditioning input per example.
train_o = all_obs[:, None, :].reshape(n_scans, 1, 1080, 1)
# (1, 1080, 1): beam-index coordinate, shared across scans.
train_x = np.arange(1080)[None, :].reshape(1, 1080, 1)  # TODO: divide by 1080
# (n_scans, 1080, 1): regression target — the normalized ranges themselves.
train_y = all_obs.reshape((n_scans, 1080, 1))

# batch = sample from (n_scans, n_points): (scan, beam) index pairs of
# every nonzero observation, shape (n_nonzero, 2).
all_indices = np.array(np.where(all_obs)).T

# split into batches: