Example #1
0
def load_latent_vectors(experiment_directory, filename, lat_vecs):
    """Restore saved latent codes into the embedding *lat_vecs*.

    Loads ``filename`` from the experiment's latent-codes directory. Two
    checkpoint layouts are supported: a legacy raw tensor of codes, copied
    row by row into ``lat_vecs.weight``, and a regular ``state_dict``.

    Returns the epoch number stored alongside the codes.
    Raises Exception if the file is missing or the code count/dimension
    does not match *lat_vecs*.
    """
    path = os.path.join(ws.get_latent_codes_dir(experiment_directory), filename)

    if not os.path.isfile(path):
        raise Exception(
            'latent state file "{}" does not exist'.format(path))

    data = torch.load(path)
    codes = data["latent_codes"]

    if isinstance(codes, torch.Tensor):
        # Legacy checkpoints stored the codes as one raw tensor
        # (for backwards compatibility).
        num_codes = codes.size()[0]
        if num_codes != lat_vecs.num_embeddings:
            raise Exception("num latent codes mismatched: {} vs {}".format(
                lat_vecs.num_embeddings, num_codes))

        if codes.size()[2] != lat_vecs.embedding_dim:
            raise Exception("latent code dimensionality mismatch")

        # Copy each saved code into the corresponding embedding row.
        for idx in range(num_codes):
            lat_vecs.weight.data[idx, :] = codes[idx]

    else:
        lat_vecs.load_state_dict(codes)

    return data["epoch"]
Example #2
0
def load_latent_vectors(experiment_directory, filename, lat_vecs):
    """Load saved latent codes into the list *lat_vecs* in place.

    Loads ``filename`` from the experiment's latent-codes directory and
    overwrites each entry of *lat_vecs* with the corresponding saved code
    moved to the GPU.

    Returns the epoch number stored in the checkpoint.
    Raises Exception if the file is missing, the number of codes differs,
    or the code dimensionality differs.
    """
    full_filename = os.path.join(
        ws.get_latent_codes_dir(experiment_directory), filename
    )

    if not os.path.isfile(full_filename):
        raise Exception('latent state file "{}" does not exist'.format(full_filename))

    data = torch.load(full_filename)

    if not len(lat_vecs) == data["latent_codes"].size()[0]:
        # BUG FIX: the message previously formatted size()[2] (the code
        # dimensionality) instead of size()[0] (the code count being compared).
        raise Exception(
            "num latent codes mismatched: {} vs {}".format(
                len(lat_vecs), data["latent_codes"].size()[0]
            )
        )

    if not lat_vecs[0].size()[1] == data["latent_codes"].size()[2]:
        raise Exception("latent code dimensionality mismatch")

    for i in range(len(lat_vecs)):
        lat_vecs[i] = data["latent_codes"][i].cuda()

    return data["epoch"]
Example #3
0
def save_latent_vectors(experiment_directory, filename, latent_vec, epoch):
    """Write the embedding *latent_vec*'s state dict and *epoch* to disk.

    The checkpoint is saved as ``filename`` inside the experiment's
    latent-codes directory (created if necessary via the True flag).
    """
    out_dir = ws.get_latent_codes_dir(experiment_directory, True)
    payload = {"epoch": epoch, "latent_codes": latent_vec.state_dict()}
    torch.save(payload, os.path.join(out_dir, filename))
Example #4
0
def save_latent_vectors(experiment_directory, filename, latent_vec, epoch):
    """Write the list of latent code tensors *latent_vec* and *epoch* to disk.

    Each code is moved to the CPU and stacked along a new leading dimension,
    then saved as ``filename`` in the experiment's latent-codes directory
    (created if necessary via the True flag).
    """
    latent_codes_dir = ws.get_latent_codes_dir(experiment_directory, True)

    # PERF: concatenate once instead of growing the tensor with torch.cat
    # inside the loop, which re-copied the whole accumulator every iteration
    # (O(n^2)). An empty input still yields torch.zeros(0), as before.
    cpu_codes = [l.cpu().unsqueeze(0) for l in latent_vec]
    all_latents = torch.cat(cpu_codes, 0) if cpu_codes else torch.zeros(0)

    torch.save(
        {"epoch": epoch, "latent_codes": all_latents},
        os.path.join(latent_codes_dir, filename),
    )
Example #5
0
    # NOTE(review): fragment of a larger reconstruction script — it relies on
    # names defined outside this view (args, npz_filenames, decoder,
    # reconstruction_codes_dir, reconstruction_meshes_dir, ws) and is not
    # runnable standalone.
    if not os.path.isdir(reconstruction_codes_dir):
        os.makedirs(reconstruction_codes_dir)

    latent_codes = []

    # Directory portion shared by all sample files (everything before the
    # final path component of the first .npz filename).
    data_root = "/".join(npz_filenames[0].split("/")[:-1]) 

    # Collect the trained latent code for every shape selected for
    # interpolation.
    for npz in args.sample_interp: 
        npz = data_root + "/" + npz + ".npz"
        # Position of this sample in the dataset — used to index the saved
        # embedding weight matrix below.
        ii = npz_filenames.index(npz)
        full_filename = os.path.join(args.data_source, ws.sdf_samples_subdir, npz)

        logging.info("reconstructing {}".format(npz))

        err, latent = None, None
        # Pull row ii of the trained embedding's weight out of the latest
        # checkpoint and move it to the GPU.
        latent = torch.load(os.path.join(ws.get_latent_codes_dir(args.experiment_directory), "latest.pth"))['latent_codes']['weight'].cuda()[ii, :]
        logging.debug("latent: {}".format(latent.detach().cpu().numpy()))
        latent_codes.append(latent)
    
    num_interpol_steps = 12

    # One mesh/latent output file per interpolation step; note that `npz`
    # here is whatever value was left by the last iteration of the loop
    # above, so all steps share the same base name.
    for step in range(num_interpol_steps + 1):

        mesh_filename = os.path.join(
            reconstruction_meshes_dir, npz[:-4] + "-" + str(step)
        )
        latent_filename = os.path.join(
            reconstruction_codes_dir, npz[:-4] + "-" + str(step) + ".pth"
        )
        
        # Loop body continues past the end of this fragment.
        decoder.eval()
Example #6
0
                                               npz[:-4] + ".pth")

            # NOTE(review): fragment of a larger reconstruction loop — it
            # begins on a continuation line and the reconstruct(...) call at
            # the end is truncated; names such as args, npz, ii, mesh_filename,
            # decoder, latent_size, ws, and deep_sdf come from outside this view.

            # Skip samples whose mesh and latent code already exist on disk.
            if (args.skip and os.path.isfile(mesh_filename + ".ply")
                    and os.path.isfile(latent_filename)):
                continue

            logging.info("reconstructing {}".format(npz))

            start = time.time()

            # Hard-coded switch: True reuses codes from the training
            # checkpoint, False re-optimizes a code from the SDF samples.
            use_saved_latent_vector = True
            err, latent = None, None
            if use_saved_latent_vector:
                # Row ii of the trained embedding's weight, moved to the GPU.
                latent = torch.load(
                    os.path.join(
                        ws.get_latent_codes_dir(args.experiment_directory),
                        "latest.pth"))['latent_codes']['weight'].cuda()[ii, :]
            else:
                data_sdf = deep_sdf.data.read_sdf_samples_into_ram(
                    full_filename)
                # Shuffle the positive and negative SDF sample sets
                # independently before optimization.
                data_sdf[0] = data_sdf[0][torch.randperm(data_sdf[0].shape[0])]
                data_sdf[1] = data_sdf[1][torch.randperm(data_sdf[1].shape[0])]

                err, latent = reconstruct(
                    decoder,
                    int(args.iterations),
                    latent_size,
                    data_sdf,
                    0.01,  # [emp_mean,emp_var],
                    0.1,
                    num_samples=8000,
                    # call truncated — remaining arguments outside this fragment