Example #1
def main():
    sa = ShapeAssembly()
    lines = sa.load_lines(sys.argv[1])

    # should be shape N x 3
    target_pc = load_point_cloud(sys.argv[2])

    out_file = sys.argv[3]
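    # Parse the program into a hierarchy; param_list exposes its continuous
    # parameters as tensors that the optimizer below updates directly.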
    hier, param_dict, param_list = sa.make_hier_param_dict(lines)

    opt = torch.optim.Adam(param_list, 0.001)

    start = torch.cat(param_list).clone()

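    # Fitting loop: execute the program differentiably, sample its surface,
    # and minimize chamfer distance to the target point cloud plus an L1
    # penalty that keeps parameters near their starting values.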
    for iter in range(400):
        verts, faces = sa.diff_run(hier, param_dict)

        samps = sample_surface(faces, verts.unsqueeze(0), 10000)
        closs = cham_loss(samps.squeeze().T.unsqueeze(0).cuda(),
                          target_pc.T.unsqueeze(0).cuda(), 0.0)

        ploss = (torch.cat(param_list) - start).abs().sum()

        loss = closs + ploss.cuda() * 0.001

        opt.zero_grad()
        loss.backward()
        opt.step()

        if iter % 10 == 0:
            writeObj(verts, faces, f'{iter}_' + out_file + '.obj')

    writeObj(verts, faces, out_file + '.obj')
    sa.fill_hier(hier, param_dict)
    writeHierProg(hier, out_file + '.txt')
Example #2
def create_point_cloud(in_file, out_file):
    sa = ShapeAssembly()
    lines = sa.load_lines(in_file)
    hier, param_dict, _ = sa.make_hier_param_dict(lines)
    verts, faces = sa.diff_run(hier, param_dict)
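    # Sample 10,000 points from the executed mesh and write them out as the
    # target point cloud.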
    tsamps = sample_surface(faces, verts.unsqueeze(0), 10000).squeeze()
    writePC(tsamps, out_file)
Example #3
def main():
    sa = ShapeAssembly()
    lines = sa.load_lines(sys.argv[1])

    # should be shape N x 3
    tverts, tfaces = loadObj(sys.argv[2])

    tverts = torch.tensor(tverts)
    tfaces = torch.tensor(tfaces).long()
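    # Pre-sample the target mesh once; these points stay fixed during fitting.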
    tsamps = sample_surface(tfaces, tverts.unsqueeze(0), 10000)

    out_file = sys.argv[3]
    hier, param_dict, param_list = sa.make_hier_param_dict(lines)

    start_time = time.time()
    opt = torch.optim.Adam(param_list, 0.001)

    start = torch.cat(param_list).clone()

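    # Same fitting scheme as above: chamfer distance to the target samples
    # plus an L1 penalty on parameter drift.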
    for iter in range(400):
        verts, faces = sa.diff_run(hier, param_dict)

        samps = sample_surface(faces, verts.unsqueeze(0), 10000)
        # tsamps = sample_surface(tfaces, tverts.unsqueeze(0), 10000)
        closs = cham_loss(samps.squeeze().T.unsqueeze(0).cuda(),
                          tsamps.squeeze().T.unsqueeze(0).cuda(), 0.0)

        ploss = (torch.cat(param_list) - start).abs().sum()

        loss = closs + ploss.cuda() * 0.001
        print(float(loss))

        opt.zero_grad()
        loss.backward()
        opt.step()

        # if iter % 10 == 0:
        #     writeObj(verts, faces, f'{iter}_' + out_file + '.obj')

    end_time = time.time()
    print(f"TIME: {end_time-start_time}")

    writeObj(verts, faces, out_file + '.obj')
    sa.fill_hier(hier, param_dict)
    writeHierProg(hier, out_file + '.txt')
Example #4
def prog_to_pc(prog):
    verts, faces = hier_execute(prog)
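    # Center each axis of the mesh before sampling the point cloud.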
    for i in range(3):
        verts[:, i] = verts[:, i] - verts[:, i].mean()
    pc = utils.sample_surface(faces,
                              verts.unsqueeze(0),
                              num_samps,
                              return_normals=False)[0]
    return pc
Example #5
        def __getitem__(self, index):
            verts, faces = self.datas[index]
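            # Center the mesh, then sample 2500 points as the training input.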
            for i in range(3):
                verts[:, i] = verts[:, i] - verts[:, i].mean()
            inputs = sample_surface(faces,
                                    verts.unsqueeze(0),
                                    2500,
                                    return_normals=False)[0]

            return inputs
Example #6
def prog_to_pc(prog, ns):
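    # An empty program produces an all-zero placeholder point cloud.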
    if prog['prog'] == []:
        return torch.zeros((ns, 3))
    verts, faces = hier_execute(prog)
    for i in range(3):
        verts[:, i] = verts[:, i] - verts[:, i].mean()
    pc = utils.sample_surface(faces,
                              verts.unsqueeze(0),
                              ns,
                              return_normals=False)[0]
    return pc
Example #7
def lines_to_pc(lines):
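    # Execute the program line by line, then sample a point cloud from the
    # resulting geometry.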
    P = Program()
    for i, line in enumerate(lines):
        P.execute(line)
    verts, faces = P.getShapeGeo()
    for i in range(3):
        verts[:, i] = verts[:, i] - verts[:, i].mean()
    pc = utils.sample_surface(faces,
                              verts.unsqueeze(0),
                              num_samps,
                              return_normals=False)[0]
    return pc
Example #8
def reencode_prog(full_pc, partial_prog, ns, encoder):
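    # Merge the full point cloud with points sampled from the partially
    # executed program, label the partial points with 1, and encode the
    # combined cloud.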
    full_labeled = full_pc.transpose(1, 0)
    pverts, pfaces = partial_prog.getShapeGeo()
    if pverts is None:
        partial_pc = torch.zeros((ns // 2, 3))
    else:
        for i in range(3):
            pverts[:, i] = pverts[:, i] - pverts[:, i].mean()
        partial_pc = utils.sample_surface(pfaces,
                                          pverts.unsqueeze(0),
                                          ns // 2,
                                          return_normals=False)[0]
    partial_labels = torch.ones((ns // 2, 1))
    partial_labeled = torch.cat([partial_pc, partial_labels], 1).to(device)

    pc = torch.cat([full_labeled, partial_labeled], 0)
    pc = pc.transpose(1, 0).unsqueeze(0)
    return encoder(pc).unsqueeze(0)
Example #9
def eval_recon(outdir, data_inds):
    decoder = torch.load(shapeAssembly_decoder).to(device)
    decoder.eval()
    encoder = PCEncoder()
    encoder.load_state_dict(torch.load(point_cloud_encoder))
    encoder.eval()
    encoder.to(device)

    os.system(f'mkdir {outdir}')

    count = 0.
    tdist = 0.

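    # For each shape: encode its point cloud, decode a program, execute it,
    # and score the reconstruction against the input points with an F-score.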
    for ind in tqdm(data_inds):

        pc_samp = torch.load(f'{point_cloud_folder}/{ind}.pts').to(device)
        enc = encoder(pc_samp.unsqueeze(0))
        prog, _ = run_eval_decoder(enc.unsqueeze(0), decoder, False)
        verts, faces = hier_execute(prog)

        utils.writeObj(verts, faces, f"{outdir}/{ind}.obj")
        utils.writeHierProg(prog, f"{outdir}/{ind}.txt")

        verts = verts.to(device)
        faces = faces.to(device)

        pred_samp = utils.sample_surface(faces, verts.unsqueeze(0), 10000,
                                         True)

        # Center PC

        offset = (pc_samp.max(dim=0).values + pc_samp.min(dim=0).values) / 2
        pc_samp -= offset

        #utils.writeSPC(pc_samp,f'tar_pc_{ind}.obj')
        #utils.writeSPC(pred_samp[0,:,:3],f'scripts/output/pred_pc_{ind}.obj')

        pc_samp = pc_samp.repeat(1, 2).unsqueeze(0)
        tdist += fscore.score(pred_samp.squeeze().T.unsqueeze(0),
                              pc_samp.squeeze().T.unsqueeze(0))
        count += 1

    print(f"Average F-score: {tdist/count}")
Example #10
def getFScore(verts, faces, gt_verts, gt_faces):
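    # Sample both meshes and compute the F-score between the two point sets.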
    p_samps = utils.sample_surface(faces, verts.unsqueeze(0), NUM_SAMPS)
    t_samps = utils.sample_surface(gt_faces, gt_verts.unsqueeze(0), NUM_SAMPS)

    return fscore.score(p_samps.squeeze().T.unsqueeze(0),
                        t_samps.squeeze().T.unsqueeze(0))
Example #11
    os.system(f'mkdir {outdir}')

    for ind in inds:
        count += 1.

        try:
            hier = jp.parseJsonToHier(ind, CATEGORY)
            nshier = jp.parseJsonToHier(ind, CATEGORY, True)

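            # Generate a ShapeAssembly program from the hierarchy, execute it,
            # and compare its sampled surface against the ground-truth geometry.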
            gen.generate_program(hier)

            pverts, pfaces = hier_execute(hier)
            tverts, tfaces = get_gt_geom(nshier, False)

            tsamps = utils.sample_surface(tfaces, tverts.unsqueeze(0), 10000)

            try:

                psamps = utils.sample_surface(pfaces, pverts.unsqueeze(0),
                                              10000)

                pfs = fscore.score(psamps.squeeze().T.unsqueeze(0),
                                   tsamps.squeeze().T.unsqueeze(0))

            except Exception:
                pfs = 0.

            if pfs >= 50:
                gpfsv += 1.
Example #12
def fit(prog_path, obj_path, out_path):
    progs = os.listdir(prog_path)
    objs = os.listdir(obj_path)
    fitted_progs = []
    for i, prg in enumerate(progs):
        print(f"fitting program {i}")
        sa = ShapeAssembly()
        p_no_e = prg.split("_")[1]
        index = int(p_no_e.split(".")[0])

        # should be shape N x 3
        tverts, tfaces = loadObj(f"{obj_path}/{index}.obj")

        tverts = torch.tensor(tverts)
        tfaces = torch.tensor(tfaces).long()

        out_file = f"{out_path}/{index}"
        with open(f"{prog_path}/{prg}") as file:
            lines = file.readlines()
        hier, param_dict, param_list = sa.make_hier_param_dict(lines)

        opt = torch.optim.Adam(param_list, 0.001)

        start = torch.cat(param_list).clone()

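        # Fit the program parameters to the target mesh: chamfer distance
        # between sampled surfaces plus an L1 penalty on parameter drift.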
        for iter in range(400):
            verts, faces = sa.diff_run(hier, param_dict)

            samps = sample_surface(faces, verts.unsqueeze(0), 10000)
            tsamps = sample_surface(tfaces, tverts.unsqueeze(0), 10000)
            closs = cham_loss(samps.squeeze().T.unsqueeze(0).cuda(),
                              tsamps.squeeze().T.unsqueeze(0).cuda(), 0.0)

            ploss = (torch.cat(param_list) - start).abs().sum()

            loss = closs + ploss.cuda() * 0.001

            opt.zero_grad()
            loss.backward()
            opt.step()

        # prevent cuboids from having 0 dimensions
        new_param_dict = {}
        for p in param_dict:
            new_p = []
            for param in param_dict[p]:
                if param[0] == "Cuboid":
                    new_attrs = []
                    for attr in param[1]:
                        if torch.is_tensor(attr):
                            new_attr = torch.clamp(attr, min=0.01).detach()
                            new_attrs.append(new_attr)
                        else:
                            new_attrs.append(attr)
                    new_p.append((param[0], new_attrs))
                else:
                    new_p.append(param)
            new_param_dict[p] = new_p

        sa.fill_hier(hier, new_param_dict)
        verts, faces = hier_execute(hier)

        writeObj(verts, faces, out_file + '.obj')
        writeHierProg(hier, out_file + '.txt')

        fitted_progs.append((hier, index))

    return fitted_progs