Example #1
0
def main():
    """Fit a ShapeAssembly program's parameters to a target point cloud.

    Usage: <script> <program_file> <point_cloud_file> <output_prefix>

    Runs 400 Adam steps minimizing a Chamfer loss between surface samples
    of the differentiably-executed program and the target cloud, plus an
    L1 penalty on drifting from the initial parameters. Writes an
    intermediate mesh every 10 steps, then the final mesh (.obj) and the
    re-filled hierarchical program (.txt).
    """
    sa = ShapeAssembly()
    lines = sa.load_lines(sys.argv[1])

    # should be shape N x 3
    target_pc = load_point_cloud(sys.argv[2])

    out_file = sys.argv[3]
    hier, param_dict, param_list = sa.make_hier_param_dict(lines)

    opt = torch.optim.Adam(param_list, 0.001)

    # Snapshot of the initial parameters; used below to penalize drift
    # away from the starting program.
    start = torch.cat(param_list).clone()

    # The target tensor never changes, so move it to the GPU once instead
    # of re-transferring it on every one of the 400 iterations.
    target_gpu = target_pc.T.unsqueeze(0).cuda()

    # 'step' instead of 'iter' to avoid shadowing the builtin.
    for step in range(400):
        verts, faces = sa.diff_run(hier, param_dict)

        samps = sample_surface(faces, verts.unsqueeze(0), 10000)
        closs = cham_loss(samps.squeeze().T.unsqueeze(0).cuda(),
                          target_gpu, 0.0)

        # L1 distance of current parameters from their initial values.
        ploss = (torch.cat(param_list) - start).abs().sum()

        loss = closs + ploss.cuda() * 0.001

        opt.zero_grad()
        loss.backward()
        opt.step()

        if step % 10 == 0:
            writeObj(verts, faces, f'{step}_' + out_file + '.obj')

    writeObj(verts, faces, out_file + '.obj')
    sa.fill_hier(hier, param_dict)
    writeHierProg(hier, out_file + '.txt')
Example #2
0
def main(ind):
    # Generate one random two-level hierarchical ShapeAssembly program,
    # validate its geometry (rooted + stable), and on success write it to
    # random_hier_data/<ind>.txt. Returns True on success, False otherwise.
    sa = ShapeAssembly()
    root_lines, has_mid = make_skel()

    # Wrap the sampled skeleton lines in the root assembly block.
    prog_lines = ['Assembly Program_0 {']
    for b in root_lines:
        prog_lines.append('\t'+b)
    prog_lines.append('}')

    RP = Program()

    # Execute the root lines so we can look up the resulting cuboids.
    for l in root_lines:
        RP.execute(l)

    # The base sub-program attaches to 'mid' when present, else 'top'.
    base_par = 'mid' if has_mid else 'top'

    base_lines = make_base_prog(
        RP.cuboids['base'],
        RP.cuboids[base_par]
    )

    # Rename 'base' to 'Program_1' in the root program so it refers to the
    # sub-assembly appended next. NOTE: must run before base_lines are
    # appended, or the sub-program's own text would be renamed too.
    for i in range(len(prog_lines)):
        prog_lines[i] = prog_lines[i].replace('base', 'Program_1')



    prog_lines += base_lines

    #for b in base_lines:
    #    prog_lines.append('\t'+b)
    #prog_lines.append('}')

    # Canonicalize cuboid names to 'cube<k>'. The counter value used for a
    # line is the value *before* the post-line increment, and it resets to
    # -1 at each bbox declaration (which opens a sub-program), so the first
    # plain cuboid after a bbox becomes cube0. bbox and Program_ lines are
    # never renamed.
    cube_count = -1
    switches = []
    for line in prog_lines:
        if 'Cuboid' in line:
            if not ('Program_' in line or "bbox" in line):
                switches.append((
                    f'cube{cube_count}', line.split()[0]
                ))
            if "bbox" in line:
                cube_count = -1

            cube_count += 1
    # Apply the renames across every line of the combined program.
    for a, b in switches:
        prog_lines = [line.replace(b,a) for line in prog_lines]

    hier_prog = make_hier_prog(prog_lines)
    verts, faces = hier_execute(hier_prog)
    # Keep only programs that execute to rooted, stable geometry.
    if check_rooted(verts, faces) and check_stability(verts, faces):
        # writeObj(verts, faces, f'out_{ind}.obj')
        writeHierProg(hier_prog, f"random_hier_data/{ind}.txt")
        return True
    return False
Example #3
0
def run_generate(args):
    """Load a trained encoder/decoder pair and either sample novel shapes
    ("eval_gen") or reconstruct the validation split ("eval_recon"),
    writing meshes and programs under <outpath>/gen_<exp_name>."""
    os.system(f'mkdir {outpath}/gen_{args.exp_name} > /dev/null 2>&1')

    model_dir = f"{outpath}/{args.model_name}/models"
    decoder = torch.load(f"{model_dir}/decoder_{args.load_epoch}.pt").to(device)
    encoder = torch.load(f"{model_dir}/encoder_{args.load_epoch}.pt").to(device)

    # Seed every RNG in play so runs are reproducible.
    for seed_fn in (random.seed, np.random.seed, torch.manual_seed):
        seed_fn(args.rd_seed)

    with torch.no_grad():
        if args.mode == "eval_gen":
            done = 0
            miss = 0
            # Keep sampling until num_gen shapes succeed; failures are
            # counted so the rejection rate can be reported.
            while done < args.num_gen:
                print(f"Gen {done}")
                try:
                    latent = torch.randn(1, 1, args.hidden_dim).to(device)
                    prog, _ = run_eval_decoder(latent, decoder, True)
                    verts, faces = hier_execute(prog)

                    utils.writeObj(
                        verts, faces,
                        f"{outpath}/gen_{args.exp_name}/gen_{done}.obj")
                    utils.writeHierProg(
                        prog,
                        f"{outpath}/gen_{args.exp_name}/gen_prog_{done}.txt")
                    done += 1

                except Exception as e:
                    print(f"Failed to generate prog with {e}")
                    miss += 1

            print(f"Gen reject %: {miss / (args.num_gen + miss)}")

        if args.mode == "eval_recon":
            val_inds = getInds(f'data_splits/{args.category}/val.txt')
            for ind in tqdm(val_inds):
                gt_prog = utils.loadHPFromFile(f'{args.dataset_path}/{ind}.txt')
                gt_verts, gt_faces = hier_execute(gt_prog)
                # Encode the ground truth, then decode it back into a
                # program and execute both for side-by-side meshes.
                enc, _ = get_encoding(progToData(gt_prog), encoder, mle=True)
                prog, _ = run_eval_decoder(enc, decoder, False)
                verts, faces = hier_execute(prog)
                utils.writeObj(
                    verts, faces,
                    f"{outpath}/gen_{args.exp_name}/{ind}_recon.obj")
                utils.writeObj(
                    gt_verts, gt_faces,
                    f"{outpath}/gen_{args.exp_name}/{ind}_gt.obj")
def main():
    """Fit a ShapeAssembly program's parameters to a target mesh.

    Usage: <script> <program_file> <target_obj> <output_prefix>

    Optimizes the program's differentiable parameters with Adam for 400
    steps against a Chamfer loss (plus an L1 drift penalty), printing the
    loss each step and the total wall-clock time, then writes the fitted
    mesh and program.
    """
    assembler = ShapeAssembly()
    prog_lines = assembler.load_lines(sys.argv[1])

    # should be shape N x 3
    tverts, tfaces = loadObj(sys.argv[2])
    tverts = torch.tensor(tverts)
    tfaces = torch.tensor(tfaces).long()
    # Target samples are fixed, so draw them once up front.
    tsamps = sample_surface(tfaces, tverts.unsqueeze(0), 10000)

    out_file = sys.argv[3]
    hier, param_dict, param_list = assembler.make_hier_param_dict(prog_lines)

    start_time = time.time()
    opt = torch.optim.Adam(param_list, 0.001)

    # Initial parameter snapshot for the drift penalty.
    start = torch.cat(param_list).clone()

    for step in range(400):
        verts, faces = assembler.diff_run(hier, param_dict)

        samps = sample_surface(faces, verts.unsqueeze(0), 10000)
        closs = cham_loss(samps.squeeze().T.unsqueeze(0).cuda(),
                          tsamps.squeeze().T.unsqueeze(0).cuda(), 0.0)

        ploss = (torch.cat(param_list) - start).abs().sum()

        loss = closs + ploss.cuda() * 0.001
        print(float(loss))

        opt.zero_grad()
        loss.backward()
        opt.step()

    end_time = time.time()
    print(f"TIME: {end_time-start_time}")

    writeObj(verts, faces, out_file + '.obj')
    assembler.fill_hier(hier, param_dict)
    writeHierProg(hier, out_file + '.txt')
Example #5
0
def eval_recon(outdir, data_inds):
    """Reconstruct each indexed shape from its point cloud and report the
    mean F-score between predicted surface samples and the input cloud.
    Meshes and programs are written to <outdir>/<ind>.{obj,txt}."""
    decoder = torch.load(shapeAssembly_decoder).to(device)
    decoder.eval()
    encoder = PCEncoder()
    encoder.load_state_dict(torch.load(point_cloud_encoder))
    encoder.eval()
    encoder.to(device)

    os.system(f'mkdir {outdir}')

    n_shapes = 0.
    total_score = 0.

    for ind in tqdm(data_inds):

        cloud = torch.load(f'{point_cloud_folder}/{ind}.pts').to(device)
        latent = encoder(cloud.unsqueeze(0))
        prog, _ = run_eval_decoder(latent.unsqueeze(0), decoder, False)
        verts, faces = hier_execute(prog)

        utils.writeObj(verts, faces, f"{outdir}/{ind}.obj")
        utils.writeHierProg(prog, f"{outdir}/{ind}.txt")

        verts = verts.to(device)
        faces = faces.to(device)

        pred_samp = utils.sample_surface(faces, verts.unsqueeze(0), 10000,
                                         True)

        # Center the target cloud on its bounding-box midpoint.
        center = (cloud.max(dim=0).values + cloud.min(dim=0).values) / 2
        cloud = cloud - center

        #utils.writeSPC(pc_samp,f'tar_pc_{ind}.obj')
        #utils.writeSPC(pred_samp[0,:,:3],f'scripts/output/pred_pc_{ind}.obj')

        cloud = cloud.repeat(1, 2).unsqueeze(0)
        total_score += fscore.score(pred_samp.squeeze().T.unsqueeze(0),
                                    cloud.squeeze().T.unsqueeze(0))
        n_shapes += 1

    print(f"Average F-score: {total_score/n_shapes}")
Example #6
0
def main(dataset_path, outdir):
    """Split a dataset of hierarchical programs into valid / non-valid.

    A program is valid when its leaf count lies in [MIN_LEAVES, MAX_LEAVES],
    its executed geometry does not exceed the declared bounding box by more
    than BB_THRESH, it is rooted, and (when DO_STABLE) physically stable.
    Each program plus its executed mesh is written under <outdir>/valid or
    <outdir>/non_valid.
    """
    indices, progs = load_progs(dataset_path)

    os.system(f'mkdir {outdir}')
    os.system(f'mkdir {outdir}/valid')
    os.system(f'mkdir {outdir}/non_valid')

    # NOTE(review): removed an unreachable `if count < 0: continue` guard
    # (the counter started at 0 and was only ever incremented) along with
    # the now-unused counter itself.
    for ind, prog in tqdm(list(zip(indices, progs))):
        lc = simplifyHP(prog)
        verts, faces = hier_execute(prog)

        # Bounding-box dims parsed from the first line of the root program
        # (the first three comma-separated values inside its parentheses).
        bbdims = torch.tensor([
            float(a)
            for a in re.split(r'[()]', prog['prog'][0])[1].split(',')[:3]
        ])

        # Largest ratio of geometry extent to bbox half-extent; values
        # above 1 mean the mesh sticks out of its declared bounding box.
        bb_viol = (verts.abs().max(dim=0).values / (bbdims / 2)).max()

        try:
            rooted = check_rooted(verts, faces)
        except Exception as e:
            print(f"Failed rooted check with {e}")
            rooted = False

        # Stability checking is optional (and may be slow); assume stable
        # when disabled.
        stable = check_stability(verts, faces) if DO_STABLE else True

        if MIN_LEAVES <= lc <= MAX_LEAVES and bb_viol < BB_THRESH and rooted and stable:
            utils.writeHierProg(prog, f'{outdir}/valid/{ind}.txt')
            utils.writeObj(verts, faces, f'{outdir}/valid/{ind}.obj')
        else:
            utils.writeHierProg(prog, f'{outdir}/non_valid/{ind}.txt')
            utils.writeObj(verts, faces, f'{outdir}/non_valid/{ind}.obj')
Example #7
0
def gen_metrics(gen_progs,
                outpath,
                exp_name,
                epoch,
                VERBOSE,
                write_progs=True):
    """Execute generated programs and score them.

    Returns (results, misses): results maps 'num_parts', 'rootedness',
    'stability' to their means over the successfully executed programs
    (0. when none succeeded) plus a 'variance' entry; misses counts
    programs that failed to execute.
    """
    misses = 0.
    results = {
        'num_parts': [],
        'rootedness': [],
        'stability': [],
    }

    samples = []

    for idx, program in enumerate(gen_progs):
        # Execution failures count as misses and skip every metric.
        try:
            verts, faces = hier_execute(program)
            assert not torch.isnan(verts).any(), 'saw nan vert'
            if write_progs:
                utils.writeObj(
                    verts, faces,
                    f"{outpath}/{exp_name}/objs/gen/{epoch}_{idx}.obj")
                utils.writeHierProg(
                    program,
                    f"{outpath}/{exp_name}/programs/gen/{epoch}_{idx}.txt")

            # 8 verts per cuboid -> part count.
            results['num_parts'].append(verts.shape[0] / 8.0)
            samples.append((verts, faces))

        except Exception as e:
            misses += 1.
            if VERBOSE:
                print(f"failed gen metrics for {idx} with {e}")
            continue

        # Rooted/stable checks are best-effort; a failure here only skips
        # these two metrics for this program.
        try:
            results['rootedness'].append(
                1. if check_rooted(verts, faces) else 0.)
            results['stability'].append(
                1. if check_stability(verts, faces) else 0.)
        except Exception as e:
            if VERBOSE:
                print(f"failed rooted/stable with {e}")

    # Collapse each metric list into its mean; empty lists become 0.
    for key in results:
        vals = results[key]
        results[key] = torch.tensor(vals).mean().item() if vals else 0.

    try:
        results['variance'] = eval_get_var(samples)
    except Exception as e:
        results['variance'] = 0.
        if VERBOSE:
            print(f"failed getting variance with {e}")

    return results, misses
Example #8
0
def recon_metrics(recon_sets, outpath, exp_name, name, epoch, VERBOSE):
    """Compute reconstruction metrics for (pred, ground-truth) program pairs.

    Each element of recon_sets is (prog, gt_prog, prog_ind). Both programs
    are executed to meshes, written to disk under <outpath>/<exp_name>, and
    compared via F-score, shape IoU, and per-part parameter distance.
    Returns (results, misses) where results maps each metric name to its
    mean (0. when no values were collected) and misses counts pairs whose
    predicted program failed to execute.
    """
    misses = 0.
    results = {
        'fscores': [],
        'iou_shape': [],
        'param_dist_parts': [],
    }

    for prog, gt_prog, prog_ind in recon_sets:

        bbox = getBBox(gt_prog)

        # Ground-truth execution is assumed to succeed; not wrapped.
        gt_verts, gt_faces, gt_hscene = hier_execute(gt_prog, return_all=True)

        gt_cubes = [[CuboidToParams(c) for c in scene] for scene in gt_hscene]

        try:
            verts, faces, hscene = hier_execute(prog, return_all=True)
            cubes = [[CuboidToParams(c) for c in scene] for scene in hscene]

            assert not torch.isnan(verts).any(), 'saw nan vert'

        except Exception as e:
            # Predicted program failed to execute: count a miss and skip
            # all metrics for this pair.
            misses += 1.
            if VERBOSE:
                print(f"failed recon metrics for {prog_ind} with {e}")
            continue

        verts = verts.to(device)
        gt_verts = gt_verts.to(device)
        faces = faces.to(device)
        gt_faces = gt_faces.to(device)

        # Write each ground-truth obj/program only once (the gt directory
        # is shared across epochs/calls).
        gt_objs = os.listdir(f"{outpath}/{exp_name}/objs/gt/")

        if f"{prog_ind}.obj" not in gt_objs:
            utils.writeObj(gt_verts, gt_faces,
                           f"{outpath}/{exp_name}/objs/gt/{prog_ind}.obj")
            utils.writeHierProg(
                gt_prog, f"{outpath}/{exp_name}/programs/gt/{prog_ind}.txt")

        try:
            utils.writeObj(
                verts, faces,
                f"{outpath}/{exp_name}/objs/{name}/{epoch}_{prog_ind}.obj")
            utils.writeHierProg(
                prog,
                f"{outpath}/{exp_name}/programs/{name}/{epoch}_{prog_ind}.txt")

        except Exception as e:
            # Write failures are reported but do not block metric computation.
            print(f"Failed writing prog/obj for {prog_ind} with {e}")

        # Each metric below is best-effort: a failure skips only that metric.
        try:
            fs = getFScore(verts, faces, gt_verts, gt_faces)
            if fs is not None:
                results['fscores'].append(fs)
        except Exception as e:
            if VERBOSE:
                print(f"failed Fscore for {prog_ind} with {e}")

        try:
            siou = getShapeIoU(cubes, gt_cubes, bbox)
            if siou is not None:
                results['iou_shape'].append(siou)
        except Exception as e:
            if VERBOSE:
                print(f"failed Shape Iou for {prog_ind} with {e}")

        try:
            pd = getParamDist(cubes, gt_cubes, bbox)
            if pd is not None:
                results['param_dist_parts'].append(pd)
        except Exception as e:
            if VERBOSE:
                print(f"failed param dist for {prog_ind} with {e}")

    # Collapse each metric list into its mean; empty lists become 0.
    for key in results:
        if len(results[key]) > 0:
            res = torch.tensor(results[key]).mean().item()
        else:
            res = 0.

        results[key] = res

    return results, misses
Example #9
0
    q += next_q
    while len(q) > 0:
        prog, hier_index, bbox_dims = q.pop(0)
        next_q = rand_program(prog, num_cuboids, bbox_dims, hier_index)
        if next_q is None:
            prog.pop('prog')
            prog.pop('children')
            num_prev_children = 0
        else:
            q += next_q
            num_prev_children = len(prog['children'])
        num_cuboids = num_cuboids - num_prev_children + 2

        # if not enough remaining cuboids to keep expanding
        # pull all children out of q and make them leaves
        if not num_cuboids > 2:
            while len(q) > 0:
                prog, hier_index, bbox_dims = q.pop(0)
                prog.pop('prog')
                prog.pop('children')
            break

    return hier_prog


# Sample ten random hierarchical programs and dump each one's executed
# mesh and program text to <k>.obj / <k>.txt.
for sample_idx in range(10):
    sampled_prog = rand_hier_program()
    sampled_verts, sampled_faces = hier_execute(sampled_prog)
    writeObj(sampled_verts, sampled_faces, f'{sample_idx}.obj')
    writeHierProg(sampled_prog, f"{sample_idx}.txt")
Example #10
0
            tsamps = utils.sample_surface(tfaces, tverts.unsqueeze(0), 10000)

            try:

                psamps = utils.sample_surface(pfaces, pverts.unsqueeze(0),
                                              10000)

                pfs = fscore.score(psamps.squeeze().T.unsqueeze(0),
                                   tsamps.squeeze().T.unsqueeze(0))

            except Exception:
                pfs = 0.

            if pfs >= 50:
                gpfsv += 1.

            if pfs >= 75:
                gpcfsv += 1.
                utils.writeHierProg(hier, f"{outdir}/{ind}.txt")
                utils.writeObj(tverts, tfaces, f"{outdir}/{ind}_target.obj")
                utils.writeObj(pverts, pfaces, f"{outdir}/{ind}_parse.obj")

            print(
                f"CAT {CATEGORY}, P: {ind}, C: {count}| Greedy | Parse -- FS {round(gpfsv/count, 3)}, HFS {round(gpcfsv/count, 3)} | Errors {errors/count}"
            )

        except Exception as e:
            if str(e) != 'disconnected graph':
                errors += 1.
            print(f"Prog {ind} -> ERROR {e}")
Example #11
0
def fit(prog_path, obj_path, out_path):
    """Fit each ShapeAssembly program in prog_path to its matching mesh.

    Program filenames are expected to contain a numeric index after an
    underscore (e.g. 'prog_12.txt'); the target mesh is loaded from
    '<obj_path>/<index>.obj'. Each program's differentiable parameters are
    optimized for 400 Adam steps against a Chamfer loss (plus an L1 penalty
    on drifting from the initial parameters), cuboid dimensions are clamped
    away from zero, and the fitted mesh/program are written to
    '<out_path>/<index>.{obj,txt}'.

    Returns a list of (hier, index) tuples for the fitted programs.
    """
    progs = os.listdir(prog_path)
    # NOTE(review): 'objs' is collected but never used.
    objs = os.listdir(obj_path)
    fitted_progs = []
    for i, prg in enumerate(progs):
        print(f"fitting program {i}")
        sa = ShapeAssembly()
        # Extract the numeric index from '<prefix>_<index>.<ext>'.
        p_no_e = prg.split("_")[1]
        index = int(p_no_e.split(".")[0])

        # should be shape N x 3
        tverts, tfaces = loadObj(f"{obj_path}/{index}.obj")

        tverts = torch.tensor(tverts)
        tfaces = torch.tensor(tfaces).long()

        out_file = f"{out_path}/{index}"
        with open(f"{prog_path}/{prg}") as file:
            lines = file.readlines()
        hier, param_dict, param_list = sa.make_hier_param_dict(lines)

        opt = torch.optim.Adam(param_list, 0.001)

        # Snapshot of the initial parameters for the drift penalty below.
        start = torch.cat(param_list).clone()

        for iter in range(400):
            verts, faces = sa.diff_run(hier, param_dict)

            # Chamfer distance between 10k surface samples of the current
            # mesh and of the target mesh.
            samps = sample_surface(faces, verts.unsqueeze(0), 10000)
            tsamps = sample_surface(tfaces, tverts.unsqueeze(0), 10000)
            closs = cham_loss(samps.squeeze().T.unsqueeze(0).cuda(),
                              tsamps.squeeze().T.unsqueeze(0).cuda(), 0.0)

            # L1 penalty on parameter drift from the initial program.
            ploss = (torch.cat(param_list) - start).abs().sum()

            loss = closs + ploss.cuda() * 0.001

            opt.zero_grad()
            loss.backward()
            opt.step()

        # # prevent cuboids from having 0 dimensions
        # Rebuild the parameter dict, clamping every tensor attribute of
        # each Cuboid command to >= 0.01 and detaching it from autograd.
        new_param_dict = {}
        for p in param_dict:
            new_p = []
            for param in param_dict[p]:
                if param[0] == "Cuboid":
                    new_attrs = []
                    for attr in param[1]:
                        if torch.is_tensor(attr):
                            new_attr = torch.clamp(attr, min=0.01).detach()
                            new_attrs.append(new_attr)
                        else:
                            new_attrs.append(attr)
                    new_p.append((param[0], new_attrs))
                else:
                    new_p.append(param)
            new_param_dict[p] = new_p

        # Re-fill and re-execute with the clamped parameters before saving.
        sa.fill_hier(hier, new_param_dict)
        verts, faces = hier_execute(hier)

        writeObj(verts, faces, out_file + '.obj')
        writeHierProg(hier, out_file + '.txt')

        fitted_progs.append((hier, index))

    return fitted_progs
#     with open(f"random_data_fixed/{f}", "w") as file:
#         for l in new_lines:
#             file.write(l)

files = os.listdir("random_hier_data")
P = Program()


def _line_order(l):
    """Sort key for program lines: bbox first (0), other cuboid
    declarations ordered by their numeric suffix, remaining statements in
    the middle (100), symmetry ops (reflect/translate) last (1000)."""
    if "Cuboid(" in l:
        name = P.parseCuboid(l)[0]
        if name == "bbox":
            return 0
        return int(name[4:]) + 1
    if ("reflect" in l) or ("translate" in l):
        return 1000
    return 100


def fix_lines(prog):
    """Recursively sort every sub-program's lines into canonical order."""
    prog['prog'].sort(key=_line_order)
    for c in prog['children']:
        if not c == {}:
            fix_lines(c)


# NOTE(review): fix_lines/order used to be redefined inside the loop on
# every iteration; hoisted to module level (they close over nothing
# loop-local — only the module-level P).
for f in files:
    prog = loadHPFromFile(f"random_hier_data/{f}")
    fix_lines(prog)
    writeHierProg(prog, f"random_hier_data_fixed/{f}")
Example #13
0
#     p_dict['prog'] = new_lines
#     writeHierProg(p_dict, f"reorder/{p}")

def reorder_recur(prog):
    """Move each cuboid declaration directly before the first non-cuboid
    line that references it, then reorder 'children' to match the new
    cuboid order. Recurses into non-empty children."""
    cuboid_lines = [x for x in prog['prog'] if "Cuboid" in x]
    non_cuboid_lines = [x for x in prog['prog'] if "Cuboid" not in x]
    new_lines = copy(non_cuboid_lines)
    for line in cuboid_lines:
        # Cuboid name is everything left of '=' minus the trailing space.
        name = line.split("=")[0][:-1]
        relevant_lines = [x for x in non_cuboid_lines if name in x]
        insert_idx = new_lines.index(relevant_lines[0])
        new_lines.insert(insert_idx, line)
    prog['prog'] = new_lines
    new_cuboid_lines = [x for x in new_lines if "Cuboid" in x]
    new_order = [new_cuboid_lines.index(x) for x in cuboid_lines]
    # Sort by position only: the second tuple element is a child dict,
    # which is unorderable and would raise on key ties.
    temp_children = sorted(zip(new_order, prog['children']),
                           key=lambda t: t[0])
    prog['children'] = [x[1] for x in temp_children]
    for child in prog['children']:
        if not child == {}:
            reorder_recur(child)


# NOTE(review): reorder_recur used to be redefined inside the loop on
# every iteration; hoisted to module level.
progs = os.listdir("data/chair/")

for p in progs:
    with open(f"data/chair/{p}", "r") as file:
        lines = file.readlines()
    p_dict = make_hier_prog(lines)
    reorder_recur(p_dict)
    writeHierProg(p_dict, f"data/reorder/{p}")