Example #1
def run_generate(args):
    os.system(f'mkdir {outpath}/gen_{args.exp_name} > /dev/null 2>&1')

    decoder = torch.load(
        f"{outpath}/{args.model_name}/models/decoder_{args.load_epoch}.pt").to(
            device)
    encoder = torch.load(
        f"{outpath}/{args.model_name}/models/encoder_{args.load_epoch}.pt").to(
            device)

    random.seed(args.rd_seed)
    np.random.seed(args.rd_seed)
    torch.manual_seed(args.rd_seed)

    with torch.no_grad():
        if args.mode == "eval_gen":
            i = 0
            miss = 0
            while i < args.num_gen:
                print(f"Gen {i}")
                try:
                    h0 = torch.randn(1, 1, args.hidden_dim).to(device)
                    prog, _ = run_eval_decoder(h0, decoder, True)
                    verts, faces = hier_execute(prog)

                    utils.writeObj(
                        verts, faces,
                        f"{outpath}/gen_{args.exp_name}/gen_{i}.obj")
                    utils.writeHierProg(
                        prog,
                        f"{outpath}/gen_{args.exp_name}/gen_prog_{i}.txt")
                    i += 1

                except Exception as e:
                    print(f"Failed to generate prog with {e}")
                    miss += 1

            print(f"Gen reject %: {miss / (args.num_gen + miss)}")

        if args.mode == "eval_recon":
            ind_file = f'data_splits/{args.category}/val.txt'
            inds = getInds(ind_file)
            for ind in tqdm(inds):
                gtprog = utils.loadHPFromFile(f'{args.dataset_path}/{ind}.txt')
                gtverts, gtfaces = hier_execute(gtprog)
                shape = progToData(gtprog)
                enc, _ = get_encoding(shape, encoder, mle=True)
                prog, _ = run_eval_decoder(enc, decoder, False)
                verts, faces = hier_execute(prog)
                utils.writeObj(
                    verts, faces,
                    f"{outpath}/gen_{args.exp_name}/{ind}_recon.obj")
                utils.writeObj(gtverts, gtfaces,
                               f"{outpath}/gen_{args.exp_name}/{ind}_gt.obj")
Example #2
def prog_to_pc(prog):
    verts, faces = hier_execute(prog)
    for i in range(3):
        verts[:, i] = verts[:, i] - verts[:, i].mean()
    pc = utils.sample_surface(faces,
                              verts.unsqueeze(0),
                              num_samps,
                              return_normals=False)[0]
    return pc
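A hedged usage sketch: assuming the module also exposes utils.loadHPFromFile and a module-level num_samps (as the other examples do), a stored hierarchical program can be turned into a point cloud. The path is hypothetical.

prog = utils.loadHPFromFile('data/chair/172.txt')   # hypothetical path
pc = prog_to_pc(prog)                               # (num_samps, 3) tensor
# the vertices are mean-centered per axis before sampling, so the cloud sits near the origin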
Example #3
def calc_tab3():
    ddir = sys.argv[1]
    inds = os.listdir(ddir)
    outs = []

    if len(sys.argv) > 2:
        inds = inds[:int(sys.argv[2])]

    for ind in tqdm(inds):
        if '.txt' in ind:
            hp = utils.loadHPFromFile(f'{ddir}/{ind}')
            verts, faces = hier_execute(hp)
        else:
            verts, faces = utils.loadObj(f'{ddir}/{ind}')
            verts = torch.tensor(verts)
            faces = torch.tensor(faces)
        outs.append((verts, faces))

    misses = 0.
    results = {
        'num_parts': [],
        'rootedness': [],
        'stability': [],
    }

    samples = []

    for (verts, faces) in tqdm(outs):

        results['num_parts'].append(verts.shape[0] / 8.0)
        samples.append((verts, faces))

        if check_rooted(verts, faces):
            results['rootedness'].append(1.)
        else:
            results['rootedness'].append(0.)

        if check_stability(verts, faces):
            results['stability'].append(1.)
        else:
            results['stability'].append(0.)

    for key in results:
        if len(results[key]) > 0:
            res = torch.tensor(results[key]).mean().item()
        else:
            res = 0.

        results[key] = res

    results['variance'] = eval_get_var(samples)

    for key in results:
        print(f"Result {key} : {results[key]}")
Example #4
def main(ind):
    sa = ShapeAssembly()
    root_lines, has_mid = make_skel()

    prog_lines = ['Assembly Program_0 {']
    for b in root_lines:
        prog_lines.append('\t'+b)
    prog_lines.append('}')

    RP = Program()

    for l in root_lines:
        RP.execute(l)

    base_par = 'mid' if has_mid else 'top'

    base_lines = make_base_prog(
        RP.cuboids['base'],
        RP.cuboids[base_par]
    )

    for i in range(len(prog_lines)):
        prog_lines[i] = prog_lines[i].replace('base', 'Program_1')


    prog_lines += base_lines

    #for b in base_lines:
    #    prog_lines.append('\t'+b)
    #prog_lines.append('}')

    cube_count = -1
    switches = []
    for line in prog_lines:
        if 'Cuboid' in line:
            if not ('Program_' in line or "bbox" in line):
                switches.append((
                    f'cube{cube_count}', line.split()[0]
                ))
            if "bbox" in line:
                cube_count = -1

            cube_count += 1
    for a, b in switches:
        prog_lines = [line.replace(b,a) for line in prog_lines]

    hier_prog = make_hier_prog(prog_lines)
    verts, faces = hier_execute(hier_prog)
    if check_rooted(verts, faces) and check_stability(verts, faces):
        # writeObj(verts, faces, f'out_{ind}.obj')
        writeHierProg(hier_prog, f"random_hier_data/{ind}.txt")
        return True
    return False
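The renaming pass above maps every non-bbox cuboid variable to a canonical cube{i} name, resetting the counter whenever a new bbox (i.e., a new sub-program) starts. A self-contained toy run of the same loop, with made-up program lines:

prog_lines = [
    'bbox = Cuboid(1.0, 1.0, 1.0, True)',
    'leg = Cuboid(0.1, 0.5, 0.1, True)',
    'seat = Cuboid(0.9, 0.1, 0.9, True)',
]
cube_count = -1
switches = []
for line in prog_lines:
    if 'Cuboid' in line:
        if not ('Program_' in line or 'bbox' in line):
            switches.append((f'cube{cube_count}', line.split()[0]))
        if 'bbox' in line:
            cube_count = -1
        cube_count += 1
for a, b in switches:
    prog_lines = [line.replace(b, a) for line in prog_lines]
print(prog_lines)  # 'leg' becomes 'cube0', 'seat' becomes 'cube1'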
Example #5
def prog_to_pc(prog, ns):
    if prog['prog'] == []:
        return torch.zeros((ns, 3))
    verts, faces = hier_execute(prog)
    for i in range(3):
        verts[:, i] = verts[:, i] - verts[:, i].mean()
    pc = utils.sample_surface(faces,
                              verts.unsqueeze(0),
                              ns,
                              return_normals=False)[0]
    return pc
Example #6
def eval_recon(outdir, data_inds):
    decoder = torch.load(shapeAssembly_decoder).to(device)
    decoder.eval()
    encoder = PCEncoder()
    encoder.load_state_dict(torch.load(point_cloud_encoder))
    encoder.eval()
    encoder.to(device)

    os.system(f'mkdir {outdir}')

    count = 0.
    tdist = 0.

    for ind in tqdm(data_inds):

        pc_samp = torch.load(f'{point_cloud_folder}/{ind}.pts').to(device)
        enc = encoder(pc_samp.unsqueeze(0))
        prog, _ = run_eval_decoder(enc.unsqueeze(0), decoder, False)
        verts, faces = hier_execute(prog)

        utils.writeObj(verts, faces, f"{outdir}/{ind}.obj")
        utils.writeHierProg(prog, f"{outdir}/{ind}.txt")

        verts = verts.to(device)
        faces = faces.to(device)

        pred_samp = utils.sample_surface(faces, verts.unsqueeze(0), 10000,
                                         True)

        # Center PC

        offset = (pc_samp.max(dim=0).values + pc_samp.min(dim=0).values) / 2
        pc_samp -= offset

        #utils.writeSPC(pc_samp,f'tar_pc_{ind}.obj')
        #utils.writeSPC(pred_samp[0,:,:3],f'scripts/output/pred_pc_{ind}.obj')

        pc_samp = pc_samp.repeat(1, 2).unsqueeze(0)
        tdist += fscore.score(pred_samp.squeeze().T.unsqueeze(0),
                              pc_samp.squeeze().T.unsqueeze(0))
        count += 1

    print(f"Average F-score: {tdist/count}")
Example #7
def main(dataset_path, outdir):
    indices, progs = load_progs(dataset_path)

    os.system(f'mkdir {outdir}')
    os.system(f'mkdir {outdir}/valid')
    os.system(f'mkdir {outdir}/non_valid')

    count = 0

    for ind, prog in tqdm(list(zip(indices, progs))):
        count += 1

        if count < 0:
            continue

        lc = simplifyHP(prog)
        verts, faces = hier_execute(prog)

        bbdims = torch.tensor([
            float(a)
            for a in re.split(r'[()]', prog['prog'][0])[1].split(',')[:3]
        ])

        bb_viol = (verts.abs().max(dim=0).values / (bbdims / 2)).max()

        try:
            rooted = check_rooted(verts, faces)
        except Exception as e:
            print(f"Failed rooted check with {e}")
            rooted = False

        if DO_STABLE:
            stable = check_stability(verts, faces)
        else:
            stable = True

        if MIN_LEAVES <= lc <= MAX_LEAVES and bb_viol < BB_THRESH and rooted and stable:
            utils.writeHierProg(prog, f'{outdir}/valid/{ind}.txt')
            utils.writeObj(verts, faces, f'{outdir}/valid/{ind}.obj')
        else:
            utils.writeHierProg(prog, f'{outdir}/non_valid/{ind}.txt')
            utils.writeObj(verts, faces, f'{outdir}/non_valid/{ind}.obj')
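Here bb_viol measures how far the most out-of-bounds vertex coordinate extends relative to the bounding box's half-extents, so a value above 1 means the executed geometry sticks out of the declared bbox. A self-contained check with toy numbers:

import torch

verts = torch.tensor([[0.6, 0.2, -0.1], [-0.4, 0.55, 0.0]])
bbdims = torch.tensor([1.0, 1.0, 1.0])   # full bbox dimensions
bb_viol = (verts.abs().max(dim=0).values / (bbdims / 2)).max()
print(bb_viol)  # tensor(1.2000): x = 0.6 exceeds the half-extent of 0.5 by 20%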
Example #8
def gen_metrics(gen_progs,
                outpath,
                exp_name,
                epoch,
                VERBOSE,
                write_progs=True):
    misses = 0.
    results = {
        'num_parts': [],
        'rootedness': [],
        'stability': [],
    }

    samples = []

    for i, prog in enumerate(gen_progs):
        try:
            verts, faces = hier_execute(prog)
            assert not torch.isnan(verts).any(), 'saw nan vert'
            if write_progs:
                utils.writeObj(
                    verts, faces,
                    f"{outpath}/{exp_name}/objs/gen/{epoch}_{i}.obj")
                utils.writeHierProg(
                    prog, f"{outpath}/{exp_name}/programs/gen/{epoch}_{i}.txt")

            results['num_parts'].append(verts.shape[0] / 8.0)
            samples.append((verts, faces))

        except Exception as e:
            misses += 1.
            if VERBOSE:
                print(f"failed gen metrics for {i} with {e}")
            continue

        try:
            if check_rooted(verts, faces):
                results['rootedness'].append(1.)
            else:
                results['rootedness'].append(0.)

            if check_stability(verts, faces):
                results['stability'].append(1.)
            else:
                results['stability'].append(0.)

        except Exception as e:
            if VERBOSE:
                print(f"failed rooted/stable with {e}")

    for key in results:
        if len(results[key]) > 0:
            res = torch.tensor(results[key]).mean().item()
        else:
            res = 0.

        results[key] = res

    try:
        results['variance'] = eval_get_var(samples)
    except Exception as e:
        results['variance'] = 0.
        if VERBOSE:
            print(f"failed getting variance with {e}")

    return results, misses
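A hedged usage sketch (the output directory, experiment name, and epoch below are assumptions): gen_progs would be a list of hierarchical programs decoded from random latent codes, as in Example #1.

results, misses = gen_metrics(gen_progs, outpath='output', exp_name='chair_vae',
                              epoch=100, VERBOSE=True, write_progs=False)
print(results)  # {'num_parts': ..., 'rootedness': ..., 'stability': ..., 'variance': ...}
print(f"{misses} programs failed to execute")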
Example #9
def recon_metrics(recon_sets, outpath, exp_name, name, epoch, VERBOSE):
    misses = 0.
    results = {
        'fscores': [],
        'iou_shape': [],
        'param_dist_parts': [],
    }

    for prog, gt_prog, prog_ind in recon_sets:

        bbox = getBBox(gt_prog)

        gt_verts, gt_faces, gt_hscene = hier_execute(gt_prog, return_all=True)

        gt_cubes = [[CuboidToParams(c) for c in scene] for scene in gt_hscene]

        try:
            verts, faces, hscene = hier_execute(prog, return_all=True)
            cubes = [[CuboidToParams(c) for c in scene] for scene in hscene]

            assert not torch.isnan(verts).any(), 'saw nan vert'

        except Exception as e:
            misses += 1.
            if VERBOSE:
                print(f"failed recon metrics for {prog_ind} with {e}")
            continue

        verts = verts.to(device)
        gt_verts = gt_verts.to(device)
        faces = faces.to(device)
        gt_faces = gt_faces.to(device)

        gt_objs = os.listdir(f"{outpath}/{exp_name}/objs/gt/")

        if f"{prog_ind}.obj" not in gt_objs:
            utils.writeObj(gt_verts, gt_faces,
                           f"{outpath}/{exp_name}/objs/gt/{prog_ind}.obj")
            utils.writeHierProg(
                gt_prog, f"{outpath}/{exp_name}/programs/gt/{prog_ind}.txt")

        try:
            utils.writeObj(
                verts, faces,
                f"{outpath}/{exp_name}/objs/{name}/{epoch}_{prog_ind}.obj")
            utils.writeHierProg(
                prog,
                f"{outpath}/{exp_name}/programs/{name}/{epoch}_{prog_ind}.txt")

        except Exception as e:
            print(f"Failed writing prog/obj for {prog_ind} with {e}")

        try:
            fs = getFScore(verts, faces, gt_verts, gt_faces)
            if fs is not None:
                results['fscores'].append(fs)
        except Exception as e:
            if VERBOSE:
                print(f"failed Fscore for {prog_ind} with {e}")

        try:
            siou = getShapeIoU(cubes, gt_cubes, bbox)
            if siou is not None:
                results['iou_shape'].append(siou)
        except Exception as e:
            if VERBOSE:
                print(f"failed Shape Iou for {prog_ind} with {e}")

        try:
            pd = getParamDist(cubes, gt_cubes, bbox)
            if pd is not None:
                results['param_dist_parts'].append(pd)
        except Exception as e:
            if VERBOSE:
                print(f"failed param dist for {prog_ind} with {e}")

    for key in results:
        if len(results[key]) > 0:
            res = torch.tensor(results[key]).mean().item()
        else:
            res = 0.

        results[key] = res

    return results, misses
Example #10
    q += next_q
    while len(q) > 0:
        prog, hier_index, bbox_dims = q.pop(0)
        next_q = rand_program(prog, num_cuboids, bbox_dims, hier_index)
        if next_q is None:
            prog.pop('prog')
            prog.pop('children')
            num_prev_children = 0
        else:
            q += next_q
            num_prev_children = len(prog['children'])
        num_cuboids = num_cuboids - num_prev_children + 2

        # if not enough remaining cuboids to keep expanding
        # pull all children out of q and make them leaves
        if not num_cuboids > 2:
            while len(q) > 0:
                prog, hier_index, bbox_dims = q.pop(0)
                prog.pop('prog')
                prog.pop('children')
            break

    return hier_prog


for n in range(10):
    prog = rand_hier_program()
    verts, faces = hier_execute(prog)
    writeObj(verts, faces, f'{n}.obj')
    writeHierProg(prog, f"{n}.txt")
Example #11
    errors = 0.
    gpfsv = 0.
    gpcfsv = 0.

    os.system(f'mkdir {outdir}')

    for ind in inds:
        count += 1.

        try:
            hier = jp.parseJsonToHier(ind, CATEGORY)
            nshier = jp.parseJsonToHier(ind, CATEGORY, True)

            gen.generate_program(hier)

            pverts, pfaces = hier_execute(hier)
            tverts, tfaces = get_gt_geom(nshier, False)

            tsamps = utils.sample_surface(tfaces, tverts.unsqueeze(0), 10000)

            try:

                psamps = utils.sample_surface(pfaces, pverts.unsqueeze(0),
                                              10000)

                pfs = fscore.score(psamps.squeeze().T.unsqueeze(0),
                                   tsamps.squeeze().T.unsqueeze(0))

            except Exception:
                pfs = 0.
Example #12
def fit(prog_path, obj_path, out_path):
    progs = os.listdir(prog_path)
    objs = os.listdir(obj_path)
    fitted_progs = []
    for i, prg in enumerate(progs):
        print(f"fitting program {i}")
        sa = ShapeAssembly()
        p_no_e = prg.split("_")[1]
        index = int(p_no_e.split(".")[0])

        # should be shape N x 3
        tverts, tfaces = loadObj(f"{obj_path}/{index}.obj")

        tverts = torch.tensor(tverts)
        tfaces = torch.tensor(tfaces).long()

        out_file = f"{out_path}/{index}"
        with open(f"{prog_path}/{prg}") as file:
            lines = file.readlines()
        hier, param_dict, param_list = sa.make_hier_param_dict(lines)

        opt = torch.optim.Adam(param_list, 0.001)

        start = torch.cat(param_list).clone()

        for _ in range(400):
            verts, faces = sa.diff_run(hier, param_dict)

            samps = sample_surface(faces, verts.unsqueeze(0), 10000)
            tsamps = sample_surface(tfaces, tverts.unsqueeze(0), 10000)
            closs = cham_loss(samps.squeeze().T.unsqueeze(0).cuda(),
                              tsamps.squeeze().T.unsqueeze(0).cuda(), 0.0)

            ploss = (torch.cat(param_list) - start).abs().sum()

            loss = closs + ploss.cuda() * 0.001

            opt.zero_grad()
            loss.backward()
            opt.step()

        # prevent cuboids from having 0 dimensions
        new_param_dict = {}
        for p in param_dict:
            new_p = []
            for param in param_dict[p]:
                if param[0] == "Cuboid":
                    new_attrs = []
                    for attr in param[1]:
                        if torch.is_tensor(attr):
                            new_attr = torch.clamp(attr, min=0.01).detach()
                            new_attrs.append(new_attr)
                        else:
                            new_attrs.append(attr)
                    new_p.append((param[0], new_attrs))
                else:
                    new_p.append(param)
            new_param_dict[p] = new_p

        sa.fill_hier(hier, new_param_dict)
        verts, faces = hier_execute(hier)

        writeObj(verts, faces, out_file + '.obj')
        writeHierProg(hier, out_file + '.txt')

        fitted_progs.append((hier, index))

    return fitted_progs
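The post-fit cleanup above clamps every cuboid dimension tensor to a 0.01 minimum and detaches it from the optimization graph before re-filling the hierarchy. A self-contained sketch of that per-attribute transform, using toy values:

import torch

attrs = [torch.tensor(-0.003, requires_grad=True), 'aligned']   # toy Cuboid attributes
new_attrs = [torch.clamp(a, min=0.01).detach() if torch.is_tensor(a) else a
             for a in attrs]
print(new_attrs)  # [tensor(0.0100), 'aligned']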