Example #1
import numpy as np
import torch

from pybpl.matlab.bottomup import generate_random_parses
from gns.inference.parsing.top_k import search_parse

# process_parse (stroke -> minimal spline conversion) is assumed to be defined
# elsewhere in this module.


def get_topK_parses(img,
                    k,
                    score_fn,
                    configs_per=100,
                    trials_per=800,
                    device=None,
                    seed=3,
                    **grp_kwargs):
    # generate random walks (the "base parses")
    base_parses = generate_random_parses(I=img, seed=seed, **grp_kwargs)
    # convert strokes to minimal splines
    base_parses = [process_parse(parse, device) for parse in base_parses]

    # search for best stroke ordering & stroke direction configurations
    np.random.seed(seed)
    n = len(base_parses)
    parses = []
    log_probs = []
    for i in range(n):
        parses_i, log_probs_i = search_parse(base_parses[i], score_fn,
                                             configs_per, trials_per)
        parses.extend(parses_i)
        log_probs.append(log_probs_i)
    log_probs = torch.cat(log_probs)

    # deduplicate scores and sort in descending order (np.unique sorts ascending, hence the flips)
    log_probs, idx = np.unique(log_probs.cpu().numpy(), return_index=True)
    log_probs = torch.from_numpy(log_probs).flip(dims=[0])
    idx = torch.from_numpy(idx).flip(dims=[0])
    parses = [parses[i] for i in idx]

    return parses[:k], log_probs[:k]
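
Below is a minimal usage sketch for the function above; it is not part of the original example. It assumes that score_fn maps a list of candidate parses to a 1-D tensor of log-scores (here a dummy random scorer stands in for a trained model) and that img is a binary character image; the all-zeros array is only a placeholder, and a real image is needed for generate_random_parses to find strokes.

import numpy as np
import torch

def dummy_score_fn(parses):
    # Placeholder scorer: one random log-score per candidate parse.
    # In practice this would come from a trained generative model.
    return torch.randn(len(parses))

img = np.zeros((105, 105), dtype=bool)  # placeholder; use a real binary character image
top_parses, top_log_probs = get_topK_parses(img, k=5, score_fn=dummy_score_fn)
print(top_log_probs)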
Example #2
import math

import imageio
import matplotlib.pyplot as plt
import numpy as np

from pybpl.matlab.bottomup import generate_random_parses

# plot_parse (draws a single parse onto a matplotlib axis) is assumed to be
# defined elsewhere in this module.


def main():
    # load image to numpy binary array
    img = imageio.imread('./image_H.jpg')
    img = np.array(img > 200)

    # generate random parses
    parses = generate_random_parses(img, seed=3)

    # plot parsing results
    nparse = len(parses)
    n = math.ceil(nparse / 10)
    m = 10
    # squeeze=False keeps `axes` two-dimensional even when there is only one row of subplots
    fig, axes = plt.subplots(n, m + 1, figsize=(m + 1, n), squeeze=False)
    # first column
    axes[0, 0].imshow(img, cmap=plt.cm.binary)
    axes[0, 0].set_xticks([])
    axes[0, 0].set_yticks([])
    axes[0, 0].set_title('Input')
    for i in range(1, n):
        axes[i, 0].set_axis_off()
    # remaining columns
    for i in range(n):
        for j in range(1, m + 1):
            ix = i * m + (j - 1)
            if ix >= nparse:
                axes[i, j].set_axis_off()
                continue
            plot_parse(axes[i, j], parses[ix])
    plt.subplots_adjust(hspace=0., wspace=0.)
    plt.show()
Example #3
import numpy as np
import torch

# process_parse (stroke -> minimal spline conversion) is assumed to be defined
# or imported in the surrounding module.


def get_topK_parses(img,
                    k,
                    score_fn,
                    configs_per=100,
                    trials_per=800,
                    device=None,
                    seed=3,
                    pp_kwargs=None,
                    **grp_kwargs):
    """ copied from
    gns.inference.parsing.top_k, except allwoing for pp_kwargs, which is useful 
    if want to skip spline fit optimiziation step
    """
    from pybpl.matlab.bottomup import generate_random_parses
    from gns.inference.parsing.top_k import search_parse

    # generate random walks (the "base parses")
    base_parses = generate_random_parses(I=img, seed=seed, **grp_kwargs)
    # convert strokes to minimal splines
    if pp_kwargs is None:
        pp_kwargs = {}
    base_parses = [
        process_parse(parse, device, **pp_kwargs) for parse in base_parses
    ]

    # search for best stroke ordering & stroke direction configurations
    np.random.seed(seed)
    n = len(base_parses)
    parses = []
    log_probs = []
    for i in range(n):
        parses_i, log_probs_i = search_parse(base_parses[i], score_fn,
                                             configs_per, trials_per)
        parses.extend(parses_i)
        log_probs.append(log_probs_i)
    log_probs = torch.cat(log_probs)

    # deduplicate scores and sort in descending order (np.unique sorts ascending, hence the flips)
    log_probs, idx = np.unique(log_probs.cpu().numpy(), return_index=True)
    log_probs = torch.from_numpy(log_probs).flip(dims=[0])
    idx = torch.from_numpy(idx).flip(dims=[0])
    parses = [parses[i] for i in idx]

    return parses[:k], log_probs[:k]
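
As with Example #1, here is a minimal usage sketch (not from the original source) showing pp_kwargs being forwarded to process_parse. The keyword name 'fit_spline' is hypothetical; pass whichever option your process_parse implementation exposes for skipping the spline fit optimization, and substitute a real scorer and image.

import numpy as np
import torch

def dummy_score_fn(parses):
    # Placeholder scorer; a trained model would normally provide this.
    return torch.randn(len(parses))

img = np.zeros((105, 105), dtype=bool)  # stand-in for a real binary character image
top_parses, top_log_probs = get_topK_parses(
    img, k=5, score_fn=dummy_score_fn,
    pp_kwargs={'fit_spline': False},  # hypothetical option name
)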