Ejemplo n.º 1
0
def make_pm_prevs(model, dataloaders, cfg, centroids, all_labels, device):
    """Build a preview montage of frames, probability maps and thresholded maps.

    Extracts features on all frames, fits a bagging classifier to obtain
    per-superpixel foreground probabilities, then stacks vertically:
    the preview images, the colorized probability maps, and the maps
    thresholded at 0.5 — with the preview frames concatenated horizontally.

    Returns the montage as a single numpy array.
    """
    feats, labels_pos = get_features(model, dataloaders['all'], device)

    # Per-superpixel foreground probability via bagged trees.
    probas = calc_bagging(np.concatenate(feats),
                          np.concatenate(labels_pos),
                          T=cfg.bag_t,
                          bag_max_depth=cfg.bag_max_depth,
                          bag_n_feats=cfg.bag_n_feats)

    # Flatten the per-sample lists of frame indices from the preview loader.
    frames = [s['frame_idx'] for s in dataloaders['prev']]
    frames = [item for sublist in frames for item in sublist]

    df = centroids.assign(desc=np.concatenate(feats), proba=probas)
    scores = get_pm_array(all_labels, df)
    scores = [scores[f] for f in frames]
    scores_thr = [(s > 0.5).astype(float) for s in scores]
    scores = [colorize(s) for s in scores]
    scores_thr = [colorize(s) for s in scores_thr]
    # Move channel axis last: (C, H, W) -> (H, W, C).
    images = [
        np.rollaxis(s['image_unnormal'].squeeze().cpu().numpy(), 0, 3)
        for s in dataloaders['prev']
    ]
    all_images = (np.concatenate(images, axis=1)).astype(np.uint8)
    all_scores = np.concatenate(scores, axis=1)
    all_scores_thr = np.concatenate(scores_thr, axis=1)
    # Renamed from `all` to avoid shadowing the builtin.
    montage = np.concatenate((all_images, all_scores, all_scores_thr), axis=0)

    return montage
Ejemplo n.º 2
0
def main(cfg):
    """Load a pretrained Siamese model, compute a foreground probability map
    and build connected-component graphs over the dataset.

    Expects `cfg` to carry the model hyper-parameters, dataset paths and
    bagging parameters used below.
    """
    device = torch.device('cuda' if cfg.cuda else 'cpu')
    model = Siamese(embedded_dims=cfg.embedded_dims,
                    cluster_number=cfg.n_clusters,
                    alpha=cfg.alpha,
                    backbone=cfg.backbone).to(device)

    dl_single = Loader(pjoin(cfg.in_root, 'Dataset' + cfg.train_dir),
                       normalization='rescale',
                       resize_shape=cfg.in_shape)

    dataloader = DataLoader(dl_single, collate_fn=dl_single.collate_fn)

    run_path = pjoin(cfg.out_root, cfg.run_dir)

    # Restore only the weights present in the checkpoint (partial load).
    path_ = pjoin(run_path, 'checkpoints', 'init_dec.pth.tar')
    print('loading checkpoint {}'.format(path_))
    state_dict = torch.load(path_, map_location=lambda storage, loc: storage)
    model.load_partial(state_dict)

    if (cfg.clf):
        print('changing output of decoder to 1 channel')
        model.dec.autoencoder.to_predictor()

    features, pos_masks = clst.get_features(model, dataloader, device)
    cat_features = np.concatenate(features)
    cat_pos_mask = np.concatenate(pos_masks)
    print('computing probability map')
    # TODO(review): T is hard-coded to 30 here while every other caller uses
    # cfg.bag_t — confirm whether this override is intentional.
    probas = calc_bagging(
        cat_features,
        cat_pos_mask,
        30,
        bag_max_depth=cfg.bag_max_depth,
        bag_n_feats=cfg.bag_n_feats,
        n_jobs=1)

    # Move frame axis first: (H, W, N) -> (N, H, W).
    labels = np.rollaxis(dl_single.labels, -1, 0)

    pm_scores_fg = get_pm_array(labels, probas)

    pm_map = colorize(pm_scores_fg[cfg.frame])
    pm_thr_map = colorize(pm_scores_fg[cfg.frame] > 0.5)
    cluster_maps = make_cluster_maps(model, dataloader, device)[cfg.frame]

    # Split the flat probability vector back into per-frame chunks, one
    # chunk per frame sized by its number of superpixel labels.
    probas = torch.from_numpy(probas).to(device)
    n_labels = [np.unique(s['labels']).size for s in dataloader.dataset]
    probas = torch.split(probas, n_labels)

    print('Generating connected components graphs')
    edges_list, subgraphs = utls.make_edges_ccl(model,
                                                dataloader,
                                                device,
                                                probas,
                                                return_subgraphs=True)
Ejemplo n.º 3
0
def main(cfg):
    """Run single-pass KSP segmentation (one forward + one backward graph)
    and save the resulting superpixel sets and probability map.

    Returns `cfg` so callers can chain configurations.
    """
    out_path = pjoin(cfg.out_path, '{}'.format(cfg.exp_name),
                     'ksp_segmentation')

    if (os.path.exists(out_path)):
        # Deliberately continue even when the path exists (overwrite run).
        print('ouput path {} already exists.'.format(out_path))
    else:
        os.makedirs(out_path)

    # Set logger
    print('-' * 10)
    print('starting segmentation on: {}'.format(cfg.in_path))
    print('2d locs filename: {}'.format(cfg.locs_fname))
    print('Output path: {}'.format(out_path))
    print('-' * 10)

    precomp_desc_path = pjoin(cfg.in_path, cfg.precomp_dir)
    if (not os.path.exists(precomp_desc_path)):
        os.makedirs(precomp_desc_path)

    # Persist the configuration alongside the results.
    with open(pjoin(out_path, 'cfg.yml'), 'w') as outfile:
        yaml.dump(cfg.__dict__, stream=outfile, default_flow_style=False)

    # ---------- Descriptors/superpixel costs
    spext = SuperpixelExtractor(cfg.in_path, cfg.precomp_dir,
                                cfg.slic_compactness, cfg.slic_n_sp)
    spext.run()

    link_agent, desc_df = make_link_agent(cfg)

    print('Building superpixel managers')
    sps_man = spm.SuperpixelManager(cfg.in_path, cfg.precomp_dir,
                                    link_agent.labels, desc_df,
                                    cfg.init_radius)
    print('Using foreground model from model')
    pm = utls.probas_to_df(link_agent.labels, link_agent.obj_preds)

    g_for = gtrack.GraphTracking(link_agent, sps_man=sps_man)

    g_back = gtrack.GraphTracking(link_agent, sps_man=sps_man)

    dict_ksp = dict()

    # make forward and backward graphs
    g_back.make_graph(desc_df,
                      pm,
                      cfg.pm_thr,
                      cfg.norm_neighbor,
                      direction='backward',
                      labels=link_agent.labels)
    g_for.make_graph(desc_df,
                     pm,
                     cfg.pm_thr,
                     cfg.norm_neighbor,
                     direction='forward',
                     labels=link_agent.labels)

    print("Computing KSP on backward graph.")
    sps = g_back.run()
    dict_ksp['backward_sets'] = sps

    print("Computing KSP on forward graph.")
    sps = g_for.run()
    dict_ksp['forward_sets'] = sps

    # Union of superpixels selected by either direction.
    all_sps = list(set(dict_ksp['forward_sets'] + dict_ksp['backward_sets']))
    print('got ', len(all_sps), ' unique superpixels')

    # Saving
    fileOut = pjoin(out_path, 'results.npz')
    data = dict()
    data['ksp_scores_mat'] = utls.get_binary_array(link_agent.labels,
                                                   np.array(all_sps))
    data['pm_scores_mat'] = utls.get_pm_array(link_agent.labels, pm)
    data['paths_back'] = dict_ksp['backward_sets']
    data['paths_for'] = dict_ksp['forward_sets']
    data['all_sps'] = all_sps
    print("Saving results and cfg to: " + fileOut)
    np.savez(fileOut, **data)

    print('Finished experiment: ', out_path)

    write_frames_results.main(cfg, out_path)
    comp_scores.main(cfg, out_path)

    return cfg
Ejemplo n.º 4
0
def main(cfg):
    """Render per-frame visualization panels: input image with annotations,
    foreground probability map, thresholded map, cluster map and entrance
    probabilities.

    Optionally saves each frame's panel row to ``cfg.save_path`` and, when
    ``cfg.return_dict`` is set, returns a list of dicts keyed by panel name;
    otherwise returns a single stacked numpy array of all frames.
    """
    locs2d = utls.readCsv(
        os.path.join(cfg.in_path, cfg.locs_dir, cfg.csv_fname))

    # ---------- Descriptors/superpixel costs
    dm = DataManager(cfg.in_path, cfg.precomp_dir)
    dm.calc_superpix(cfg.slic_compactness, cfg.slic_n_sp)

    link_agent, desc_df = make_link_agent(cfg)

    if (cfg.use_siam_pred):
        # Objectness comes straight from the DEC/siamese model predictions.
        print('will use DEC/siam objectness probabilities')
        probas = link_agent.obj_preds
        pm_scores_fg = utls.get_pm_array(link_agent.labels, probas)
    else:
        # Otherwise fit a bagging foreground model from entrance superpixels.
        pm = utls.calc_pm(desc_df,
                          np.array(link_agent.get_all_entrance_sps(desc_df)),
                          cfg.bag_n_feats, cfg.bag_t, cfg.bag_max_depth,
                          cfg.bag_max_samples, cfg.bag_jobs)
        pm_scores_fg = utls.get_pm_array(link_agent.labels, pm)

    dl = LocPriorDataset(cfg.in_path,
                         resize_shape=512,
                         normalization='rescale',
                         csv_fname=cfg.csv_fname)

    cluster_maps = link_agent.make_cluster_maps()

    if (cfg.do_all):
        # Render every frame instead of only the configured subset.
        cfg.fin = np.arange(len(dl))

    ims = []
    pbar = tqdm.tqdm(total=len(cfg.fin))
    for fin in cfg.fin:

        loc = locs2d[locs2d['frame'] == fin]
        if (loc.shape[0] > 0):
            # Frame has a 2D user location: draw entrance circle and
            # per-superpixel entrance probabilities.
            i_in, j_in = link_agent.get_i_j(loc.iloc[0])

            entrance_probas = np.zeros(link_agent.labels.shape[1:])
            label_in = link_agent.labels[fin, i_in, j_in]
            for l in np.unique(link_agent.labels[fin]):
                proba = link_agent.get_proba(fin, label_in, fin, l, desc_df)
                entrance_probas[link_agent.labels[fin] == l] = proba

            truth = dl[fin]['label/segmentation'][..., 0]
            truth_ct = segmentation.find_boundaries(truth, mode='thick')
            im1 = dl[fin]['image_unnormal']
            # Circle of normalized radius around the entrance point.
            rr, cc = draw.circle_perimeter(i_in,
                                           j_in,
                                           int(cfg.norm_neighbor_in *
                                               im1.shape[1]),
                                           shape=im1.shape)

            # Ground-truth contour in red.
            im1[truth_ct, ...] = (255, 0, 0)

            # Entrance circle in green (in-place channel writes).
            im1[rr, cc, 0] = 0
            im1[rr, cc, 1] = 255
            im1[rr, cc, 2] = 0

            im1 = csv.draw2DPoint(locs2d.to_numpy(), fin, im1, radius=7)
            ims_ = []
            ims_.append(im1)
            ims_.append(colorize(pm_scores_fg[fin]))
            ims_.append(
                colorize((pm_scores_fg[fin] > cfg.pm_thr).astype(float)))
            ims_.append(cluster_maps[fin])
            ims_.append(colorize(entrance_probas))
            ims.append(ims_)

        else:
            # No location on this frame: blank entrance panel.
            im1 = dl[fin]['image_unnormal']

            ims_ = []
            ims_.append(im1)
            ims_.append(colorize(pm_scores_fg[fin]))
            ims_.append(
                colorize((pm_scores_fg[fin] > cfg.pm_thr).astype(float)))
            ims_.append(cluster_maps[fin])
            ims_.append(colorize(np.zeros_like(pm_scores_fg[fin])))
            ims.append(ims_)

        pbar.update(1)
    pbar.close()

    if (cfg.do_all):
        print('will save all to {}'.format(cfg.save_path))
        if (not os.path.exists(cfg.save_path)):
            os.makedirs(cfg.save_path)
        pbar = tqdm.tqdm(total=len(ims))
        for i, im in enumerate(ims):
            # One PNG per frame, panels concatenated horizontally.
            io.imsave(pjoin(cfg.save_path, 'im_{:04d}.png'.format(i)),
                      np.concatenate(im, axis=1))
            pbar.update(1)
        pbar.close()

    if (cfg.return_dict):
        ims_dicts = []
        for ims_ in ims:
            dict_ = {
                'image': ims_[0],
                'pm': ims_[1],
                'pm_thr': ims_[2],
                'clusters': ims_[3],
                'entrance': ims_[4]
            }
            ims_dicts.append(dict_)
        return ims_dicts

    # Default: one big array, frames stacked vertically, panels horizontally.
    return np.concatenate([np.concatenate(im, axis=1) for im in ims], axis=0)
Ejemplo n.º 5
0
def main(cfg):
    """Run iterative KSP segmentation, alternating forward/backward graphs
    until neither direction gains positive superpixels or ``cfg.n_iters_ksp``
    is reached; saves per-iteration and final results under ``cfg.run_dir``.

    Returns `cfg` so callers can chain configurations.
    """
    d = datetime.datetime.now()
    cfg.run_dir = pjoin(cfg.out_path, '{}'.format(cfg.exp_name))

    if (os.path.exists(cfg.run_dir)):
        print('run dir {} already exists.'.format(cfg.run_dir))
        return cfg
    else:
        os.makedirs(cfg.run_dir)

    # Set logger
    utls.setup_logging(cfg.run_dir)
    logger = logging.getLogger('ksp')

    logger.info('-' * 10)
    logger.info('starting experiment on: {}'.format(cfg.in_path))
    logger.info('2d locs filename: {}'.format(cfg.csv_fname))
    logger.info('Output path: {}'.format(cfg.run_dir))
    logger.info('-' * 10)

    precomp_desc_path = pjoin(cfg.in_path, cfg.precomp_dir)
    if (not os.path.exists(precomp_desc_path)):
        os.makedirs(precomp_desc_path)

    # Persist the configuration alongside the results.
    with open(pjoin(cfg.run_dir, 'cfg.yml'), 'w') as outfile:
        yaml.dump(cfg.__dict__, stream=outfile, default_flow_style=False)

    # ---------- Descriptors/superpixel costs
    dm = DataManager(cfg.in_path, cfg.precomp_dir, feats_mode=cfg.feats_mode)
    dm.calc_superpix(cfg.slic_compactness, cfg.slic_n_sp)

    link_agent, desc_df = make_link_agent(cfg)

    logger.info('Building superpixel managers')
    sps_man = spm.SuperpixelManager(cfg.in_path,
                                    cfg.precomp_dir,
                                    link_agent.labels,
                                    desc_df,
                                    init_radius=cfg.sp_trans_init_radius,
                                    hoof_n_bins=cfg.hoof_n_bins)

    locs2d_sps = link_agent.get_all_entrance_sps(desc_df)
    desc_df['positive'] = locs2d_sps

    if (cfg.use_siam_pred):
        # Objectness from the DEC/siamese model predictions.
        pm = utls.probas_to_df(link_agent.labels, link_agent.obj_preds)
    else:
        # Otherwise fit a bagging foreground model on entrance superpixels.
        pm = utls.calc_pm(desc_df, desc_df['positive'], cfg.bag_n_feats,
                          cfg.bag_t, cfg.bag_max_depth, cfg.bag_max_samples,
                          cfg.bag_jobs)

    ksp_scores_mat = []

    g_for = gtrack.GraphTracking(link_agent, sps_man=sps_man)

    g_back = gtrack.GraphTracking(link_agent, sps_man=sps_man)

    find_new_forward = True
    find_new_backward = True
    i = 0

    pos_sp_for = []
    pos_sp_back = []
    list_ksp = []
    pos_tls_for = None
    pos_tls_back = None

    dict_ksp = dict()
    while ((find_new_forward or find_new_backward) and (i < cfg.n_iters_ksp)):

        logger.info("i: " + str(i + 1))

        # After the first iteration, merge tracklets found so far.
        # (was bitwise `&`; `and` is the intended boolean operator)
        if ((i > 0) and find_new_forward):
            g_for.merge_tracklets_temporally(pos_tls_for, pm, desc_df,
                                             cfg.pm_thr)

        if ((i > 0) and find_new_backward):
            g_back.merge_tracklets_temporally(pos_tls_back, pm, desc_df,
                                              cfg.pm_thr)
        # Make backward graph
        if (find_new_backward):

            g_back.makeFullGraph(desc_df,
                                 pm,
                                 cfg.pm_thr,
                                 cfg.hoof_tau_u,
                                 cfg.norm_neighbor,
                                 direction='backward',
                                 labels=link_agent.labels)

            logger.info("Computing KSP on backward graph. (i: {})".format(i +
                                                                          1))
            g_back.run()
            dict_ksp['backward_sets'], pos_tls_back = utls.ksp2sps(
                g_back.kspSet, g_back.tracklets)
            dict_ksp['backward_tracklets'] = g_back.tracklets
            dict_ksp['backward_costs'] = g_back.costs

        # Make forward graph
        if (find_new_forward):

            g_for.makeFullGraph(desc_df,
                                pm,
                                cfg.pm_thr,
                                cfg.hoof_tau_u,
                                cfg.norm_neighbor,
                                direction='forward',
                                labels=link_agent.labels)

            logger.info("Computing KSP on forward graph. (i: {})".format(i +
                                                                         1))
            g_for.run()
            dict_ksp['forward_sets'], pos_tls_for = utls.ksp2sps(
                g_for.kspSet, g_for.tracklets)
            dict_ksp['forward_tracklets'] = g_for.tracklets

        if ((find_new_forward or find_new_backward)):

            ksp_scores_mat = utls.sp_tuples_to_mat(
                dict_ksp['forward_sets'] + dict_ksp['backward_sets'],
                link_agent.labels)

            # Update marked superpixels if graph is not "finished"
            if (find_new_forward):
                marked_for = [
                    m for sublist in dict_ksp['forward_sets'] for m in sublist
                ]
                pos_sp_for.append(len(marked_for))

                logger.info("""Forward graph. Number of positive sps
                            of ksp at iteration {}: {}""".format(
                    i + 1, len(marked_for)))
                # Stop the forward pass once it no longer grows.
                if (i > 0):
                    if (pos_sp_for[-1] <= pos_sp_for[-2]):
                        find_new_forward = False

            if (find_new_backward):
                marked_back = [
                    m for sublist in dict_ksp['backward_sets'] for m in sublist
                ]
                pos_sp_back.append(len(marked_back))

                logger.info("""Backward graph. Number of positive sps of
                                ksp at iteration {}: {} """.format(
                    i + 1, len(marked_back)))
                # Stop the backward pass once it no longer grows.
                if (i > 0):
                    if (pos_sp_back[-1] <= pos_sp_back[-2]):
                        find_new_backward = False

            # Append a snapshot copy: dict_ksp is mutated every iteration,
            # so appending it directly would alias every list entry.
            list_ksp.append(dict(dict_ksp))

            n_pix_ksp = np.sum((ksp_scores_mat > 0).ravel())
            logger.info("""Number hit pixels of ksp at iteration {}:
                        {}""".format(i + 1, n_pix_ksp))

            fileOut = pjoin(cfg.run_dir, 'pm_scores_iter_{}.npz'.format(i))
            data = dict()
            data['ksp_scores_mat'] = ksp_scores_mat
            np.savez(fileOut, **data)

            # Recompute PM values
            if (i + 1 < cfg.n_iters_ksp):
                desc_df = merge_positives(desc_df, marked_for, marked_back)
                pm = utls.calc_pm(desc_df, desc_df['positive'],
                                  cfg.bag_n_feats, cfg.bag_t,
                                  cfg.bag_max_depth, cfg.bag_max_samples,
                                  cfg.bag_jobs)

            i += 1

    # Saving
    fileOut = pjoin(cfg.run_dir, 'results.npz')
    data = dict()
    data['n_iters_ksp'] = cfg.n_iters_ksp
    data['ksp_scores_mat'] = ksp_scores_mat
    data['pm_scores_mat'] = utls.get_pm_array(link_agent.labels, pm)
    data['paths_back'] = dict_ksp['backward_sets']
    data['paths_for'] = dict_ksp['forward_sets']
    logger.info("Saving results and cfg to: " + fileOut)
    np.savez(fileOut, **data)

    logger.info("done")

    logger.info('Finished experiment: ' + cfg.run_dir)

    write_frames_results.main(cfg, cfg.run_dir, logger)
    comp_scores.main(cfg)

    return cfg
Ejemplo n.º 6
0
def main(cfg):
    """Render per-frame panels (annotated image, probability map, thresholded
    map), optionally compute ROC/F1 scores, and return both as a dict with
    keys ``'images'`` and ``'scores'``.
    """
    locs2d = utls.readCsv(
        os.path.join(cfg.in_path, cfg.locs_dir, cfg.locs_fname))

    # ---------- Descriptors/superpixel costs
    spext = SuperpixelExtractor(cfg.in_path,
                                desc_dir=cfg.precomp_dir,
                                compactness=cfg.slic_compactness,
                                n_segments=cfg.slic_n_sp)
    spext.run()

    link_agent, _ = make_link_agent(cfg)

    probas = link_agent.obj_preds
    pm_scores_fg = utls.get_pm_array(link_agent.labels, probas)

    dl = LocPriorDataset(cfg.in_path,
                         normalization='rescale',
                         locs_fname=cfg.locs_fname,
                         sp_labels_fname='sp_labels.npy')

    scores = dict()
    if cfg.do_scores:
        # Resize ground truths to the probability-map resolution.
        shape = pm_scores_fg.shape[1:]
        truths = np.array([
            transform.resize(s['label/segmentation'],
                             shape,
                             preserve_range=True).astype(np.uint8) for s in dl
        ])
        fpr, tpr, _ = roc_curve(truths.flatten(), pm_scores_fg.flatten())
        # NOTE(review): scores are thresholded at 0.5 before
        # precision_recall_curve, so only one operating point is meaningful
        # (taken at index 1 below) — confirm this is intended.
        precision, recall, _ = precision_recall_curve(
            truths.flatten(),
            pm_scores_fg.flatten() >= 0.5)
        precision = precision[1]
        recall = recall[1]
        # F1 with guard against zero precision+recall.
        nom = 2 * (precision * recall)
        denom = (precision + recall)
        if denom > 0:
            f1 = nom / denom
        else:
            f1 = 0.

        auc_ = auc(fpr, tpr)
        scores['f1'] = f1
        scores['auc'] = auc_
        scores['fpr'] = fpr
        scores['tpr'] = tpr

    if (cfg.do_all):
        # Render every frame instead of only the configured subset.
        cfg.fin = np.arange(len(dl))

    ims = []
    pbar = tqdm.tqdm(total=len(cfg.fin))
    for fin in cfg.fin:

        loc = locs2d[locs2d['frame'] == fin]
        if (loc.shape[0] > 0):
            # Frame has a 2D user location: overlay entrance circle,
            # annotated superpixels and ground-truth contour.
            i_in, j_in = link_agent.get_i_j(loc.iloc[0])

            truth = dl[fin]['label/segmentation']
            truth_ct = segmentation.find_boundaries(truth, mode='thick')
            im1 = (255 * dl[fin]['image']).astype(np.uint8)
            rr, cc = draw.circle_perimeter(i_in,
                                           j_in,
                                           int(cfg.norm_neighbor_in *
                                               im1.shape[1]),
                                           shape=im1.shape)
            pos_labels = dl[fin]['annotations']

            pos_sps = [
                dl[fin]['labels'].squeeze() == l for l in pos_labels['label']
            ]

            pos_ct = [segmentation.find_boundaries(p) for p in pos_sps]

            # Annotated superpixel contours in green.
            for p in pos_ct:
                im1[p, ...] = (0, 255, 0)

            # Ground-truth contour in red.
            im1[truth_ct, ...] = (255, 0, 0)

            # Entrance circle in green.
            im1[rr, cc, 0] = 0
            im1[rr, cc, 1] = 255
            im1[rr, cc, 2] = 0

            im1 = csv.draw2DPoint(locs2d.to_numpy(), fin, im1, radius=7)
            ims_ = []
            ims_.append(im1)
            ims_.append(colorize(pm_scores_fg[fin]))
            ims_.append(
                colorize((pm_scores_fg[fin] >= cfg.pm_thr).astype(float)))
            ims.append(ims_)

        else:
            # No location on this frame: image + probability panels only.
            # (A stray fourth all-zeros panel was dropped here so both
            # branches produce the same 3 panels consumed below.)
            im1 = (255 * dl[fin]['image']).astype(np.uint8)
            ims_ = []
            ims_.append(im1)
            ims_.append(colorize(pm_scores_fg[fin]))
            ims_.append(
                colorize((pm_scores_fg[fin] >= cfg.pm_thr).astype(float)))
            ims.append(ims_)

        pbar.update(1)
    pbar.close()

    if (cfg.do_all):
        print('will save all to {}'.format(cfg.save_path))
        if (not os.path.exists(cfg.save_path)):
            os.makedirs(cfg.save_path)
        pbar = tqdm.tqdm(total=len(ims))
        for i, im in enumerate(ims):
            # One PNG per frame, panels concatenated horizontally.
            io.imsave(pjoin(cfg.save_path, 'im_{:04d}.png'.format(i)),
                      np.concatenate(im, axis=1))
            pbar.update(1)
        pbar.close()

    res = dict()
    ims_dicts = []
    for ims_ in ims:
        dict_ = {
            'image': ims_[0],
            'pm': ims_[1],
            'pm_thr': ims_[2],
        }
        ims_dicts.append(dict_)
    res['images'] = ims_dicts
    res['scores'] = scores
    return res