'Dataset30'
}
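
# Groups of dataset variants mapped to the dataset whose masks they share.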
masks_paths_darnet = {
    ('Dataset00', 'Dataset01', 'Dataset02', 'Dataset03', 'Dataset04', 'Dataset05'):
    'Dataset00',
    ('Dataset10', 'Dataset11', 'Dataset12', 'Dataset13'):
    'Dataset10',
    ('Dataset20', 'Dataset21', 'Dataset22', 'Dataset23', 'Dataset24', 'Dataset25'):
    'Dataset20',
    ('Dataset30', 'Dataset31', 'Dataset32', 'Dataset33', 'Dataset34', 'Dataset35'):
    'Dataset30'
}

# Imports assumed from the truncated header of this example.
import numpy as np

from ksptrack.cfgs import params

if __name__ == "__main__":

    p = params.get_params('../cfgs')

    p.add('--out-path', required=True)
    p.add('--root-path', required=True)
    p.add('--sets', nargs='+', required=True)
    p.add('--set-labeled', type=str, required=True)
    p.add('--labeled-frames', nargs='+', type=int, required=True)

    cfg = p.parse_args()

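    # Search grids over the graph-cut gamma and lambda hyper-parameters.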
    gamma_range = np.arange(cfg.gc_gamma_range[0], cfg.gc_gamma_range[1],
                            cfg.gc_gamma_step)
    lambda_range = np.arange(cfg.gc_lambda_range[0], cfg.gc_lambda_range[1],
                             cfg.gc_lambda_step)

    assert (cfg.set_labeled == cfg.sets[0])  # condition truncated in the source
Example #2
# os and pjoin are used below but their imports are missing from this snippet.
import os
from os.path import join as pjoin

from ksptrack import iterative_ksp
from ksptrack.cfgs import params as params_ksp
from ksptrack.siamese import (params, train_autoencoder, train_init_clst,
                              train_siam, train_all)

if __name__ == "__main__":

    p = params.get_params()
    p.add('--out-root', required=True)
    p.add('--in-root', required=True)
    p.add('--train-dirs', nargs='+', required=True)
    p.add('--run-dirs', nargs='+', required=True)
    cfg = p.parse_args()

    p_ksp = params_ksp.get_params('../cfgs')
    p_ksp.add('--out-path')
    p_ksp.add('--in-path')
    p_ksp.add('--siam-path', default='')
    p_ksp.add('--use-siam-pred', default=False, action='store_true')
    p_ksp.add('--siam-trans', default='lfda', type=str)
    cfg_ksp = p_ksp.parse_known_args(env_vars=None)[0]

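    # One run per (run_dir, train_dir) pair, with input and output paths
    # rewired to the matching dataset and results folders.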
    for run_dir, train_dir in zip(cfg.run_dirs, cfg.train_dirs):
        cfg.run_dir = run_dir
        cfg.train_dir = train_dir
        cfg_ksp.cuda = True
        cfg_ksp.in_path = pjoin(cfg.in_root, 'Dataset' + cfg.train_dir)
        cfg_ksp.out_path = pjoin(
            os.path.split(cfg.out_root)[0], 'ksptrack', cfg.run_dir)
Example #3
# Imports, logger, and the main() signature are assumed from the truncated
# header of this example.
import logging
import os

import numpy as np
from skimage import io

from ksptrack.cfgs import params

logger = logging.getLogger(__name__)


def main(cfg):
    out_path = cfg.out_path

    res = np.load(os.path.join(out_path, 'results.npz'))

    frame_dir = os.path.join(out_path, 'results')
    if (not os.path.exists(frame_dir)):
        logger.info('Creating output frame dir: {}'.format(frame_dir))
        os.makedirs(frame_dir)

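    # KSP outputs binary masks; scale them to 8-bit for saving as PNG.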
    scores = (res['ksp_scores_mat'].astype('uint8')) * 255

    for i in range(scores.shape[0]):
        logger.info('{}/{}'.format(i + 1, scores.shape[0]))
        io.imsave(os.path.join(frame_dir, 'im_{:04d}.png'.format(i)),
                  scores[i])

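    # Foreground probability maps, if present, are saved alongside the masks.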
    if ('pm_scores_mat' in res.keys()):
        scores_pm = (res['pm_scores_mat'] * 255.).astype('uint8')
        for i in range(scores_pm.shape[0]):
            logger.info('{}/{}'.format(i + 1, scores_pm.shape[0]))
            io.imsave(os.path.join(frame_dir, 'im_pb_{:04d}.png'.format(i)),
                      scores_pm[i])


if __name__ == "__main__":
    p = params.get_params()

    p.add('--out-path', required=True)

    cfg = p.parse_args()
    main(cfg)
Example #4
# Imports assumed from the truncated header of this example; the ksptrack-internal
# module paths below are best guesses, and train_one_epoch is expected to be
# defined in this same module.
import os
from os.path import join as pjoin

import numpy as np
import torch
import torch.optim as optim
from skimage import io
from torch.utils.tensorboard import SummaryWriter

from ksptrack.cfgs import params as params_ksp
from ksptrack.siamese import prev_trans_costs
from ksptrack.siamese import utils as utls
from ksptrack.siamese import clustering as clst
from ksptrack.siamese.distrib_buffer import DistribBuffer
from ksptrack.siamese.losses import calc_bagging
from ksptrack.siamese.train_init_clst import train_kmeans


def train(cfg, model, device, dataloaders, run_path):

    cp_fname = 'cp_{}.pth.tar'.format(cfg.exp_name)
    best_cp_fname = 'best_{}.pth.tar'.format(cfg.exp_name)
    rags_prevs_path = pjoin(run_path, 'prevs_{}'.format(cfg.exp_name))

    path_ = pjoin(run_path, 'checkpoints', 'init_dec.pth.tar')
    print('loading checkpoint {}'.format(path_))
    state_dict = torch.load(path_, map_location=lambda storage, loc: storage)
    model.load_partial(state_dict)
    # model.dec.autoencoder.to_predictor()

    check_cp_exist = pjoin(run_path, 'checkpoints', best_cp_fname)
    if (os.path.exists(check_cp_exist)):
        print('found checkpoint at {}. Skipping.'.format(check_cp_exist))
        return

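    # Extract superpixel features, then estimate per-superpixel foreground
    # probabilities with a bagging ensemble fit on the positive (labeled) masks.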
    features, pos_masks = clst.get_features(model, dataloaders['all_prev'],
                                            device)
    cat_features = np.concatenate(features)
    cat_pos_mask = np.concatenate(pos_masks)
    print('computing probability map')
    probas = calc_bagging(cat_features,
                          cat_pos_mask,
                          cfg.bag_t,
                          bag_max_depth=cfg.bag_max_depth,
                          bag_n_feats=cfg.bag_n_feats,
                          n_jobs=1)
    probas = torch.from_numpy(probas).to(device)
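    # Re-split the concatenated probabilities into one tensor per frame.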
    n_labels = [
        np.unique(s['labels']).size for s in dataloaders['all_prev'].dataset
    ]
    probas = torch.split(probas, n_labels)

    if (not os.path.exists(rags_prevs_path)):
        os.makedirs(rags_prevs_path)

    p_ksp = params_ksp.get_params('../cfgs')
    p_ksp.add('--siam-path', default='')
    p_ksp.add('--in-path', default='')
    p_ksp.add('--do-all', default=False)
    p_ksp.add('--return-dict', default=False)
    p_ksp.add('--fin', nargs='+')
    cfg_ksp = p_ksp.parse_known_args(env_vars=None)[0]
    cfg_ksp.bag_t = 300
    cfg_ksp.bag_n_feats = cfg.bag_n_feats
    cfg_ksp.bag_max_depth = cfg.bag_max_depth
    cfg_ksp.siam_path = pjoin(run_path, 'checkpoints', 'init_dec.pth.tar')
    cfg_ksp.use_siam_pred = False
    cfg_ksp.use_siam_trans = False
    cfg_ksp.in_path = pjoin(cfg.in_root, 'Dataset' + cfg.train_dir)
    cfg_ksp.precomp_desc_path = pjoin(cfg_ksp.in_path, 'precomp_desc')
    cfg_ksp.fin = [s['frame_idx'] for s in dataloaders['prev'].dataset]

    print('generating previews to {}'.format(rags_prevs_path))
    # prev_ims = prev_trans_costs.main(cfg_ksp)
    # io.imsave(pjoin(rags_prevs_path, 'ep_0000.png'), prev_ims)

    writer = SummaryWriter(run_path)

    best_loss = float('inf')
    print('training for {} epochs'.format(cfg.epochs_dist))

    model.to(device)

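    # One optimizer per sub-module so each branch trains with its own learning rate.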
    optimizers = {
        'feats':
        optim.Adam(params=[{
            'params': model.dec.autoencoder.parameters(),
            'lr': 1e-3,
        }],
                   weight_decay=0),
        'gcns':
        optim.Adam(params=[{
            'params': model.locmotionapp.parameters(),
            'lr': 1e-1,
        }],
                   weight_decay=0),
        'pred':
        optim.Adam(params=[{
            'params': model.rho_dec.parameters(),
            'lr': 1e-3,
        }],
                   weight_decay=0),
        'assign':
        optim.Adam(params=[{
            'params': model.dec.assignment.parameters(),
            'lr': 1e-3,
        }],
                   weight_decay=0),
        'transform':
        optim.Adam(params=[{
            'params': model.dec.transform.parameters(),
            'lr': 1e-3,
        }],
                   weight_decay=0)
    }

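    # Exponential learning-rate decay for every optimizer.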
    lr_sch = {
        'feats':
        torch.optim.lr_scheduler.ExponentialLR(optimizers['feats'],
                                               cfg.lr_power),
        'assign':
        torch.optim.lr_scheduler.ExponentialLR(optimizers['assign'],
                                               cfg.lr_power),
        'pred':
        torch.optim.lr_scheduler.ExponentialLR(optimizers['pred'],
                                               cfg.lr_power),
        'transform':
        torch.optim.lr_scheduler.ExponentialLR(optimizers['transform'],
                                               cfg.lr_power),
        'gcns':
        torch.optim.lr_scheduler.ExponentialLR(optimizers['gcns'],
                                               cfg.lr_power)
    }

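    # Buffer of target distributions (DEC-style), refreshed every
    # tgt_update_period epochs.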
    distrib_buff = DistribBuffer(cfg.tgt_update_period,
                                 thr_assign=cfg.thr_assign)
    distrib_buff.maybe_update(model, dataloaders['all_prev'], device)
    print('Generating connected components graphs')
    edges_list = utls.make_edges_ccl(model,
                                     dataloaders['edges'],
                                     device,
                                     return_signed=True,
                                     add_self_loops=True)

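    # Phase 1 (epoch < epochs_pre_pred): train only the foreground predictor.
    # Phase 2: enable the siamese objectives (clustering, pairwise, regularizer).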
    for epoch in range(cfg.epochs_dist):

        if epoch < cfg.epochs_pre_pred:
            mode = 'pred'
            cfg.fix_clst = True
            cfg.clf = True
            cfg.clf_reg = False
            cfg.pw = False
        else:
            # epoch >= cfg.epochs_pre_pred holds here, so no extra check is needed
            if epoch == cfg.epochs_pre_pred:
                print('training k-means')
                init_clusters, preds, L, feats, labels = train_kmeans(
                    model,
                    dataloaders['all_prev'],
                    device,
                    cfg.n_clusters,
                    embedded_dims=cfg.embedded_dims,
                    reduc_method='pca')
                L = torch.tensor(L).float().to(device)
                init_clusters = torch.tensor(init_clusters,
                                             dtype=torch.float).to(device)

                print('Setting dim reduction and init. clusters')
                model.dec.set_clusters(init_clusters)
                model.dec.set_transform(L.T)
            mode = 'siam'
            cfg.fix_clst = False
            cfg.clf_reg = True
            cfg.pw = True
            if (epoch - cfg.epochs_pre_pred) % cfg.tgt_update_period == 0:
                print('Generating connected components graphs')
                edges_list = utls.make_edges_ccl(model,
                                                 dataloaders['edges'],
                                                 device,
                                                 return_signed=True)
                print('Updating target distributions')
                distrib_buff.do_update(model, dataloaders['all_prev'],
                                       device)

        # save checkpoint
        if (epoch % cfg.cp_period == 0):
            path = pjoin(run_path, 'checkpoints')
            utls.save_checkpoint(
                {
                    'epoch': epoch + 1,
                    'model': model,
                    'best_loss': best_loss,
                },
                False,
                fname_cp=cp_fname,
                fname_bm=best_cp_fname,
                path=path)
        # save previews
        if (epoch % cfg.prev_period == 0) and epoch > 0:
            out_path = rags_prevs_path

            print('generating previews to {}'.format(out_path))

            cfg_ksp.siam_path = pjoin(run_path, 'checkpoints', cp_fname)
            cfg_ksp.use_siam_pred = cfg.clf
            cfg_ksp.use_siam_trans = cfg.pw
            prev_ims = prev_trans_costs.main(cfg_ksp)
            io.imsave(pjoin(out_path, 'ep_{:04d}.png'.format(epoch)), prev_ims)

        print('epoch {}/{}. Mode: {}'.format(epoch, cfg.epochs_dist, mode))
        res = train_one_epoch(model,
                              dataloaders,
                              optimizers,
                              device,
                              distrib_buff,
                              lr_sch,
                              cfg,
                              probas=probas,
                              edges_list=edges_list)

        # write losses to tensorboard
        for k, v in res.items():
            writer.add_scalar(k, v, epoch)

    return model