Example #1
# Standard imports needed by this snippet; cfg, setup_logger, mkdir,
# make_test_loader, build_model and test_inference are project-specific
# helpers from the surrounding repository.
import argparse
import os

import torch
from torch.backends import cudnn


def main():
    parser = argparse.ArgumentParser(description="ReID Baseline Inference")
    parser.add_argument("--config_file",
                        default="",
                        help="path to config file",
                        type=str)
    parser.add_argument("opts",
                        help="Modify config options using the command-line",
                        default=None,
                        nargs=argparse.REMAINDER)

    args = parser.parse_args()

    num_gpus = int(
        os.environ["WORLD_SIZE"]) if "WORLD_SIZE" in os.environ else 1

    if args.config_file != "":
        cfg.merge_from_file(args.config_file)
    cfg.merge_from_list(args.opts)
    cfg.freeze()

    output_dir = cfg.OUTPUT_DIR
    if output_dir and not os.path.exists(output_dir):
        mkdir(output_dir)

    logger = setup_logger("reid_baseline", output_dir, 0)
    logger.info("Using {} GPUS".format(num_gpus))
    logger.info(args)

    if args.config_file != "":
        logger.info("Loaded configuration file {}".format(args.config_file))
        with open(args.config_file, 'r') as cf:
            config_str = "\n" + cf.read()
            logger.info(config_str)
    logger.info("Running with config:\n{}".format(cfg))

    cudnn.benchmark = True
    num_classes = 128  # hard-coded here; must match the identity count the checkpoint was trained with
    test_loader, num_query = make_test_loader(cfg)
    model = build_model(cfg, num_classes)
    model.load_state_dict(torch.load(cfg.TEST.WEIGHT))

    test_inference(cfg, model, test_loader, num_query)
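
The cfg object above follows the merge-then-freeze pattern of a yacs CfgNode; the sketch below assumes yacs and only mirrors the two keys the example actually reads (OUTPUT_DIR and TEST.WEIGHT).

# Minimal sketch of the config flow in Example #1, assuming cfg is a yacs CfgNode.
from yacs.config import CfgNode as CN

cfg = CN()
cfg.OUTPUT_DIR = ""        # defaults; overridden by the YAML file and CLI opts
cfg.TEST = CN()
cfg.TEST.WEIGHT = ""

def load_config(config_file, opts):
    if config_file:
        cfg.merge_from_file(config_file)   # YAML values override the defaults
    cfg.merge_from_list(opts)              # e.g. ["TEST.WEIGHT", "model.pth"]
    cfg.freeze()                           # no further mutation allowed
    return cfg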
Example #2
# Imports needed by this snippet; make_test_loader and accumulate_video_pred are
# project-specific helpers defined alongside this script, and config_files is a
# list of default config paths built as in Example #4.
import numpy as np

from neon.models import Model
from neon.util.argparser import NeonArgparser

parser = NeonArgparser(__doc__, default_config_files=config_files)
args = parser.parse_args()

assert args.model_file is not None, "need a model file for testing"
model = Model(args.model_file)

assert 'test' in args.manifest, "Missing test manifest"
assert 'categories' in args.manifest, "Missing categories file"

category_map = {
    t[0].decode(): t[1]
    for t in np.genfromtxt(
        args.manifest['categories'], dtype=None, delimiter=',')
}

test = make_test_loader(args.manifest['test'], args.manifest_root, model.be)

clip_pred = model.get_outputs(test)

video_pred = accumulate_video_pred(args.manifest['test'], args.manifest_root,
                                   clip_pred)

correct = np.zeros((len(video_pred), 2))

TOP1, TOP5 = 0, 1  # indices in correct count array (for readability)

for idx, (video_name, (label,
                       prob_list)) in enumerate(list(video_pred.items())):
    # Average probabilities for each clip
    tot_prob = np.sum(prob_list, axis=0)
    label_idx = category_map[label]
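
A toy illustration of the summation step above, with made-up numbers: the clip-level probability vectors for one video are summed along axis 0, and the argmax of the total becomes the video-level prediction.

import numpy as np

# Illustrative clip-level softmax outputs for one video (3 clips, 4 categories).
prob_list = [np.array([0.1, 0.6, 0.2, 0.1]),
             np.array([0.2, 0.5, 0.2, 0.1]),
             np.array([0.3, 0.3, 0.3, 0.1])]

tot_prob = np.sum(prob_list, axis=0)   # -> [0.6, 1.4, 0.7, 0.3] (up to float rounding)
print(tot_prob.argmax())               # 1 -> predicted category index for the video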
Example #3
# Imports needed by this snippet; create_network and the loader helpers are
# project-specific functions defined alongside this script.
from neon.callbacks.callbacks import Callbacks
from neon.optimizers import GradientDescentMomentum, MultiOptimizer, Schedule
from neon.util.argparser import NeonArgparser

parser = NeonArgparser(__doc__)  # Example #5 also passes default_config_files
parser.add_argument('--subset_pct',
                    type=float,
                    default=100,
                    help='subset of training dataset to use (percentage)')
args = parser.parse_args()

random_seed = 0 if args.rng_seed is None else args.rng_seed
model, cost = create_network()

# setup data provider
assert 'train' in args.manifest, "Missing train manifest"
assert 'test' in args.manifest, "Missing validation manifest"

train = make_train_loader(args.manifest['train'], args.manifest_root, model.be,
                          args.subset_pct, random_seed)
valid = make_test_loader(args.manifest['test'], args.manifest_root, model.be,
                         args.subset_pct)

# setup callbacks
callbacks = Callbacks(model, eval_set=valid, **args.callback_args)

# gradient descent with momentum, weight decay, and learning rate decay schedule
learning_rate_sched = Schedule(list(range(6, args.epochs, 6)), 0.1)
opt_gdm = GradientDescentMomentum(0.003,
                                  0.9,
                                  wdecay=0.005,
                                  schedule=learning_rate_sched)
opt_biases = GradientDescentMomentum(0.006, 0.9, schedule=learning_rate_sched)
opt = MultiOptimizer({'default': opt_gdm, 'Bias': opt_biases})

# train model
model.fit(train,
          optimizer=opt,
          num_epochs=args.epochs,
          cost=cost,
          callbacks=callbacks)
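
A sketch of what the step schedule above presumably does, assuming the usual step-decay interpretation in which the learning rate is multiplied by 0.1 at every epoch listed in the schedule.

# Hedged sketch of the decay produced by Schedule(list(range(6, epochs, 6)), 0.1),
# assuming the rate is multiplied by 0.1 at each listed epoch.
def effective_lr(base_lr, epoch, steps, change=0.1):
    return base_lr * change ** sum(epoch >= s for s in steps)

steps = list(range(6, 18, 6))                  # [6, 12] for an 18-epoch run
for epoch in range(18):
    print(epoch, effective_lr(0.003, epoch, steps))
# epochs 0-5 train at 0.003, epochs 6-11 at 0.0003, epochs 12-17 at 0.00003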
Example #4
# Imports as in Example #2 (numpy, NeonArgparser, Model and the project's loader
# helpers), plus os for the path handling below.
import os

test_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'test.cfg')
config_files = [test_config] if os.path.exists(test_config) else []

parser = NeonArgparser(__doc__, default_config_files=config_files)
args = parser.parse_args()

assert args.model_file is not None, "need a model file for testing"
model = Model(args.model_file)

assert 'test' in args.manifest, "Missing test manifest"
assert 'categories' in args.manifest, "Missing categories file"

category_map = {t[0].decode(): t[1] for t in np.genfromtxt(args.manifest['categories'],
                                                           dtype=None, delimiter=',')}

test = make_test_loader(args.manifest['test'], args.manifest_root, model.be)

clip_pred = model.get_outputs(test)

video_pred = accumulate_video_pred(args.manifest['test'], args.manifest_root, clip_pred)

correct = np.zeros((len(video_pred), 2))

TOP1, TOP5 = 0, 1  # indices in correct count array (for readability)

for idx, (video_name, (label, prob_list)) in enumerate(list(video_pred.items())):
    # Average probabilities for each clip
    tot_prob = np.sum(prob_list, axis=0)
    label_idx = category_map[label]

    correct[idx, TOP1] = (label_idx == tot_prob.argmax())
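
A hedged sketch of how the loop above might be finished: the argsort-based top-5 check and the final mean over videos are assumptions, not taken from the original file.

# Hedged continuation of the per-video loop; the top-5 test and the mean over
# videos are assumptions.
for idx, (video_name, (label, prob_list)) in enumerate(video_pred.items()):
    tot_prob = np.sum(prob_list, axis=0)
    label_idx = category_map[label]
    correct[idx, TOP1] = (label_idx == tot_prob.argmax())
    correct[idx, TOP5] = label_idx in np.argsort(tot_prob)[-5:]

top1_acc, top5_acc = correct.mean(axis=0)
print('Top-1: %.1f%%  Top-5: %.1f%%' % (top1_acc * 100, top5_acc * 100))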
Example #5
# config_files: optional default config paths, built as in Example #4. Imports
# mirror Examples #2 and #3, plus Accuracy and neon_logger from the neon package.
parser = NeonArgparser(__doc__, default_config_files=config_files)
parser.add_argument('--subset_pct', type=float, default=100,
                    help='subset of training dataset to use (percentage)')
args = parser.parse_args()

random_seed = 0 if args.rng_seed is None else args.rng_seed
model, cost = create_network()

# setup data provider
assert 'train' in args.manifest, "Missing train manifest"
assert 'test' in args.manifest, "Missing validation manifest"

train = make_train_loader(args.manifest['train'], args.manifest_root, model.be, args.subset_pct,
                          random_seed)
valid = make_test_loader(args.manifest['test'], args.manifest_root, model.be, args.subset_pct)

# setup callbacks
callbacks = Callbacks(model, eval_set=valid, **args.callback_args)

# gradient descent with momentum, weight decay, and learning rate decay schedule
learning_rate_sched = Schedule(list(range(6, args.epochs, 6)), 0.1)
opt_gdm = GradientDescentMomentum(0.003, 0.9, wdecay=0.005, schedule=learning_rate_sched)
opt_biases = GradientDescentMomentum(0.006, 0.9, schedule=learning_rate_sched)
opt = MultiOptimizer({'default': opt_gdm, 'Bias': opt_biases})

# train model
model.fit(train, optimizer=opt, num_epochs=args.epochs, cost=cost, callbacks=callbacks)

# output accuracies
neon_logger.display('Train Accuracy = %.1f%%' % (model.eval(train, metric=Accuracy()) * 100))
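
The same evaluation call presumably applies to the validation loader built earlier; a hedged mirror of the line above:

# Hedged sketch mirroring the train-accuracy line for the validation loader.
neon_logger.display('Validation Accuracy = %.1f%%' %
                    (model.eval(valid, metric=Accuracy()) * 100))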