Example #1
def main():

    args = parse_args()
    model_path = pjoin(cfg.checkpoints_folder, args.dataset, args.model)

    params_file, params_dict = parse_model(model_path)

    ctx = mx.gpu()
    train_dataset, val_dataset, eval_metric = get_dataset(args.dataset)

    # rebuild the architecture, then overwrite its weights with the trained
    # checkpoint; force_reinit gives a clean slate before loading
    net = get_model(params_dict['base_model'],
                    pretrained=True,
                    norm_layer=gluon.nn.BatchNorm)
    net.reset_class(classes=train_dataset.classes)
    net.initialize(force_reinit=True)
    net.load_parameters(pjoin(model_path, params_file))

    # the dataloader only needs the network to generate anchors; reuse it
    async_net = net

    if args.type.lower() == 'ssd':
        train_data, val_data = get_ssd_dataloader(
            async_net,
            train_dataset,
            val_dataset,
            params_dict['data_shape'],
            params_dict['batch_size'],
            args.num_workers,
            bilateral_kernel_size=args.bilateral_kernel_size,
            sigma_vals=args.sigma_vals,
            grayscale=args.grayscale)
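# Hypothetical sketch of the parse_model helper used above; its body is not
# shown, so this assumes the checkpoint folder holds one .params file plus a
# JSON dump of the hyperparameters (file layout and key names are guesses):
import json
import os
from os.path import join as pjoin

def parse_model(model_path):
    # pick the saved weights file and read back the hyperparameters the
    # snippet consumes ('base_model', 'data_shape', 'batch_size')
    params_file = next(f for f in os.listdir(model_path)
                       if f.endswith('.params'))
    with open(pjoin(model_path, 'params.json')) as f:
        params_dict = json.load(f)
    return params_file, params_dict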
Example #2
def main():

    args = parse_args()

    if args.ctx == 'gpu':
        ctx = mx.gpu()
    elif args.ctx == 'cpu':
        ctx = mx.cpu()
    else:
        raise ValueError(f'ctx {args.ctx} invalid.')

    dataset = args.dataset
    model = args.model
    
    _, val_ds, _ = get_dataset(dataset)
    print(f'Predicting {dataset} dataset ({len(val_ds)} examples) with {model} model.')
    net, trans = load_model(args.model_type, model, dataset)
    if args.benchmark:
        times = benchmark(net, val_ds, trans, ctx=ctx)
        folder = pjoin(cfg.gen_data_folder, 'benchmark')
        os.makedirs(folder, exist_ok=True)
        with open(pjoin(folder, f'{model}_{dataset}_{args.ctx}.pkl'), 'wb') as f:
            pickle.dump(times, f)
    else:
        preds, labels = get_predictions(net, val_ds, trans, ctx=ctx)
        folder = pjoin(cfg.gen_data_folder, 'predictions')
        os.makedirs(folder, exist_ok=True)
        with open(pjoin(folder, f'{model}_{dataset}.pkl'), 'wb') as f:
            pickle.dump({'preds': preds, 'labels': labels}, f)
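# Reading the artifacts back is symmetric; a minimal sketch, assuming the
# same cfg.gen_data_folder layout and with model/dataset set as above:
import pickle
from os.path import join as pjoin

with open(pjoin(cfg.gen_data_folder, 'predictions',
                f'{model}_{dataset}.pkl'), 'rb') as f:
    data = pickle.load(f)
preds, labels = data['preds'], data['labels']  # exactly as dumped above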
Example #3
    def test_train_test_different(self):

        # no image file may appear in both the train and validation splits
        for dataset_name in cfg.dataset_names:
            trn_ds, val_ds, _ = get_dataset(dataset_name)
            self.assertTrue(set(trn_ds.fns).isdisjoint(val_ds.fns))
Example #4
    def test_split(self):

        # every non-mixed dataset should use roughly a 70/30
        # train/validation split
        for dataset_name in cfg.dataset_names:
            if 'mixed' not in dataset_name:
                trn_ds, val_ds, _ = get_dataset(dataset_name)
                trn, val = len(trn_ds), len(val_ds)
                self.assertAlmostEqual(trn / (trn + val), 0.7, places=2)
Example #5
from copy import deepcopy as copy
from easydict import EasyDict as edict
from sklearn import metrics
from sklearn.metrics import classification_report
from traindet import get_class_preds, build_confusion_matrix, calc_detection
import seaborn as sns
sns.set_style('whitegrid')

# %%
article_root = pjoin(cfg.data_folder, 'article')
out_folder = pjoin(article_root, 'tables')
outimg_folder = pjoin(article_root, 'figures')

# %%
_, val_ds_real, _ = get_dataset('real')
_, val_ds, _ = get_dataset('synth_small_bg')
print('Validation set sizes:', len(val_ds), len(val_ds_real))

# %%
models = 'ssd_default,yolo_default,frcnn_default'.split(',')
datasets = 'real,synth_small_printer,synth_small_nobg'.split(',')

datasets_form = ['Real', 'Synth', 'Synth No Bg.']
models_form = ['FRCNN', 'SSD', 'YOLOv3']

# %%
data = load_predictions('ssd_default,yolo_default,frcnn_default',
                        'real,synth_small_printer,synth_small_nobg')
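# %%
# The sklearn imports above feed the per-class tables; a minimal sketch of
# that step with placeholder label arrays (the real pipeline extracts them
# from `data` via get_class_preds / build_confusion_matrix, whose
# signatures are not shown here):
import numpy as np
y_true = np.array([0, 1, 1, 2])  # placeholder ground-truth class ids
y_pred = np.array([0, 1, 2, 2])  # placeholder predicted class ids
print(classification_report(y_true, y_pred))
print(metrics.confusion_matrix(y_true, y_pred))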

Example #6
        save_params(net, logger, best_map, current_map, epoch, args.save_interval, args.save_prefix)
        end_epoch_time = time.time()
        logger.info('Epoch time {:.3f}'.format(end_epoch_time - start_epoch_time))
    end_train_time = time.time()
    logger.info('Train time {:.3f}'.format(end_train_time - start_train_time))

if __name__ == '__main__':
    args = parse_args()
    # fix seed for mxnet, numpy and python builtin random generator.
    gutils.random.seed(args.seed)

    # training contexts
    ctx = [mx.gpu(int(i)) for i in args.gpus.split(',') if i.strip()]
    ctx = ctx if ctx else [mx.cpu()]

    train_dataset, val_dataset, eval_metric = get_dataset(args.dataset, args.mixup)
    args.num_samples = len(train_dataset)
    
    if args.transfer:
        net_name = f'transfer_{args.data_shape}_{args.base_model}'
    else:
        net_name = f'scratch_{args.data_shape}_{args.base_model}'

    args.save_prefix += net_name

    if args.transfer:
        net = get_model(args.base_model, pretrained=True)
    else:
        net = get_model(args.base_model, pretrained_base=True)
        net.initialize()
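# Hypothetical sketch of the save_params helper called in the epoch loop of
# Example #6; its body is not shown, so this mirrors the usual GluonCV
# pattern of keeping the best-mAP weights plus periodic snapshots:
def save_params(net, logger, best_map, current_map, epoch, save_interval, prefix):
    current_map = float(current_map)
    if current_map > best_map[0]:
        # best_map is a one-element list so the update persists across epochs
        best_map[0] = current_map
        net.save_parameters(f'{prefix}_best.params')
        logger.info(f'New best mAP {current_map:.4f} at epoch {epoch}')
    if save_interval and epoch % save_interval == 0:
        net.save_parameters(f'{prefix}_{epoch:04d}_{current_map:.4f}.params')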
Example #7
if __name__ == '__main__':
    import sys

    # bump the recursion limit slightly; parts of this training pipeline
    # can exceed the interpreter default
    sys.setrecursionlimit(1100)
    args = parse_args()
    # fix seed for mxnet, numpy and python builtin random generator.
    gutils.random.seed(args.seed)

    # training contexts
    ctx = [mx.gpu(int(i)) for i in args.gpus.split(',') if i.strip()]
    ctx = ctx if ctx else [mx.cpu()]
    args.batch_size = len(ctx)  # 1 batch per device

    # training data
    train_dataset, val_dataset, eval_metric = get_dataset(args.dataset,
                                                          args.mixup,
                                                          tclass=args.tclass)

    if args.transfer:
        net_name = f'transfer_{args.base_model}'
    else:
        net_name = f'scratch_{args.base_model}'

    args.save_prefix += net_name

    if args.transfer:
        net = get_model(args.base_model, pretrained=True, ctx=ctx)
    else:
        net = get_model(args.base_model, pretrained_base=True, ctx=ctx)

    net.reset_class(classes=train_dataset.classes)
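# Hypothetical sketch of the parse_args these __main__ blocks rely on; the
# real implementation is not shown, so flag names are inferred from usage:
import argparse

def parse_args():
    parser = argparse.ArgumentParser(description='Train a detection model.')
    parser.add_argument('--gpus', type=str, default='0',
                        help='comma-separated GPU ids; empty string for CPU')
    parser.add_argument('--seed', type=int, default=42)
    parser.add_argument('--dataset', type=str, default='real')
    parser.add_argument('--base-model', type=str,
                        default='yolo3_darknet53_coco')
    parser.add_argument('--data-shape', type=int, default=512)
    parser.add_argument('--transfer', action='store_true')
    parser.add_argument('--mixup', action='store_true')
    parser.add_argument('--tclass', type=str, default=None)
    parser.add_argument('--save-prefix', type=str, default='')
    return parser.parse_args()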