# Build the train/test splits from the CLI-configured dataset.
dataset_train, dataset_test = get_dataset(args)

# Per-experiment wall-clock budget for the HPO search, in seconds.
# FIX: 5 * 60 * 60 seconds is 5 hours — the original comment said "5 days".
time_limits = 5 * 60 * 60  # 5 hours
epochs = 12

# Use COCO pre-trained weights when fitting a custom dataset; the standard
# VOC/COCO benchmarks train without transfer.
transfer = None if ('voc' in args.dataset_name) or (
    'coco' in args.dataset_name) else 'coco'

detector = task.fit(dataset_train,
                    num_trials=30,
                    epochs=epochs,
                    net=ag.Categorical('darknet53', 'mobilenet1.0'),
                    meta_arch=args.meta_arch,
                    lr=ag.Categorical(1e-2, 5e-3, 1e-3, 5e-4, 1e-4, 5e-5),
                    transfer=transfer,
                    data_shape=ag.Categorical(320, 416),
                    nthreads_per_trial=16,
                    ngpus_per_trial=1,
                    batch_size=8,
                    lr_decay_epoch=ag.Categorical('80,90', '85,95'),
                    warmup_epochs=ag.Int(1, 10),
                    warmup_iters=ag.Int(250, 1000),
                    wd=ag.Categorical(1e-4, 5e-4, 2.5e-4),
                    syncbn=ag.Bool(),
                    label_smooth=ag.Bool(),
                    time_limits=time_limits,
                    dist_ip_addrs=[])

# NOTE(review): indexing test_map[1][1] presumably picks the overall mAP out
# of a (metric names, values) structure returned by evaluate() — confirm
# against the detector's evaluate() contract.
test_map = detector.evaluate(dataset_test)
print("mAP on test dataset: {}".format(test_map[1][1]))
# NOTE(review): incomplete fragment — this is the tail of a fit()/config
# keyword dict whose opening brace and first key (presumably 'lr', given the
# leading ag.Categorical of learning rates) are outside this view. Left
# byte-identical; reassemble with its head before editing.
# NOTE(review): 'dist_ip_addrs': None here vs [] in the sibling scripts —
# verify both spellings are accepted by the fit API.
ag.Categorical(5e-3, 2e-3, 5e-4, 2e-4), 'batch_size': 4, 'epochs': ag.Categorical(30, 40, 50, 60), 'nthreads_per_trial': 16, 'ngpus_per_trial': 4, 'data_shape': [640, 800], 'lr_decay_epoch': ag.Categorical([24, 28], [35], [50, 55], [40], [45], [55], [30, 35], [20]), # 'warmup_epochs': 2, 'warmup_iters': ag.Int(5, 500), 'wd': ag.Categorical(5e-4, 2.5e-4, 1e-4), 'transfer': transfer, 'label_smooth': False, 'syncbn': ag.Bool(), 'num_trials': 30, 'time_limits': time_limits, 'dist_ip_addrs': None }
# NOTE(review): incomplete fragment cut at BOTH ends — it begins with a bare
# value (False) whose key is missing, and ends at 'warmup_lr': with no value.
# It appears to be the middle of another training-config dict (trainer
# options: checkpointing, lr schedule, warmup). Left byte-identical; do not
# edit until rejoined with its surrounding lines.
False, 'data_shape': (640, 800), 'nthreads_per_trial': 12, 'verbose': False, 'ngpus_per_trial': 4, 'batch_size': 4, 'hybridize': True, 'lr_decay_epoch': ag.Categorical([24, 28], [35], [50, 55], [40], [45], [55], [30, 35], [20]), 'warmup_iters': ag.Int(5, 500), 'resume': False, 'checkpoint': 'checkpoint/exp1.ag', 'visualizer': 'none', 'start_epoch': 0, 'lr_mode': 'step', 'lr_decay': 0.1, 'lr_decay_period': 0, 'warmup_lr':
# CLI options for selecting the dataset splits and classes.
parser.add_argument('--index-file-name-trainval', type=str, default='',
                    help="name of txt file which contains images for training and validation ")
parser.add_argument('--index-file-name-test', type=str, default='',
                    help="name of txt file which contains images for testing")
# NOTE(review): type=tuple applied to a CLI string splits it into characters
# (tuple('cat') -> ('c','a','t')); if --classes is ever passed on the command
# line this is almost certainly wrong — confirm how get_dataset consumes
# args.classes before changing the type.
parser.add_argument('--classes', type=tuple, default=None,
                    help="classes for custom classes")
# FIX: help text was inverted — with store_true this flag SKIPS re-download.
parser.add_argument('--no-redownload', action='store_true',
                    help="skip re-downloading the dataset")
args = parser.parse_args()
logging.info('args: {}'.format(args))

dataset_train, dataset_test = get_dataset(args)

# Wall-clock budget for the HPO search, in seconds.
# FIX: 5*60*60 seconds is 5 hours — the original comment said "5 days".
time_limits = 5 * 60 * 60  # 5 hours
epochs = 1

detector = task.fit(dataset_train,
                    num_trials=30,
                    epochs=epochs,
                    net=ag.Categorical('darknet53', 'mobilenet1.0'),
                    lr=ag.Categorical(1e-2, 5e-3, 1e-3, 5e-4, 1e-4, 5e-5),
                    data_shape=ag.Categorical(320, 416),
                    ngpus_per_trial=1,
                    batch_size=8,
                    lr_decay_epoch=ag.Categorical('80,90', '85,95'),
                    warmup_epochs=ag.Int(1, 10),
                    syncbn=ag.Bool(),
                    label_smooth=ag.Bool(),
                    time_limits=time_limits,
                    dist_ip_addrs=[])

# NOTE(review): test_map[1][1] presumably selects the overall mAP from a
# (metric names, values) structure — confirm against evaluate()'s contract.
test_map = detector.evaluate(dataset_test)
print("mAP on test dataset: {}".format(test_map[1][1]))