def Argments(): parser = argparse.ArgumentParser( description='Single Shot MultiBox Detector Training With Pytorch') # Train params parser.add_argument('--batch_size', default=33, type=int, help='Batch size for training') parser.add_argument('--num_epochs', default=121, type=int, help='the number epochs') parser.add_argument('--num_workers', default=16, type=int, help='Number of workers used in dataloading') parser.add_argument('--config', default='config/prague_combine_balance.yaml', help='configuration') args = parser.parse_args() print(args) configuration = load_model_configuration(args.config) configuration["flow_control"] = {} variable_dict = vars(args) for key in variable_dict.keys(): configuration["flow_control"][key] = variable_dict[key] return configuration
def Arguments(): parser = argparse.ArgumentParser( description="SSD Evaluation on VOC Dataset.") parser.add_argument( '--net', default="vgg16-ssd", help= "The network architecture, it should be of mb1-ssd, mb1-ssd-lite, mb2-ssd-lite or vgg16-ssd." ) parser.add_argument("--trained_model", type=str) parser.add_argument( "--dataset_type", default="voc", type=str, help='Specify dataset type. Currently support voc and open_images.') parser.add_argument( "--dataset", type=str, help= "The root directory of the VOC dataset or Open Images dataset or coco dataset or ecp dataset." ) parser.add_argument("--label_file", type=str, help="The label file path.") parser.add_argument("--use_cuda", type=str2bool, default=True) parser.add_argument("--use_2007_metric", type=str2bool, default=True) parser.add_argument("--nms_method", type=str, default="hard") parser.add_argument("--iou_threshold", type=float, default=0.5, help="The threshold of Intersection over Union.") parser.add_argument("--eval_dir", default="../experiments/eval_results", type=str, help="The directory to store evaluation results.") parser.add_argument('--mb2_width_mult', default=1.0, type=float, help='Width Multiplifier for MobilenetV2') parser.add_argument('--config', default='config/default_setting.yaml', type=str, help='Configuration') args = parser.parse_args() print(args) configuration = load_model_configuration(args.config) configuration["flow_control"] = {} variable_dict = vars(args) for key in variable_dict.keys(): configuration['flow_control'][key] = variable_dict[key] return configuration
def Argments(): parser = argparse.ArgumentParser( description='Single Shot MultiBox Detector Training With Pytorch') parser.add_argument( "--dataset_type", default="voc", type=str, help= 'Specify dataset type. Currently support voc, open_images, ecp, ecp-random and ecp-centroid.' ) parser.add_argument( '--net', default="vgg17-ssd", help= "The network architecture, it can be mb2-ssd, mb1-lite-ssd, mb2-ssd-lite or vgg16-ssd." ) parser.add_argument('--freeze_base_net', action='store_true', help="Freeze base net layers.") parser.add_argument( '--freeze_net', action='store_true', help="Freeze all the layers except the prediction head.") # Params for SGD # Params for loading pretrained basenet or checkpoints. parser.add_argument('--base_net', help='Pretrained base model') parser.add_argument('--pretrained_ssd', help='Pre-trained base model') parser.add_argument( '--resume', default=None, type=str, help='Checkpoint state_dict file to resume training from') # Train params parser.add_argument('--batch_size', default=33, type=int, help='Batch size for training') parser.add_argument('--num_epochs', default=121, type=int, help='the number epochs') parser.add_argument('--num_workers', default=16, type=int, help='Number of workers used in dataloading') parser.add_argument('--validation_epochs', default=6, type=int, help='the number epochs') parser.add_argument('--debug_steps', default=101, type=int, help='Set the debug log output frequency.') parser.add_argument('--use_cuda', default=True, type=bool, help='Use CUDA to train model') parser.add_argument('--checkpoint_folder', default='../experiments/models', type=str, help='Directory for saving checkpoint models') parser.add_argument('--config', default='config/default_setting.yaml', help='configuration') parser.add_argument('--dataset_ratio', default=0.1, help="Initial set partial dataset ratio") parser.add_argument('--sample_method', type=str, default='random', help="random, sequencial, uncertainty") args = parser.parse_args() print(args) configuration = load_model_configuration(args.config) configuration["flow_control"] = {} variable_dict = vars(args) for key in variable_dict.keys(): configuration["flow_control"][key] = variable_dict[key] return configuration