def add_phase_arguments(parser):
    """Add the compilation-phase selection options to *parser*.

    Installs an argument group with a mutually exclusive sub-group, so the
    explicit '-p/--phases' list and the per-phase shorthand flags
    ('--scan', '--parse', ...) cannot be combined on one command line.

    Args:
        parser: an argparse.ArgumentParser (or compatible) to extend.
    """
    phases = parser.add_argument_group(title='phases of compilation')
    # These options *set* the phases, so they are mutually exclusive.
    ph_mutex = phases.add_mutually_exclusive_group()
    ph_mutex.add_argument('-p', '--phases', nargs='+', metavar="PHASE",
                          choices=PHASES, default=PHASES)
    # One shorthand flag per phase prefix: '--scan' selects PHASES[:1],
    # '--parse' selects PHASES[:2], and so on.
    # BUGFIX: the help text previously referred to a nonexistent '--phase'
    # option; the real option defined above is '--phases'.
    shorthands = ('--scan', '--parse', '--imports', '--macros', '--types', '--eval')
    for count, flag in enumerate(shorthands, start=1):
        ph_mutex.add_argument(flag, action='store_const', dest='phases',
                              const=PHASES[:count],
                              help='shorthand for --phases ' + ' '.join(PHASES[:count]))
def init_classifier_compression_arg_parser():
    """Build the common classifier-compression command-line argument parser.

    Returns:
        argparse.ArgumentParser: a parser pre-populated with dataset,
        training, optimizer, checkpoint-resuming, reporting, sensitivity
        and post-training-quantization arguments shared by the
        classifier-compression applications.
    """
    SUMMARY_CHOICES = ['sparsity', 'compute', 'model', 'modules', 'png', 'png_w_params']

    parser = argparse.ArgumentParser(description='Distiller image classification model compression')
    parser.add_argument('data', metavar='DIR', help='path to dataset')
    parser.add_argument('--arch', '-a', metavar='ARCH', default='resnet18', type=lambda s: s.lower(),
                        choices=models.ALL_MODEL_NAMES,
                        help='model architecture: ' + ' | '.join(models.ALL_MODEL_NAMES) +
                        ' (default: resnet18)')
    parser.add_argument('-j', '--workers', default=4, type=int, metavar='N',
                        help='number of data loading workers (default: 4)')
    # BUGFIX: help string was missing its closing parenthesis ("(default: 90").
    parser.add_argument('--epochs', type=int, metavar='N', default=90,
                        help='number of total epochs to run (default: 90)')
    parser.add_argument('-b', '--batch-size', default=256, type=int, metavar='N',
                        help='mini-batch size (default: 256)')

    optimizer_args = parser.add_argument_group('Optimizer arguments')
    optimizer_args.add_argument('--lr', '--learning-rate', default=0.1, type=float,
                                metavar='LR', help='initial learning rate')
    optimizer_args.add_argument('--momentum', default=0.9, type=float, metavar='M',
                                help='momentum')
    optimizer_args.add_argument('--weight-decay', '--wd', default=1e-4, type=float,
                                metavar='W', help='weight decay (default: 1e-4)')

    parser.add_argument('--print-freq', '-p', default=10, type=int, metavar='N',
                        help='print frequency (default: 10)')
    parser.add_argument('--verbose', '-v', action='store_true',
                        help='Emit debug log messages')

    load_checkpoint_group = parser.add_argument_group('Resuming arguments')
    load_checkpoint_group_exc = load_checkpoint_group.add_mutually_exclusive_group()
    # TODO(barrh): args.deprecated_resume is deprecated since v0.3.1
    load_checkpoint_group_exc.add_argument('--resume', dest='deprecated_resume', default='',
                                           type=str, metavar='PATH', help=argparse.SUPPRESS)
    load_checkpoint_group_exc.add_argument('--resume-from', dest='resumed_checkpoint_path',
                                           default='', type=str, metavar='PATH',
                                           help='path to latest checkpoint. Use to resume paused training session.')
    load_checkpoint_group_exc.add_argument('--exp-load-weights-from', dest='load_model_path',
                                           default='', type=str, metavar='PATH',
                                           help='path to checkpoint to load weights from (excluding other fields) (experimental)')
    load_checkpoint_group.add_argument('--pretrained', dest='pretrained', action='store_true',
                                       help='use pre-trained model')
    load_checkpoint_group.add_argument('--reset-optimizer', action='store_true',
                                       help='Flag to override optimizer if resumed from checkpoint. This will reset epochs count.')

    parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true',
                        help='evaluate model on test set')
    parser.add_argument('--activation-stats', '--act-stats', nargs='+', metavar='PHASE', default=list(),
                        help='collect activation statistics on phases: train, valid, and/or test'
                        ' (WARNING: this slows down training)')
    parser.add_argument('--activation-histograms', '--act-hist',
                        type=distiller.utils.float_range_argparse_checker(exc_min=True),
                        metavar='PORTION_OF_TEST_SET',
                        help='Run the model in evaluation mode on the specified portion of the test dataset and '
                        'generate activation histograms. NOTE: This slows down evaluation significantly')
    parser.add_argument('--masks-sparsity', dest='masks_sparsity', action='store_true', default=False,
                        help='print masks sparsity table at end of each epoch')
    parser.add_argument('--param-hist', dest='log_params_histograms', action='store_true', default=False,
                        help='log the parameter tensors histograms to file '
                        '(WARNING: this can use significant disk space)')
    # BUGFIX: the help string was "'...options: | '.join(SUMMARY_CHOICES)" -- the
    # missing "+ ' | '" made .join() use the entire help text as the separator.
    parser.add_argument('--summary', type=lambda s: s.lower(), choices=SUMMARY_CHOICES, action='append',
                        help='print a summary of the model, and exit - options: ' + ' | '.join(SUMMARY_CHOICES))
    parser.add_argument('--export-onnx', action='store', nargs='?', type=str, const='model.onnx', default=None,
                        help='export model to ONNX format')
    parser.add_argument('--compress', dest='compress', type=str, nargs='?', action='store',
                        help='configuration file for pruning the model (default is to use hard-coded schedule)')
    parser.add_argument('--sense', dest='sensitivity', choices=['element', 'filter', 'channel'],
                        type=lambda s: s.lower(),
                        help='test the sensitivity of layers to pruning')
    parser.add_argument('--sense-range', dest='sensitivity_range', type=float, nargs=3,
                        default=[0.0, 0.95, 0.05],
                        help='an optional parameter for sensitivity testing '
                        'providing the range of sparsities to test.\n'
                        'This is equivalent to creating sensitivities = np.arange(start, stop, step)')
    parser.add_argument('--extras', default=None, type=str,
                        help='file with extra configuration information')
    parser.add_argument('--deterministic', '--det', action='store_true',
                        help='Ensure deterministic execution for re-producible results.')
    parser.add_argument('--seed', type=int, default=None,
                        help='seed the PRNG for CPU, CUDA, numpy, and Python')
    parser.add_argument('--gpus', metavar='DEV_ID', default=None,
                        help='Comma-separated list of GPU device IDs to be used '
                        '(default is to use all available devices)')
    parser.add_argument('--cpu', action='store_true', default=False,
                        help='Use CPU only. \n'
                        'Flag not set => uses GPUs according to the --gpus flag value.'
                        'Flag set => overrides the --gpus flag')
    parser.add_argument('--name', '-n', metavar='NAME', default=None, help='Experiment name')
    parser.add_argument('--out-dir', '-o', dest='output_dir', default='logs',
                        help='Path to dump logs and checkpoints')
    # NOTE(review): float_range is presumably an alias for
    # distiller.utils.float_range_argparse_checker (used above) -- confirm.
    parser.add_argument('--validation-split', '--valid-size', '--vs', dest='validation_split',
                        type=float_range(exc_max=True), default=0.1,
                        help='Portion of training dataset to set aside for validation')
    parser.add_argument('--effective-train-size', '--etrs', type=float_range(exc_min=True), default=1.,
                        help='Portion of training dataset to be used in each epoch. '
                        'NOTE: If --validation-split is set, then the value of this argument is applied '
                        'AFTER the train-validation split according to that argument')
    parser.add_argument('--effective-valid-size', '--evs', type=float_range(exc_min=True), default=1.,
                        help='Portion of validation dataset to be used in each epoch. '
                        'NOTE: If --validation-split is set, then the value of this argument is applied '
                        'AFTER the train-validation split according to that argument')
    parser.add_argument('--effective-test-size', '--etes', type=float_range(exc_min=True), default=1.,
                        help='Portion of test dataset to be used in each epoch')
    parser.add_argument('--confusion', dest='display_confusion', default=False, action='store_true',
                        help='Display the confusion matrix')
    parser.add_argument('--num-best-scores', dest='num_best_scores', default=1, type=int,
                        help='number of best scores to track and report (default: 1)')
    parser.add_argument('--load-serialized', dest='load_serialized', action='store_true', default=False,
                        help='Load a model without DataParallel wrapping it')
    parser.add_argument('--thinnify', dest='thinnify', action='store_true', default=False,
                        help='physically remove zero-filters and create a smaller model')

    distiller.quantization.add_post_train_quant_args(parser)
    return parser
autopsy_logger.info( "*************** Terminating as per user request ***************") exit(2, archive=True) elif signum == signal.SIGUSR1: pdb.set_trace() return exit(1) if __name__ == '__main__': script_loc = os.path.dirname(os.path.realpath(__file__)) print(' '.join(sys.argv)) parser = ArgumentParser(description="Start Autopsy Automation run") req_group = parser.add_argument_group('Required Arguments') req_group.add_argument('--testbed', '--test-bed', help='Testbed to run', type=str, required=True, dest='testbed') req_group.add_argument('--testsuite', '--test-suite', help='Test suite(s)', type=str, required=True, dest='test_suite') opt_group = parser.add_argument_group('Optional Argumets') opt_group.add_argument('--mail',
except Exception as e: print "I am unable to load configuration" print str(e) parser = argparse.ArgumentParser( prog=sys.argv[0], description= 'Utility used to reprocess entire collections on filesystem', epilog='SISTEMA GmbH <http://sistema.at>') parser.add_argument( '--standalone', help= 'Execute the module stand-alone, without going through ingester in das_ing', action='store_true') parser_required = parser.add_argument_group('Required Arguments') parser_required.add_argument( '-f', dest='function', metavar='func', required=True, choices=['verticalIntegration', 'convert'], help='available functions are: verticalIntegration, convert') parser_required.add_argument('-l', dest='label', metavar='label', required=True, help='new label to attach to new filename') parser_ingester = parser.add_argument_group( 'Required Arguments if NOT executed stand-alone')
def add_argument_group(parser, name):
    """Create an argument group titled *name* on *parser* and return it."""
    return parser.add_argument_group(name)