'mlp_decomp': mlp_decomp_model, 'mlp_prune': mlp_prune_model, 'sparse_mlp': sparse_mlp_model, 'debug_sparse_mlp': debug_sparse_mlp_model, 'debug_sparse_mlp_decomposition': debug_sparse_mlp_decomposition_model, 'debug_sparse_mlp_prune': debug_sparse_mlp_prune_model, # Add more model_type functions here. } if __name__ == '__main__': # it's hard to init flags correctly... so here it is sys.argv.append('--caffe2_keep_on_shrink') # FbcodeArgumentParser calls initFacebook which is necessary for NNLoader # initialization parser = pyinit.FbcodeArgumentParser(description='Ads NN trainer') # arguments starting with single '-' are compatible with Lua parser.add_argument("-batchSize", type=int, default=100, help="The batch size of benchmark data.") parser.add_argument("-loaderConfig", type=str, help="Json file with NNLoader's config. If empty some " "fake data is used") parser.add_argument("-meta", type=str, help="Meta file (deprecated)") parser.add_argument("-hidden", type=str, help="A dash-separated string specifying the " "model dimensions without the output layer.")
# Maps the --model CLI choice to the function that builds that architecture.
MODEL_TYPE_FUNCTIONS = {
    'AlexNet': AlexNet,
    'AlexNet_Prune': AlexNet_Prune,
    'VGG': VGG,
    'ResNet-110': ResNet110,
    'ResNet-20': ResNet20,
}

if __name__ == '__main__':
    # it's hard to init flags correctly... so here it is
    sys.argv.append('--caffe2_keep_on_shrink')

    def _parse_bool(value):
        """Parse a command-line boolean string.

        argparse's `type=bool` is a trap: bool("False") is True, since any
        non-empty string is truthy. Accept the common spellings explicitly
        and raise ValueError (which argparse reports as an invalid value)
        for anything else.
        """
        lowered = str(value).strip().lower()
        if lowered in ('1', 'true', 't', 'yes', 'y'):
            return True
        if lowered in ('0', 'false', 'f', 'no', 'n'):
            return False
        raise ValueError("expected a boolean value, got %r" % value)

    # FbcodeArgumentParser calls initFacebook which is necessary for NNLoader
    # initialization
    parser = pyinit.FbcodeArgumentParser(description='cifar-10 Tutorial')

    # Fixed: help text was copy-pasted from the batch-size flag.
    parser.add_argument("--model", type=str, default='AlexNet',
                        choices=list(MODEL_TYPE_FUNCTIONS),
                        help="Model architecture to train "
                             "(one of MODEL_TYPE_FUNCTIONS).")
    parser.add_argument("--prune_thres", type=float, default=0.0001,
                        help="Pruning threshold for FC layers.")
    parser.add_argument("--comp_lb", type=float, default=0.02,
                        help="Compression Lower Bound for FC layers.")
    # Fixed: was `type=bool`, under which `--gpu False` evaluated to True.
    # Still takes an explicit value on the command line, so callers passing
    # `--gpu True` keep working; `--gpu False` now actually disables GPU.
    parser.add_argument("--gpu", default=False, type=_parse_bool,
                        help="Whether to run on gpu")
    parser.add_argument("--train_input_path", type=str, default=None,
                        required=True,
                        help="Path to the database for training data")