def main():
    """Run the weight/dutycycle pruning analysis for every configured experiment.

    For each experiment section, builds a grid of 21 minimum-weight thresholds
    over [0.0, 0.1] and dispatches the analysis three times: with dutycycle
    pruning disabled (minDutycycle == -1), and with dutycycle pruning at 5%
    and 10% of the target density.
    """
    # Initialize experiment options and parameters
    suite = MNISTSparseExperiment()
    suite.parse_opt()
    suite.parse_cfg()
    experiments = suite.options.experiments or suite.cfgparser.sections()

    pool = multiprocessing.Pool()
    try:
        for expName in experiments:
            path = suite.get_exp(expName)[0]
            results = suite.get_exps(path=path)

            # Build argument list for multiprocessing pool. Each row is
            # (exp, params, minWeight, minDutycycle, index).
            args = []
            for exp in results:
                params = suite.get_params(exp)
                for i, minWeight in enumerate(np.linspace(0.0, 0.1, 21)):
                    args.append((exp, params, minWeight, -1, i))
            # Rows hold mixed types, so this is an object-dtype array; column 3
            # (minDutycycle) is rewritten in place before each run below.
            args = np.array(args)

            # Same argument grid under each dutycycle-pruning setting.
            # minDutycycle == -1 means weight pruning alone, no dutycycle pruning.
            for label, minDutycycle in (
                ("Weight Pruning", -1),
                ("Dutycycle Pruning (5%)", 0.05),
                ("Dutycycle Pruning (10%)", 0.10),
            ):
                args[:, 3] = minDutycycle  # set minDutycycle for all experiments
                run(pool, expName, label, args)
    finally:
        # Original never released the pool; close and reap the workers.
        # NOTE(review): assumes run() has finished submitting work by the time
        # main() exits — join() will wait for any outstanding tasks.
        pool.close()
        pool.join()
def main():
    """Run the weight and dutycycle pruning analysis for every configured experiment.

    For each experiment section, builds a grid of 21 minimum-weight thresholds
    over [0.0, 0.1] and dispatches the analysis twice: once with dutycycle
    pruning disabled (minDutycycle == -1) and once with dutycycle pruning at
    5% of the target density.
    """
    # Initialize experiment options and parameters
    suite = MNISTSparseExperiment()
    suite.parse_opt()
    suite.parse_cfg()
    experiments = suite.options.experiments or suite.cfgparser.sections()

    pool = multiprocessing.Pool()
    try:
        for expName in experiments:
            path = suite.get_exp(expName)[0]
            results = suite.get_exps(path=path)

            # Build argument list for multiprocessing pool. Each row is
            # (exp, params, minWeight, minDutycycle, index).
            args = []
            for exp in results:
                params = suite.get_params(exp)
                for i, minWeight in enumerate(np.linspace(0.0, 0.1, 21)):
                    args.append((exp, params, minWeight, -1, i))
            # Rows hold mixed types, so this is an object-dtype array; column 3
            # (minDutycycle) is rewritten in place before each run below.
            args = np.array(args)

            # Same argument grid under each dutycycle-pruning setting.
            # NOTE(review): the "weight_prunning" misspelling is preserved
            # deliberately — the label is passed to run() and may name output
            # artifacts that downstream consumers already depend on.
            for label, minDutycycle in (
                ("weight_prunning", -1),
                ("dutycycle_pruning", 0.05),
            ):
                args[:, 3] = minDutycycle  # set minDutycycle for all experiments
                run(pool, expName, label, args)
    finally:
        # Original never released the pool; close and reap the workers.
        # NOTE(review): assumes run() has finished submitting work by the time
        # main() exits — join() will wait for any outstanding tasks.
        pool.close()
        pool.join()
def main():
    """Evaluate every saved experiment model (plus a random baseline) on the
    union-MNIST dataset, write the accuracy table to CSV, and save a sample
    grid of the first 100 dataset images.
    """
    # Set fixed seed. Only works on cpu
    random.seed(42)
    np.random.seed(42)
    torch.manual_seed(42)

    dataset = create_union_mnist_dataset()

    # Load experiment configuration; fall back to every config section when no
    # explicit experiment list was given on the command line.
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    suite = MNISTSparseExperiment()
    suite.parse_opt()
    suite.parse_cfg()
    experiments = suite.options.experiments or suite.cfgparser.sections()

    # Maps experiment name -> evaluation result (shape of the result is
    # whatever evaluate() returns; not visible here).
    table = {}
    progress = tqdm(experiments)
    for expName in progress:
        progress.set_description(expName)  # show the current experiment in the bar
        path = suite.get_exp(expName)[0]
        results = suite.get_exps(path=path)
        for exp in results:
            # Only evaluate runs that actually saved a trained model.
            model_file = os.path.join(exp, "model.pt")
            if os.path.exists(model_file):
                model = torch.load(model_file, map_location=device)
                params = suite.get_params(exp)
                test_loader = torch.utils.data.DataLoader(
                    dataset, shuffle=True, batch_size=params["test_batch_size"])
                table[params['name']] = evaluate(model=model, loader=test_loader, device=device)

    # Random model baseline.
    # NOTE(review): random_model is not defined in this function — presumably a
    # module-level model; confirm it exists before running.
    test_loader = torch.utils.data.DataLoader(dataset, shuffle=True, batch_size=4)
    table["random"] = evaluate(model=random_model, loader=test_loader, device=device)

    # Save results to CSV and pretty-print the same table to stdout.
    df = pd.DataFrame.from_dict(table)
    df.to_csv("union_experiment_results.csv")
    print(tabulate(df, tablefmt='fancy_grid', headers='keys', numalign="right"))

    # Plot first 100 images in the dataset as a 10x10 grid.
    fig = plt.figure(figsize=(10, 10))
    for i in range(100):
        ax = fig.add_subplot(10, 10, i + 1)
        ax.set_axis_off()
        img, label = dataset[i]
        # assumes each image tensor flattens to 28*28 pixels and the label is a
        # tensor — TODO confirm against create_union_mnist_dataset()
        ax.imshow(img.numpy().reshape((28, 28)), cmap='gray')
        ax.set_title(str(label.numpy()))
    plt.tight_layout()
    plt.savefig("union_experiment_sample_images.png")
    plt.close()
if __name__ == '__main__': suite = MNISTSparseExperiment() suite.parse_opt() suite.parse_cfg() experiments = suite.options.experiments or suite.cfgparser.sections() paramsTable = [["Network", "L1 F", "L1 Sparsity", "L2 F", "L2 Sparsity", "L3 N", "L3 Sparsity", "Wt Sparsity"]] for name in experiments: exps = suite.get_exps(suite.get_exp(name)[0]) for exp in exps: if not os.path.exists(exp): continue params = suite.get_params(exp=exp) l3_n = params["n"] l3_k = params["k"] l3_sp = "{0:.1f}%".format(100 * float(l3_k) / l3_n) wt_sp = "{0}%".format(100 * float(params["weight_sparsity"])) c1_k = params["c1_k"] if isinstance(c1_k, basestring): c1_k = map(int, c1_k.split("_")) l1_k = c1_k[0] l2_k = c1_k[1]