def test_get_early_stop_history_list_from_files(self):
    """ Should load fake EarlyStopHistoryList from pth files. """
    plan_fc = [2]
    net0 = Net(NetNames.LENET, DatasetNames.MNIST, plan_conv=[], plan_fc=plan_fc)
    net1 = Net(NetNames.LENET, DatasetNames.MNIST, plan_conv=[], plan_fc=plan_fc)
    history_list = EarlyStopHistoryList()
    history_list.setup(2, 0)
    history_list.histories[0].state_dicts[0] = deepcopy(net0.state_dict())
    history_list.histories[1].state_dicts[0] = deepcopy(net1.state_dict())
    history_list.histories[0].indices[0] = 3
    history_list.histories[1].indices[0] = 42

    specs = get_specs_lenet_toy()
    specs.save_early_stop = True
    specs.net_count = 2
    specs.prune_count = 0

    with TemporaryDirectory() as tmp_dir_name:
        # save checkpoints
        result_saver.save_early_stop_history_list(tmp_dir_name, 'prefix', history_list)

        # load and validate histories from file
        experiment_path_prefix = f"{tmp_dir_name}/prefix"
        loaded_history_list = result_loader.get_early_stop_history_list_from_files(experiment_path_prefix, specs)

        self.assertEqual(loaded_history_list, history_list)
        net0.load_state_dict(history_list.histories[0].state_dicts[0])
        net1.load_state_dict(history_list.histories[1].state_dicts[0])
def test_save_nets(self):
    """ Should save two small Lenet instances into pth files. """
    plan_fc = [5]
    net_list = [
        Net(NetNames.LENET, DatasetNames.MNIST, plan_conv=[], plan_fc=plan_fc),
        Net(NetNames.LENET, DatasetNames.MNIST, plan_conv=[], plan_fc=plan_fc)
    ]

    with TemporaryDirectory() as tmp_dir_name:
        result_saver.save_nets(tmp_dir_name, 'prefix', net_list)

        # load and reconstruct nets from their files
        result_file_path0 = os.path.join(tmp_dir_name, 'prefix-net0.pth')
        result_file_path1 = os.path.join(tmp_dir_name, 'prefix-net1.pth')
        for result_file_path in [result_file_path0, result_file_path1]:
            with open(result_file_path, 'rb') as result_file:
                checkpoint = t_load(result_file)
                net = Net(NetNames.LENET, DatasetNames.MNIST, plan_conv=[], plan_fc=plan_fc)
                net.load_state_dict(checkpoint)
def test_equal_layers_unequal_types(self):
    """ Should return False, as two layers have unequal activation functions. """
    net0 = Net(NetNames.LENET, DatasetNames.MNIST, plan_conv=[2, 'M'], plan_fc=[2])
    net1 = Net(NetNames.CONV, DatasetNames.MNIST, plan_conv=[2, 'M'], plan_fc=[2])
    self.assertIs(net0.equal_layers(other=net1), False)
def test_equal_layers_unequal_weights(self):
    """ Should return False, as two layers contain unequal 'weight'-attributes. """
    torch.manual_seed(0)
    net0 = Net(NetNames.CONV, DatasetNames.MNIST, plan_conv=[2, 'M'], plan_fc=[2])
    torch.manual_seed(1)
    net1 = Net(NetNames.CONV, DatasetNames.MNIST, plan_conv=[2, 'M'], plan_fc=[2])
    self.assertIs(net0.equal_layers(other=net1), False)
def test_perform_toy_lenet_experiment(self):
    """ Should run a random-retraining experiment with a small Lenet and toy dataset without errors. """
    specs = get_specs_lenet_toy()
    specs.prune_count = 1
    specs.save_early_stop = True

    early_stop_history = EarlyStopHistory()
    early_stop_history.setup(specs.prune_count)
    net = Net(specs.net, specs.dataset, specs.plan_conv, specs.plan_fc)
    early_stop_history.state_dicts[0] = net.state_dict()
    early_stop_history.state_dicts[1] = net.state_dict()

    early_stop_history_list = EarlyStopHistoryList()
    early_stop_history_list.setup(1, 0)
    early_stop_history_list.histories[0] = early_stop_history

    fake_mnist_data_loaders = generate_fake_mnist_data_loaders()
    with mock.patch('experiments.experiment.get_mnist_data_loaders', return_value=fake_mnist_data_loaders):
        with TemporaryDirectory() as tmp_dir_name:  # save results into a temporary folder
            result_saver.save_specs(tmp_dir_name, 'prefix', specs)
            result_saver.save_early_stop_history_list(tmp_dir_name, 'prefix', early_stop_history_list)

            path_to_specs = os.path.join(tmp_dir_name, 'prefix-specs.json')
            experiment = ExperimentRandomRetrain(path_to_specs, 0, 1)
            experiment.run_experiment()
            self.assertEqual(1, len(glob.glob(os.path.join(tmp_dir_name, 'prefix-random-histories0.npz'))))
def test_raise_error_on_invalid_fc_spec(self):
    """ The network should raise an assertion error, because plan_fc contains an invalid spec. """
    with self.assertRaises(AssertionError):
        Net(NetNames.CONV, DatasetNames.MNIST, plan_conv=[], plan_fc=['invalid_spec'])
def generate_model_from_state_dict(state_dict, specs):
    """ Generate a model specified by 'specs' and load the given 'state_dict'. """
    net = Net(specs.net, specs.dataset, specs.plan_conv, specs.plan_fc)
    net.load_state_dict(state_dict)
    net.prune_net(0., 0., reset=False)  # apply pruning masks, but do not modify the masks
    return net
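# Usage sketch for generate_model_from_state_dict (an illustration, not code
# from this repo): rebuild a pruned model from an early-stop checkpoint saved
# by result_saver, as in the tests in this section. The variable names and the
# file name are hypothetical placeholders.
#
#     history = t_load('prefix-early-stop0.pth')  # an EarlyStopHistory checkpoint
#     model = generate_model_from_state_dict(history.state_dicts[0], specs)
#     model.eval()  # standard PyTorch inference mode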
def test_equal_layers(self):
    """ Should return True, as the net is equal to itself. """
    net = Net(NetNames.CONV, DatasetNames.MNIST, plan_conv=[2, 'M'], plan_fc=[2])
    self.assertIs(net.equal_layers(other=net), True)
def test_forward_pass_cifar10(self):
    """ The neural network with a small convolutional architecture should perform a forward pass without exceptions. """
    net = Net(NetNames.LENET, DatasetNames.CIFAR10, plan_conv=[2, '2', 'M', '2B', 'A'], plan_fc=['4', 2])
    input_sample = torch.rand(1, 3, 32, 32)
    net(input_sample)
def test_forward_pass_mnist(self):
    """ The neural network with one hidden layer should perform a forward pass without exceptions. """
    net = Net(NetNames.LENET, DatasetNames.MNIST, plan_conv=[], plan_fc=[2])
    input_sample = torch.rand(1, 1, 28, 28)
    net(input_sample)
def test_sparsity_report_initial_weights(self):
    """ The convolutional neural network should be completely unpruned (all ones in the sparsity report) right after initialization. """
    net = Net(NetNames.CONV, DatasetNames.CIFAR10, plan_conv=[8, 'M', 16, 'A'], plan_fc=[32, 16])
    np.testing.assert_array_equal(np.ones(6, dtype=float), net.sparsity_report())
def test_weight_count(self):
    """ The CNN should have the right weight counts.
    conv = conv1 + conv2 = 3*9*2 + 2*9*2 = 90
    fc = hid1 + hid2 + out = (16*16*2)*4 + 4*2 + 2*10 = 2076 """
    net = Net(NetNames.CONV, DatasetNames.CIFAR10, plan_conv=[2, 2, 'M'], plan_fc=[4, 2])
    self.assertEqual(dict([('conv', 90), ('fc', 2076)]), net.init_weight_count_net)
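# Worked check of the arithmetic in the docstring above (plain Python; the 3x3
# kernel size and the halving 'M' pool follow from the docstring itself):
# CIFAR-10 input has 3 channels, so conv1 holds 3*9*2 = 54 weights and conv2
# holds 2*9*2 = 36; one pool shrinks 32x32 to 16x16 before the fc part.
conv_weights = 3 * 9 * 2 + 2 * 9 * 2                 # 54 + 36 = 90
fc_weights = (16 * 16 * 2) * 4 + 4 * 2 + 2 * 10      # 2048 + 8 + 20 = 2076
assert (conv_weights, fc_weights) == (90, 2076)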
def test_sparsity_report_after_single_prune_lenet_300_100(self):
    """ Should prune each layer with the given pruning rate, except for the last layer (half fc pruning-rate).
    total_weights = (28*28*300) + (300*100) + (100*10) = 266200
    sparsity = ((28*28*300)*0.9 + (300*100)*0.9 + (100*10)*0.95) / 266200 ~ 0.9002 """
    net = Net(NetNames.LENET, DatasetNames.MNIST, plan_conv=[], plan_fc=[300, 100])
    net.prune_net(prune_rate_conv=0.0, prune_rate_fc=0.1)
    np.testing.assert_array_equal(np.array([0.9002, 0.9, 0.9, 0.95]), net.sparsity_report())
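# Worked check of the sparsity claim above (plain Python): pruning 10% of the
# fc weights and 5% of the output layer leaves roughly 90.02% of all weights.
total = 28 * 28 * 300 + 300 * 100 + 100 * 10                          # 266200
remaining = 28 * 28 * 300 * 0.9 + 300 * 100 * 0.9 + 100 * 10 * 0.95  # 239630
assert round(remaining / total, 4) == 0.9002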
def generate_randomly_reinitialized_net(specs, state_dict):
    """ Build a net from 'state_dict' and randomly reinitialize its weights.
    The net has the same masks as the net specified by 'state_dict'. """
    assert isinstance(specs, ExperimentSpecs), f"'specs' needs to be ExperimentSpecs, but is {type(specs)}."

    net = Net(specs.net, specs.dataset, specs.plan_conv, specs.plan_fc)
    net.load_state_dict(state_dict)  # restores weights and pruning masks from the checkpoint
    net.apply(gaussian_glorot)       # reinitialize all weights at random
    net.store_initial_weights()
    net.prune_net(0.0, 0.0)          # prune with rate zero, i.e. reapply the loaded masks unchanged
    return net
def test_save_early_stop_history_list(self):
    """ Should save two fake EarlyStopHistories into two pth files. """
    plan_fc = [2]
    net0 = Net(NetNames.LENET, DatasetNames.MNIST, plan_conv=[], plan_fc=plan_fc)
    net1 = Net(NetNames.LENET, DatasetNames.MNIST, plan_conv=[], plan_fc=plan_fc)
    history_list = EarlyStopHistoryList()
    history_list.setup(2, 0)
    history_list.histories[0].state_dicts[0] = deepcopy(net0.state_dict())
    history_list.histories[1].state_dicts[0] = deepcopy(net1.state_dict())
    history_list.histories[0].indices[0] = 3
    history_list.histories[1].indices[0] = 42

    with TemporaryDirectory() as tmp_dir_name:
        # save checkpoints
        result_saver.save_early_stop_history_list(tmp_dir_name, 'prefix', history_list)

        # load and validate histories from file
        result_file_path0 = os.path.join(tmp_dir_name, 'prefix-early-stop0.pth')
        result_file_path1 = os.path.join(tmp_dir_name, 'prefix-early-stop1.pth')
        for net_num, result_file_path in enumerate([result_file_path0, result_file_path1]):
            with open(result_file_path, 'rb') as result_file:
                reconstructed_hist = t_load(result_file)
                net = Net(NetNames.LENET, DatasetNames.MNIST, plan_conv=[], plan_fc=plan_fc)
                np.testing.assert_array_equal(reconstructed_hist.indices, history_list.histories[net_num].indices)
                net.load_state_dict(reconstructed_hist.state_dicts[0])
def test_sparsity_report_after_single_prune_conv2(self):
    """ Should prune each layer with the given pruning rate, except for the last layer (half fc pruning-rate).
    total_weights = conv + fc = 37440 + 3279360 = 3316800
    sparsity = (37440*0.9 + (14*14*64*256 + 256*256)*0.8 + (256*10)*0.9) / 3316800 ~ 0.8012 """
    net = Net(NetNames.CONV, DatasetNames.MNIST, plan_conv=[64, 64, 'M'], plan_fc=[256, 256])
    net.prune_net(prune_rate_conv=0.1, prune_rate_fc=0.2)
    np.testing.assert_almost_equal(np.array([0.801, 0.9, 0.9, 0.8, 0.8, 0.9]), net.sparsity_report(), decimal=3)
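# Worked check of the docstring arithmetic above (the original docstring used
# CIFAR-10 geometry, but the net is built on MNIST: 1 input channel, 28x28
# images halved to 14x14 by the 'M' pool; 3x3 kernels are assumed, consistent
# with the other docstrings in this file):
conv = 3 * 3 * 1 * 64 + 3 * 3 * 64 * 64          # 576 + 36864 = 37440
fc = 14 * 14 * 64 * 256 + 256 * 256 + 256 * 10   # 3279360
remaining = conv * 0.9 + (14 * 14 * 64 * 256 + 256 * 256) * 0.8 + 256 * 10 * 0.9
assert round(remaining / (conv + fc), 3) == 0.801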
def test_get_net_from_file(self):
    """ Should load two small Conv instances from pth files. """
    specs = get_specs_conv_toy()
    specs.plan_conv = [2, 'M']
    specs.plan_fc = [2]
    net_list = [
        Net(specs.net, specs.dataset, specs.plan_conv, specs.plan_fc),
        Net(specs.net, specs.dataset, specs.plan_conv, specs.plan_fc)
    ]

    with TemporaryDirectory() as tmp_dir_name:
        # save nets
        result_saver.save_nets(tmp_dir_name, 'prefix', net_list)

        # load and reconstruct nets from their files
        experiment_path_prefix = f"{tmp_dir_name}/prefix"
        loaded_nets = result_loader.get_models_from_files(experiment_path_prefix, specs)

        self.assertIsInstance(loaded_nets[0], Net)
        self.assertIsInstance(loaded_nets[1], Net)
        self.assertEqual(DatasetNames.CIFAR10, loaded_nets[0].dataset_name)
        self.assertEqual(DatasetNames.CIFAR10, loaded_nets[1].dataset_name)
        self.assertEqual(NetNames.CONV, loaded_nets[0].net_name)
        self.assertEqual(NetNames.CONV, loaded_nets[1].net_name)
def test_generate_randomly_reinitialized_net(self):
    """ Should generate a network with equal masks but different weights. """
    specs = experiment_specs.get_specs_lenet_mnist()
    specs.save_early_stop = True

    torch.manual_seed(0)
    net = Net(specs.net, specs.dataset, specs.plan_conv, specs.plan_fc)
    torch.manual_seed(1)
    new_net = ExperimentRandomRetrain.generate_randomly_reinitialized_net(specs, net.state_dict())

    self.assertIs(net.fc[0].weight.eq(new_net.fc[0].weight).all().item(), False)
    self.assertIs(net.fc[0].weight_mask.eq(new_net.fc[0].weight_mask).all().item(), True)
    self.assertIs(net.out.weight.eq(new_net.out.weight).all().item(), False)
    self.assertIs(net.out.weight_mask.eq(new_net.out.weight_mask).all().item(), True)
def test_get_trained_instance(self):
    """ The pruned and trained network should return a trained copy of itself. """
    net = Net(NetNames.CONV, DatasetNames.CIFAR10, plan_conv=[2, 'M'], plan_fc=[2])
    net.conv[0].weight.add_(0.5)
    net.fc[0].weight.add_(0.5)
    net.prune_net(prune_rate_conv=0.0, prune_rate_fc=0.1, reset=False)

    new_net = net.get_new_instance(reset_weight=False)

    np.testing.assert_array_equal(net.sparsity_report(), new_net.sparsity_report())
    self.assertEqual(NetNames.CONV, new_net.net_name)
    self.assertEqual(DatasetNames.CIFAR10, new_net.dataset_name)
    self.assertIs(torch.equal(new_net.conv[0].weight, net.conv[0].weight.mul(net.conv[0].weight_mask)), True)
    self.assertIs(torch.equal(new_net.fc[0].weight, net.fc[0].weight.mul(net.fc[0].weight_mask)), True)
START_FROM = 80000
MEMORY_SIZE = 80000
BATCH_SIZE = 32
UPDATE_NET_EVERY = 1000
STACK_FRAMES = 4
CREATE_NEW_NET = False
LOAD_NET_PATH = 'data/net.torch'
SAVE_NET_PATH = 'data/net.torch'

env = PongEnv(shape=INPUT_SHAPE, stack_frames=STACK_FRAMES)
device = 'cuda' if torch.cuda.is_available() else 'cpu'

if CREATE_NEW_NET:
    net = Net(input_shape=INPUT_SHAPE, input_channels=STACK_FRAMES, output_size=env.action_space.n)
else:
    net = torch.load(LOAD_NET_PATH)
target_net = Net(input_shape=INPUT_SHAPE, input_channels=STACK_FRAMES, output_size=env.action_space.n)

epsilon_tracker = LinearEpsilonTracker(EPS_START, EPS_END, EPS_DECAY_TO)
memory = Memory(maxlen=MEMORY_SIZE)
agent = DqnAgent(env=env,
                 device=device,
                 lr=LEARNING_RATE,
                 gamma=DECAY_RATE,
                 batch_size=BATCH_SIZE,
                 net=net,
                 target_net=target_net,
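# A minimal sketch of the linear epsilon schedule this setup implies (an
# assumption about LinearEpsilonTracker, whose implementation is not shown
# here; EPS_START, EPS_END, and EPS_DECAY_TO are defined elsewhere in the
# original script): epsilon decays linearly from EPS_START to EPS_END over
# the first EPS_DECAY_TO frames, then stays at EPS_END.
#
#     def linear_epsilon(frame_idx):
#         fraction = min(frame_idx / EPS_DECAY_TO, 1.0)
#         return EPS_START + fraction * (EPS_END - EPS_START)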
def test_raise_error_on_invalid_dataset_name(self):
    """ The network should raise an assertion error, because 'dataset_name' is invalid. """
    with self.assertRaises(AssertionError):
        # noinspection PyTypeChecker
        Net(NetNames.LENET, 'Invalid name', plan_conv=[], plan_fc=[])
def init_nets(self):
    """ Initialize nets which are used during the experiment. """
    for _ in range(self.specs.net_count):
        self.nets.append(Net(self.specs.net, self.specs.dataset, self.specs.plan_conv, self.specs.plan_fc))
    log_detailed_only(self.specs.verbosity, self.nets[0])