Example #1
    def test_default(self):
        """Evaluate if ResNet50 initializes and runs"""
        net = resnet50(config=dict(num_classes=10))
        net(Variable(torch.randn(2, 3, 32, 32)))

        self.assertIsInstance(net, ResNet,
                              "Loads ResNet50 with default parameters")
Example #2
    def test_name_not_included(self):
        """
        Test the case when a param name does not exist in the network.
        """
        resnet = resnets.resnet50(num_classes=10)
        named_params = filter_params(resnet, include_names=["classifier"])
        self.assertEqual(len(named_params), 0)
Example #3
    def test_create_optimizer_for_resnet(self):
        """
        Create an optimizer with three param groups: a reduced learning rate
        for the stem, no weight decay for batch norm and bias params, and the
        default optimizer args for the remaining params.
        """
        resnet = resnets.resnet50(num_classes=10)

        optim = create_optimizer(config, resnet)
        self.assertEqual(len(optim.param_groups), 3)

        # Group 0 - reduced lr on stem
        lr = optim.param_groups[0]["lr"]
        num_params = len(optim.param_groups[0]["params"])
        weight_decay = optim.param_groups[0]["weight_decay"]
        self.assertEqual(lr, 0.001)
        self.assertEqual(num_params, 1)
        self.assertEqual(weight_decay, 0.1)

        # Group 1 - no weight decay on batch norm and bias params
        lr = optim.param_groups[1]["lr"]
        num_params = len(optim.param_groups[1]["params"])
        weight_decay = optim.param_groups[1]["weight_decay"]
        self.assertEqual(lr, 0.01)
        self.assertEqual(num_params, 107)
        self.assertEqual(weight_decay, 0)

        # Group 2 - remaining params; uses default optim args
        lr = optim.param_groups[2]["lr"]
        weight_decay = optim.param_groups[2]["weight_decay"]
        self.assertEqual(lr, 0.01)
        self.assertEqual(weight_decay, 0.1)
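The `config` object and `create_optimizer` helper above are project specific and not defined in this excerpt. As a rough sketch only, using plain PyTorch and torchvision rather than the project's API, the three param groups asserted above could be assembled like this:

# Rough sketch (an assumption, not the project's create_optimizer): build
# three SGD param groups matching the structure asserted above, with
# torchvision's resnet50 standing in for the project's model.
import torch
from torchvision import models

net = models.resnet50(num_classes=10)

# Group 0: the stem conv weight, with a reduced learning rate.
stem_params = [net.conv1.weight]

# Group 1: batch norm and bias parameters, with weight decay disabled.
# (The name-based filter below is approximate.)
norm_and_bias = [
    p for name, p in net.named_parameters()
    if name.endswith(".bias") or ".bn" in name or name.startswith("bn")
    or "downsample.1" in name
]

# Group 2: everything else, inheriting the optimizer defaults.
special = {id(p) for p in stem_params + norm_and_bias}
remaining = [p for p in net.parameters() if id(p) not in special]

optim = torch.optim.SGD(
    [
        {"params": stem_params, "lr": 0.001, "weight_decay": 0.1},
        {"params": norm_and_bias, "weight_decay": 0.0},
        {"params": remaining},
    ],
    lr=0.01,           # default lr, inherited by groups 1 and 2
    weight_decay=0.1,  # default weight decay, inherited by group 2
)
assert len(optim.param_groups) == 3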
Example #4
    def test_filter_out_resnet_linear_params(self):
        """
        Filter out only the linear params of resnet.
        """
        resnet = resnets.resnet50(num_classes=10)
        named_params = filter_params(resnet, include_modules=[torch.nn.Linear])
        self.assertTrue(len(named_params) == 2)
        self.assertTrue("classifier.weight" in named_params)
        self.assertTrue("classifier.bias" in named_params)
Example #5
    def test_filter_out_resnet_linear_params(self):
        """
        Filter out only the linear modules of resnet.
        """
        resnet = resnets.resnet50(num_classes=10)
        named_modules = filter_modules(resnet,
                                       include_modules=[torch.nn.Linear])
        self.assertEqual(len(named_modules), 1)
        self.assertIn("classifier", named_modules)
Example #6
    def test_params_count(self):
        """
        Test the number of non-zero parameters for default dense and sparse networks
        """
        dense_net = resnet50(config=dict(num_classes=10))
        dense_net(Variable(torch.randn(2, 3, 32, 32)))

        sparse_net = resnet50(config=dict(num_classes=10, defaults_sparse=True))
        sparse_net(Variable(torch.randn(2, 3, 32, 32)))

        total_params_dense, total_nonzero_params_dense = count_nonzero_params(dense_net)
        self.assertGreater(total_params_dense, 23500000)
        self.assertGreaterEqual(total_params_dense, total_nonzero_params_dense)

        params_sparse, nonzero_params_sparse = count_nonzero_params(sparse_net)

        self.assertEqual(params_sparse, total_params_dense)
        self.assertLess(nonzero_params_sparse, 10000000)
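`count_nonzero_params` also comes from the project under test. A hypothetical sketch consistent with the assertions above, using only standard PyTorch calls:

# Hypothetical sketch of a count_nonzero_params helper; the project's real
# implementation is not shown in these excerpts.
import torch


def count_nonzero_params(model):
    """Return (total_params, total_nonzero_params) over all parameters of `model`."""
    total, nonzero = 0, 0
    for param in model.parameters():
        total += param.numel()
        nonzero += int(torch.count_nonzero(param).item())
    return total, nonzero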
Example #7
    def test_include_name(self):
        """
        Test use of `include_names`.
        """

        resnet = resnets.resnet50(num_classes=10)
        named_params = filter_params(resnet, include_names=["classifier.weight"])
        self.assertEqual(len(named_params), 1)
        self.assertIn("classifier.weight", named_params)
        self.assertEqual(named_params["classifier.weight"].shape, (10, 2048))
Example #8
    def test_include_name(self):
        """
        Test use of `include_names`.
        """

        resnet = resnets.resnet50(num_classes=10)
        named_modules = filter_modules(resnet, include_names=["classifier"])
        self.assertEqual(len(named_modules), 1)
        self.assertIn("classifier", named_modules)
        self.assertIsInstance(named_modules["classifier"], torch.nn.Linear)
Example #9
    def test_default_sparse(self):
        """Create the default sparse network"""
        net = resnet50(config=dict(num_classes=10, defaults_sparse=True))
        net(Variable(torch.randn(2, 3, 32, 32)))
        self.assertIsInstance(net, ResNet, "ResNet50 with default sparse parameters")

        # Test on CUDA if available
        if torch.cuda.is_available():
            net.cuda()
            x = Variable(torch.randn(16, 3, 224, 224))
            net(x.cuda())
Example #10
    def test_get_conv_modules_by_pattern_and_type(self):
        """
        Ensure `include_patterns` and `include_modules` yield the same result
        when they are meant to identify the same params.
        """
        resnet = resnets.resnet50(num_classes=10)
        named_params1 = filter_params(
            resnet, include_patterns=["classifier"])
        self.assertEqual(len(named_params1), 2)  # linear weight + bias

        named_params2 = filter_params(
            resnet, include_modules=[torch.nn.Linear])
        self.assertEqual(len(named_params2), 2)

        names1 = list(named_params1.keys())
        names2 = list(named_params2.keys())
        self.assertEqual(names1, names2)
Example #11
    def test_get_conv_modules_by_pattern_and_type(self):
        """
        Ensure `include_patterns` and `include_modules` yield the same result
        when they are meant to identify the same modules.
        """
        resnet = resnets.resnet50(num_classes=10)

        include_pooling_layers = ["features\\..*pool.*"]
        named_modules1 = filter_modules(
            resnet, include_patterns=include_pooling_layers)
        self.assertEqual(len(named_modules1), 2)

        pooling_layers_types = [
            torch.nn.modules.pooling.AdaptiveAvgPool2d,
            torch.nn.modules.pooling.MaxPool2d,
        ]
        named_modules2 = filter_modules(resnet,
                                        include_modules=pooling_layers_types)
        self.assertEqual(len(named_modules2), 2)

        names1 = list(named_modules1.keys())
        names2 = list(named_modules2.keys())
        self.assertEqual(names1, names2)
Example #12
train_dataloader = utils.data.DataLoader(train_dataset,
                                         batch_size=128,
                                         shuffle=True,
                                         num_workers=4)
print("Loaded train dataloader")
test_dataloader = utils.data.DataLoader(test_dataset,
                                        batch_size=128,
                                        shuffle=True,
                                        num_workers=4)
print("Loaded test dataloader")
t1 = time()
print("Time spent to load dataloaders: {:.2f}".format(t1 - t0))

# load network
t0 = time()
network = resnet50(config=dict(num_classes=num_classes))
# from torch import nn
# from torchvision import models
# network = models.resnet50(pretrained=False)
# last_layer_shape = network.fc.weight.shape
# network.fc = nn.Linear(last_layer_shape[1], num_classes)

print("Loaded network")
t1 = time()
print("Time spent to load network: {:.2f}".format(t1 - t0))

# ------------------------- RUN MODEL

# simple base model
t0 = time()
exp_config = dict(device="cuda")