Example #1
0
    def test_MultiLayerEncoder_named_children(self):
        # The encoder must expose its children under the names they were
        # registered with, preserving insertion order.
        named_modules = [(str(position), nn.Module()) for position in range(3)]
        encoder = enc.MultiLayerEncoder(named_modules)

        expected = tuple(name for name, _ in named_modules)
        result = tuple(encoder.children_names())
        self.assertTupleEqual(result, expected)
Example #2
0
def test_MultiOperatorLoss_call_encode(forward_pass_counter):
    class TestOperator(ops.EncodingOperator):
        """Minimal encoding operator that sums the encodings of its encoder."""

        def __init__(self, encoder, **kwargs):
            super().__init__(**kwargs)
            self._encoder = encoder

        @property
        def encoder(self):
            return self._encoder

        def forward(self, image):
            return torch.sum(self.encoder(image))

    multi_layer_encoder = enc.MultiLayerEncoder((("count", forward_pass_counter),))

    named_ops = []
    for idx in range(3):
        operator = TestOperator(multi_layer_encoder.extract_encoder("count"))
        named_ops.append((str(idx), operator))
    multi_op_loss = loss.MultiOperatorLoss(named_ops)

    torch.manual_seed(0)
    image = torch.rand(1, 3, 128, 128)

    # Although three operators share the same encoder, a single call of the
    # joint loss should trigger exactly one forward pass through it.
    multi_op_loss(image)
    assert forward_pass_counter.count == 1

    multi_op_loss(image)
    assert forward_pass_counter.count == 2
Example #3
0
    def test_MultiLayerEncoder_encode(self):
        torch.manual_seed(0)
        counter = ForwardPassCounter()
        conv_layer = nn.Conv2d(3, 1, 1)
        relu_layer = nn.ReLU(inplace=False)
        image = torch.rand(1, 3, 128, 128)

        encoder = enc.MultiLayerEncoder(
            (("count", counter), ("conv", conv_layer), ("relu", relu_layer))
        )

        layers = ("conv", "relu")
        encoder.registered_layers.update(layers)
        # encode() should cache the encodings so that the subsequent call
        # does not trigger a second forward pass.
        encoder.encode(image)
        encs = encoder(image, layers)

        self.assertTensorAlmostEqual(encs[0], conv_layer(image))
        self.assertTensorAlmostEqual(encs[1], relu_layer(conv_layer(image)))

        self.assertEqual(counter.count, 1)
def mle_and_modules(module_factory):
    """Build a three-layer MultiLayerEncoder from *module_factory*.

    Returns a tuple of the encoder and a name-to-module mapping of its
    "shallow", "intermediate", and "deep" layers.
    """
    names = ("shallow", "intermediate", "deep")
    named_modules = [(name, module_factory()) for name in names]
    return enc.MultiLayerEncoder(named_modules), dict(named_modules)
Example #5
0
def test_MultiLayerEncoder_encode(forward_pass_counter):
    torch.manual_seed(0)
    conv_layer = nn.Conv2d(3, 1, 1)
    relu_layer = nn.ReLU(inplace=False)
    image = torch.rand(1, 3, 128, 128)

    mle = enc.MultiLayerEncoder(
        (("count", forward_pass_counter), ("conv", conv_layer), ("relu", relu_layer))
    )

    layers = ("conv", "relu")
    mle.registered_layers.update(layers)
    # encode() caches the encodings, so the call below must not trigger a
    # second forward pass through the counting module.
    mle.encode(image)
    encs = mle(image, layers)

    ptu.assert_allclose(encs[0], conv_layer(image))
    ptu.assert_allclose(encs[1], relu_layer(conv_layer(image)))

    assert forward_pass_counter.count == 1
Example #6
0
def test_MultiOperatorLoss_trim():
    class TestOperator(ops.EncodingOperator):
        """Stub operator that only carries an encoder; never evaluated."""

        def __init__(self, encoder, **kwargs):
            super().__init__(**kwargs)
            self._encoder = encoder

        @property
        def encoder(self):
            return self._encoder

        def forward(self, image):
            pass

    names = [str(idx) for idx in range(3)]
    mle = enc.MultiLayerEncoder([(name, nn.Module()) for name in names])

    named_ops = (("op", TestOperator(mle.extract_encoder(names[0]))),)
    # trim=True should drop every layer deeper than the last one in use.
    loss.MultiOperatorLoss(named_ops, trim=True)

    assert names[0] in mle
    for unused in names[1:]:
        assert unused not in mle
Example #7
0
    def test_MultiLayerEncoder(self):
        named_modules = [(str(position), nn.Module()) for position in range(3)]
        encoder = enc.MultiLayerEncoder(named_modules)

        # Every registered module must be reachable as an attribute of the
        # encoder, identical to the original object.
        for name, module in named_modules:
            self.assertIs(getattr(encoder, name), module)
Example #8
0
def test_MultiLayerEncoder_contains():
    present = (0, 2)
    mle = enc.MultiLayerEncoder([(str(idx), nn.Module()) for idx in present])

    for idx in present:
        assert str(idx) in mle

    # Indices below the maximum that were never registered must be reported
    # as absent.
    for idx in set(range(max(present))) - set(present):
        assert str(idx) not in mle
Example #9
0
    def test_MultiLayerEncoder_contains(self):
        present = (0, 2)
        encoder = enc.MultiLayerEncoder([(str(idx), nn.Module()) for idx in present])

        for idx in present:
            self.assertTrue(str(idx) in encoder)

        # Indices below the maximum that were never registered must be
        # reported as absent.
        for idx in set(range(max(present))) - set(present):
            self.assertFalse(str(idx) in encoder)
Example #10
0
def test_MultiLayerEncoder_named_children_from():
    names = [str(idx) for idx in range(3)]
    named_modules = [(name, nn.Module()) for name in names]
    mle = enc.MultiLayerEncoder(named_modules)

    # By default the starting layer itself is included ...
    assert_named_modules_identical(
        mle.named_children_from(names[-2]), named_modules[1:]
    )

    # ... but it can be excluded explicitly.
    assert_named_modules_identical(
        mle.named_children_from(names[-2], include_first=False), named_modules[2:]
    )
Example #11
0
def test_get_layer_weights_wrong_layers(subtests):
    mle = enc.MultiLayerEncoder((("relu", nn.ReLU()),))

    with subtests.test("layer not in multi_layer_encoder"):
        with pytest.raises(ValueError):
            paper.compute_layer_weights(
                ("not_included",), multi_layer_encoder=mle
            )

    with subtests.test("no out_channels"):
        # nn.ReLU carries no out_channels attribute to derive a weight from.
        with pytest.raises(RuntimeError):
            paper.compute_layer_weights(("relu",), multi_layer_encoder=mle)
Example #12
0
    def test_MultiLayerEncoder_named_children_to(self):
        names = [str(idx) for idx in range(3)]
        named_modules = [(name, nn.Module()) for name in names]
        encoder = enc.MultiLayerEncoder(named_modules)

        # By default the stopping layer is excluded ...
        self.assertNamedChildrenEqual(
            encoder.named_children_to(names[-2]), named_modules[:-2]
        )

        # ... unless include_last is set.
        self.assertNamedChildrenEqual(
            encoder.named_children_to(names[-2], include_last=True),
            named_modules[:-1],
        )
Example #13
0
def test_SingleLayerEncoder_call(input):
    torch.manual_seed(0)
    conv_layer = nn.Conv2d(3, 1, 1)
    relu_layer = nn.ReLU(inplace=False)

    mle = enc.MultiLayerEncoder((("conv", conv_layer), ("relu", relu_layer)))
    single_layer_encoder = enc.SingleLayerEncoder(mle, "relu")

    # Encoding up to "relu" is equivalent to applying conv and relu in turn.
    ptu.assert_allclose(single_layer_encoder(input), relu_layer(conv_layer(input)))
Example #14
0
def test_MultiLayerEncoder_call(input):
    conv_layer = nn.Conv2d(3, 1, 1)
    relu_layer = nn.ReLU(inplace=False)

    mle = enc.MultiLayerEncoder((("conv", conv_layer), ("relu", relu_layer)))

    # Calling with a layer name yields the encoding of the input up to and
    # including that layer.
    ptu.assert_allclose(mle(input, "conv"), conv_layer(input))
    ptu.assert_allclose(mle(input, "relu"), relu_layer(conv_layer(input)))
Example #15
0
def test_compute_layer_weights():
    out_channels = (3, 6, 6)

    named_modules = (
        ("conv1", nn.Conv2d(1, out_channels[0], 3)),
        ("conv2", nn.Conv2d(out_channels[0], out_channels[1], 3)),
        ("pool", nn.MaxPool2d(2)),
    )
    mle = enc.MultiLayerEncoder(named_modules)
    names = tuple(name for name, _ in named_modules)

    weights = paper.compute_layer_weights(names, multi_layer_encoder=mle)
    # Each weight equals 1 / out_channels**2 of the corresponding layer.
    assert weights == pytest.approx([1 / n**2 for n in out_channels])
Example #16
0
def test_MultiLayerEncoder_extract_encoder():
    conv_layer = nn.Conv2d(3, 1, 1)
    relu_layer = nn.ReLU(inplace=False)

    mle = enc.MultiLayerEncoder((("conv", conv_layer), ("relu", relu_layer)))

    extracted = mle.extract_encoder("relu")

    # The result is a SingleLayerEncoder bound to the parent encoder, and
    # its layer is registered for storage.
    assert isinstance(extracted, enc.SingleLayerEncoder)
    assert extracted.multi_layer_encoder is mle
    assert extracted.layer == "relu"
    assert "relu" in mle.registered_layers
Example #17
0
def test_SingleLayerEncoder_call():
    torch.manual_seed(0)
    conv_layer = nn.Conv2d(3, 1, 1)
    relu_layer = nn.ReLU(inplace=False)
    image = torch.rand(1, 3, 128, 128)

    mle = enc.MultiLayerEncoder((("conv", conv_layer), ("relu", relu_layer)))
    single_layer_encoder = enc.SingleLayerEncoder(mle, "conv")

    # Encoding stops at "conv", so the relu layer must not be applied.
    ptu.assert_allclose(single_layer_encoder(image), conv_layer(image))
Example #18
0
def test_MultiLayerEncoder_empty_storage(forward_pass_counter):
    torch.manual_seed(0)
    image = torch.rand(1, 3, 128, 128)

    mle = enc.MultiLayerEncoder((("count", forward_pass_counter),))

    layers = ("count",)
    mle(image, layers, store=True)
    # After clearing the storage, the next call must run the forward pass
    # again instead of serving the cached encoding.
    mle.empty_storage()
    mle(image, layers)

    assert forward_pass_counter.count == 2
    def test_encode(self, input):
        conv_layer = nn.Conv2d(3, 1, 1)
        relu_layer = nn.ReLU(inplace=False)

        mle = enc.MultiLayerEncoder((("conv", conv_layer), ("relu", relu_layer)))

        encs = mle.encode(input, ("conv", "relu"))

        # The encodings come back in the order the layers were requested.
        ptu.assert_allclose(encs[0], conv_layer(input))
        ptu.assert_allclose(encs[1], relu_layer(conv_layer(input)))
Example #20
0
    def test_MultiLayerEncoder_empty_storage(self):
        torch.manual_seed(0)
        counter = ForwardPassCounter()
        image = torch.rand(1, 3, 128, 128)

        encoder = enc.MultiLayerEncoder((("count", counter), ))

        layers = ("count", )
        encoder(image, layers, store=True)
        # Clearing the storage forces a second forward pass on the next call
        # instead of serving the cached encoding.
        encoder.empty_storage()
        encoder(image, layers)

        self.assertEqual(counter.count, 2)
Example #21
0
    def test_MultiLayerEncoder_extract_encoder(self):
        conv_layer = nn.Conv2d(3, 1, 1)
        relu_layer = nn.ReLU(inplace=False)

        encoder = enc.MultiLayerEncoder(
            (("conv", conv_layer), ("relu", relu_layer))
        )

        extracted = encoder.extract_encoder("relu")

        # The result is a SingleLayerEncoder bound to the parent encoder ...
        self.assertIsInstance(extracted, enc.SingleLayerEncoder)
        self.assertIs(extracted.multi_layer_encoder, encoder)
        self.assertEqual(extracted.layer, "relu")

        # ... and the extracted layer is registered for storage.
        self.assertTrue("relu" in encoder.registered_layers)
Example #22
0
    def test_MultiLayerEncoder_call(self):
        torch.manual_seed(0)
        conv_layer = nn.Conv2d(3, 1, 1)
        relu_layer = nn.ReLU(inplace=False)
        pool_layer = nn.MaxPool2d(2)
        image = torch.rand(1, 3, 128, 128)

        encoder = enc.MultiLayerEncoder(
            (("conv", conv_layer), ("relu", relu_layer), ("pool", pool_layer))
        )

        encs = encoder(image, ("conv", "pool"))

        self.assertTensorAlmostEqual(encs[0], conv_layer(image))
        # The "pool" encoding includes all preceding layers, even ones that
        # were not requested explicitly.
        self.assertTensorAlmostEqual(
            encs[1], pool_layer(relu_layer(conv_layer(image)))
        )
Example #23
0
    def test_MultiLayerEncoder_trim(self):
        names = [str(idx) for idx in range(3)]
        named_modules = [(name, nn.Module()) for name in names]
        encoder = enc.MultiLayerEncoder(named_modules)

        for name, module in named_modules:
            self.assertIs(getattr(encoder, name), module)

        cutoff = 1
        encoder.trim((names[cutoff], ))

        # Layers up to and including the cutoff survive ...
        for name, module in named_modules[:cutoff + 1]:
            self.assertIs(getattr(encoder, name), module)

        # ... while deeper layers are removed entirely.
        for name in names[cutoff + 1:]:
            with self.assertRaises(AttributeError):
                getattr(encoder, name)
Example #24
0
def test_MultiLayerEncoder_trim_layers():
    names = [str(idx) for idx in range(3)]
    named_modules = [(name, nn.Module()) for name in names]
    mle = enc.MultiLayerEncoder(named_modules)

    for name, module in named_modules:
        assert getattr(mle, name) is module

    cutoff = 1
    # Without an explicit argument, trim() falls back to the registered layers.
    mle.registered_layers.update(names[:cutoff + 1])
    mle.trim()

    # Layers up to and including the deepest registered one survive ...
    for name, module in named_modules[:cutoff + 1]:
        assert getattr(mle, name) is module

    # ... while deeper layers are removed entirely.
    for name in names[cutoff + 1:]:
        with pytest.raises(AttributeError):
            getattr(mle, name)
Example #25
0
def test_MultiLayerEncoder_extract_deepest_layer():
    names = [str(idx) for idx in range(3)]
    mle = enc.MultiLayerEncoder([(name, nn.Module()) for name in names])

    # The deepest layer is determined by position in the encoder, not by the
    # order in which the layers are passed in.
    assert mle.extract_deepest_layer(names) == names[-1]
    assert mle.extract_deepest_layer(sorted(names, reverse=True)) == names[-1]

    # Asking for a layer that was removed is an error ...
    del mle._modules[names[-1]]
    with pytest.raises(ValueError):
        mle.extract_deepest_layer(names)

    # ... but the remaining layers still resolve correctly.
    remaining = names[:-1]
    assert mle.extract_deepest_layer(remaining) == remaining[-1]
Example #26
0
def test_MultiLayerEncodingOperator():
    class TestOperator(ops.EncodingRegularizationOperator):
        """Stub operator; the score logic is irrelevant for this test."""

        def input_enc_to_repr(self, image):
            pass

        def calculate_score(self, input_repr):
            pass

    def get_encoding_op(encoder, score_weight):
        return TestOperator(encoder, score_weight=score_weight)

    names = [str(index) for index in range(3)]
    mle = enc.MultiLayerEncoder([(name, nn.Module()) for name in names])

    multi_layer_op = ops.MultiLayerEncodingOperator(mle, names, get_encoding_op)

    # One operator per layer, each wrapping a SingleLayerEncoder that is
    # bound to the shared multi-layer encoder.
    for name in names:
        layer_op = getattr(multi_layer_op, name)
        assert isinstance(layer_op.encoder, enc.SingleLayerEncoder)
        assert layer_op.encoder.layer == name
        assert layer_op.encoder.multi_layer_encoder is mle
Example #27
0
# NOTE(review): batch_size, num_channels, height, width, device, and
# fdifftimeit are presumably defined earlier in this tutorial script — they
# are not visible in this chunk.
input = torch.rand((batch_size, num_channels, height, width), device=device)


########################################################################################
# As a toy example to showcase the :class:`~pystiche.enc.MultiLayerEncoder`
# capabilities, we will use a CNN with three layers.

conv = nn.Conv2d(num_channels, num_channels, 3, padding=1)
relu = nn.ReLU(inplace=False)
pool = nn.MaxPool2d(2)

modules = [("conv", conv), ("relu", relu), ("pool", pool)]

# Build both containers from the same (name, module) pairs so they are
# directly comparable.
seq = nn.Sequential(OrderedDict(modules)).to(device)
mle = enc.MultiLayerEncoder(modules).to(device)
print(mle)


########################################################################################
# Before we dive into the additional functionalities of the
# :class:`~pystiche.enc.MultiLayerEncoder` we perform a smoke test and assert that it
# indeed does the same as an :class:`torch.nn.Sequential` with the same layers.

assert torch.allclose(mle(input), seq(input))
print(fdifftimeit(lambda: seq(input), lambda: mle(input)))


########################################################################################
# As we saw, the :class:`~pystiche.enc.MultiLayerEncoder` produces the same output as
# an :class:`torch.nn.Sequential` but is slower. In the following we will learn what