Example 1
0
def test_add_flattened():
    """add_flattened accepts arbitrarily nested lists and flattens them in order."""
    adders = [ConstantAdder(value=float(v)) for v in range(1, 7)]
    a, b, c, d, e, f = adders
    seq = Sequential()
    seq.add_flattened([a, [b, [c, d], e], f])
    # The container ends up holding the members in flat, depth-first order.
    assert list(seq) == adders
Example 2
0
def test_nested_access():
    """Nested containers are reachable by path and scope variable names."""
    # Build three nested scoped containers holding a single leaf module.
    with Sequential(name='alpha') as outer:
        with Sequential(name='beta') as middle:
            with Sequential(name='gamma') as inner:
                node = ConstantAdder(value=42., name='leaf')
                inner += node
            middle += inner
        outer += middle

    outer(0.)

    # Path-style lookup and variable scoping both reflect the nesting.
    assert outer['beta/gamma/leaf'] == node
    assert node.variable.name == 'alpha/beta/gamma/leaf/value:0'
Example 3
0
    def configure(self, config: Config):
        """Build the main branch (a stack of depthwise separable convolutions,
        one per entry in ``config.depths``) and the skip connection.

        NOTE(review): the generator passed to ``Sequential`` yields 2-tuples of
        ``(Activation-or-None, DepthwiseSeparableConv)`` — this assumes
        ``Sequential`` flattens nested iterables and drops ``None`` entries;
        confirm against ``Sequential``/``add_flattened`` semantics.
        """
        # Determine whether to apply an activation to the input, or to
        # place two of them within the depthwise separable convolutions.
        pre_activate = (config.skip_connection != SkipConnection.NONE)
        # When pre-activating, strip the activation function from the config
        # used inside the convolutions (presumably normalization remains —
        # TODO confirm `replace(activation=None)` semantics).
        conv_activation = (config.activation.replace(
            activation=None) if pre_activate else config.activation)

        self.main_branch = Sequential((
            # A fresh pre-activation (or None) is emitted per block.
            Activation(
                **config.activation.activation) if pre_activate else None,
            DepthwiseSeparableConv(
                num_outputs=depth,
                kernel_size=config.kernel_size,
                depth_multiplier=1,
                # Only use non-unitary stride for the final convolution
                strides=(config.stride if idx == (len(config.depths) -
                                                  1) else 1),
                dilation_rate=config.dilation_rate,
                padding=SamePadding(aligned=config.use_aligned_padding),
                use_bias=(not config.activation.absorbs_bias),
                activation=NormalizingActivation(**conv_activation),
                name='conv')) for idx, depth in enumerate(config.depths))

        # Skip connection
        self.skip_connection = SkipConnection(
            config.skip_connection).create(config=config)
Example 4
0
    def configure(self, config: Config):
        """Set up one channel projection per secondary feature map and the
        stack of refining convolutions."""
        self.config = config

        def make_projection():
            # Fresh 1x1 convolution projecting a secondary feature map down
            # to the common projection depth.
            return Conv2D(
                filters=config.projection_depth,
                kernel_size=1,
                use_bias=(not config.activation.absorbs_bias),
                activation=NormalizingActivation(**config.activation),
                name='secondary_projection')

        def make_refining_unit():
            # Fresh depthwise-separable convolution forming one refining step.
            return DepthwiseSeparableConv(
                num_outputs=config.decoder_depth,
                kernel_size=config.decoder_kernel_size,
                depth_multiplier=1,
                strides=1,
                dilation_rate=1,
                use_bias=(not config.activation.absorbs_bias),
                activation=NormalizingActivation(**config.activation),
                padding=SamePadding(aligned=config.use_aligned_padding),
                name='refiner')

        # Each secondary feature map is first subjected to a channel projection
        self.secondary_projections = [
            make_projection()
            for _ in range(config.num_secondary_feature_maps)
        ]

        # The refiner chains the requested number of identical units.
        self.refiner = Sequential(
            make_refining_unit() for _ in range(config.num_refining_units))
Example 5
0
def test_accessors():
    """Index, key, path-division and attribute access all resolve members."""
    first = ConstantAdder(value=42., name='alpha')
    second = ConstantAdder(value=16., name='beta')
    seq = Sequential([first, second])

    assert len(seq) == 2
    assert seq[0] is first
    assert seq[1] is second
    assert seq['alpha'] is first
    assert seq['beta'] is second
    assert (seq / 'alpha') is first
    assert (seq / 'beta') is second
    assert seq.alpha is first
    assert seq.beta is second
Example 6
0
        def normalizing_activation_factory():
            """Build a Sequential of normalization then activation (either may
            be absent); the order flips when the config requests it."""
            normalize = (Normalization(**config.normalization)
                         if config.normalization is not None else None)
            activate = (Activation(**config.activation)
                        if config.activation is not None else None)

            stages = (normalize, activate)
            if config.activate_before_normalize:
                stages = reversed(stages)

            return Sequential(stages)
Example 7
0
def test_iteration():
    """Iterating the container yields the members in insertion order."""
    members = [ConstantAdder(value=42.), ConstantAdder(value=16.)]
    seq = Sequential(members)
    assert list(seq) == members
Example 8
0
def test_no_scoping_guard():
    """Using an unscoped Sequential as a context manager must fail."""
    with pytest.raises(RuntimeError), Sequential() as seq:
        seq += ConstantAdder(value=42., name='beta')
Example 9
0
def test_no_scoping():
    """Without an enclosing scope a member keeps its bare variable name."""
    member = ConstantAdder(value=42., name='beta')
    Sequential([member])(0.)
    assert member.variable.name == 'beta/value:0'
Example 10
0
def test_scoping_explicit():
    """scoped=True prefixes member variables with the default scope name."""
    with Sequential(scoped=True) as container:
        member = ConstantAdder(value=42., name='beta')
        container += member
    container(0.)
    assert member.variable.name == 'sequential/beta/value:0'
Example 11
0
def test_scoping():
    """A named Sequential scopes its members' variables under its name."""
    with Sequential(name='alpha') as container:
        member = ConstantAdder(value=42., name='beta')
        container += member
    container(0.)
    assert member.variable.name == 'alpha/beta/value:0'
Example 12
0
def test_basic():
    """Members added via += (single module or list) are applied in order."""
    with Sequential(name='composite') as pipeline:
        pipeline += ConstantAdder(value=42.)
        pipeline += ConstantAdder(value=16.)
        pipeline += [ConstantAdder(value=1.), ConstantAdder(value=2.)]
    # The adders contribute 42 + 16 + 1 + 2 on top of the input of 12.
    assert pipeline(12.).numpy() == 73.
Example 13
0
def get_simple_sequential():
    """Return three named adders plus a Sequential holding them in order."""
    members = [
        ConstantAdder(value=float(index), name=label)
        for index, label in enumerate(('alpha', 'beta', 'gamma'), start=1)
    ]
    return (*members, Sequential(members))