Ejemplo n.º 1
0
def FullyConnected(size=None, activation='rel', name=None):
    """Create a Fully Connected (inner product) layer.

    When ``size`` is omitted, the layer implementation's own default applies.
    """
    kwargs = {'name': name, 'activation': activation}
    if size is not None:
        kwargs['size'] = size
    return ConstructionWrapper.create(FullyConnectedLayerImpl, **kwargs)
Ejemplo n.º 2
0
def test_get_layer_description():
    foo = ConstructionWrapper.create('FooLayerImpl', name='foo')
    bar = ConstructionWrapper.create('FooLayerImpl', name='bar')
    baz = ConstructionWrapper.create('FooLayerImpl', name='baz')
    _ = foo >> bar
    _ = foo >> baz
    expected = {
        '@type': 'Foo',
        '@outgoing_connections': {
            'default': ['bar.default', 'baz.default']
        }
    }
    assert get_layer_description(foo.layer) == expected
Ejemplo n.º 3
0
def test_get_layer_description_named_inputs_outputs():
    foo = ConstructionWrapper.create('FooLayerImpl', name='foo')
    bar = ConstructionWrapper.create('FooLayerImpl', name='bar')
    baz = ConstructionWrapper.create('FooLayerImpl', name='baz')
    _ = foo - 'out1' >> bar
    _ = foo >> 'A' - baz
    expected = {
        '@type': 'Foo',
        '@outgoing_connections': {
            'default': ['baz.A'],
            'out1': ['bar.default']
        }
    }
    assert get_layer_description(foo.layer) == expected
Ejemplo n.º 4
0
def CTC(name=None):
    """Create a CTC layer combining a softmax loss function with the CTC
    algorithm.

    The softmax activation function is applied to the 'default' input and the
    resulting per-class probabilities are placed in 'predictions'. Sequence
    labels are then read from the 'targets' input and used to compute the CTC
    loss, which is stored in the 'loss' output (indexed by sequence, but not
    by time). Suitable deltas are computed and backpropagated towards the
    'default' input.

    Note that the labels must be
    - in the range 1 ... # of classes *inclusive*; 0 may not be used as a
      label since it stands for the CTC 'blank' node.
    - of the size (time, batchsize, 1). This is a technical requirement of
      brainstorm. Where the label sequence is shorter, zeros are to be used
      for padding the time axis.

    Consequently, the layer size passed to bs.tools.get_in_out_layers_for_ctc
    (if you create your system that way) must be # of classes + 1.

    IMPORTANT WARNING:
        This layer currently considers the input data mask to be boolean
        (i.e. no weights are allowed), and it should have the form
        1 ... 1 0 ... 0 (only the final unbroken sequence of zeros is
        removed).

    WARNING:
        This layer does not compute derivatives wrt the 'targets' input.
        It also does not use the deltas coming in from the 'predictions'.
    """
    wrapper = ConstructionWrapper.create(CTCLayerImpl, name=name)
    return wrapper
Ejemplo n.º 5
0
def Elementwise(activation='rel', name=None):
    """Create an Elementwise layer.

    Applies a unit-wise function to its inputs, nothing more.
    """
    wrapper = ConstructionWrapper.create(ElementwiseLayerImpl,
                                         activation=activation, name=name)
    return wrapper
Ejemplo n.º 6
0
def Dropout(drop_prob=0.5, name=None):
    """Create a Dropout layer.

    ``drop_prob`` is the probability of a unit being dropped, i.e. 0
    """
    return ConstructionWrapper.create(DropoutLayerImpl, name=name,
                                      drop_prob=drop_prob)
Ejemplo n.º 7
0
def Input(out_shapes):
    """Create an Input layer.

    Special input layer type that provides access to external data. The
    required ``out_shapes`` argument specifies the names and shapes of all
    external inputs.
    """
    wrapper = ConstructionWrapper.create(InputLayerImpl,
                                         out_shapes=out_shapes)
    return wrapper
Ejemplo n.º 8
0
def Pooling2D(kernel_size, type='max', stride=(1, 1), padding=0, name=None):
    """Create a 2D Pooling layer."""
    options = dict(kernel_size=kernel_size, type=type, stride=stride,
                   padding=padding, name=name)
    return ConstructionWrapper.create(Pooling2DLayerImpl, **options)
Ejemplo n.º 9
0
def test_layer_with_kwargs():
    wrapper = ConstructionWrapper.create('FooLayerImpl', name='foo', a=2, b=3)
    expected = {
        '@type': 'Foo',
        '@outgoing_connections': {},
        'a': 2,
        'b': 3
    }
    assert get_layer_description(wrapper.layer) == expected
Ejemplo n.º 10
0
def HighwayRNNCoupledGates(size,
                           activation='tanh',
                           name=None,
                           recurrence_depth=1):
    """Create a Highway RNN layer with coupled gates.

    The previous docstring ("Create a Simple Recurrent layer.") was copied
    from ``Recurrent`` and did not describe this layer; this constructor
    builds a ``HighwayRNNCoupledGatesLayerImpl``.

    :param size: size of the layer
    :param activation: name of the activation function (default 'tanh')
    :param name: optional name for the layer
    :param recurrence_depth: depth of the recurrence
        (NOTE(review): semantics defined by the layer impl — confirm there)
    """
    return ConstructionWrapper.create(HighwayRNNCoupledGatesLayerImpl,
                                      size=size,
                                      name=name,
                                      activation=activation,
                                      recurrence_depth=recurrence_depth)
Ejemplo n.º 11
0
def Convolution2D(num_filters, kernel_size, stride=(1, 1), padding=0,
                  activation='rel', name=None):
    """Create a 2D Convolution layer."""
    options = dict(num_filters=num_filters,
                   kernel_size=kernel_size,
                   stride=stride,
                   padding=padding,
                   activation=activation,
                   name=name)
    return ConstructionWrapper.create(Convolution2DLayerImpl, **options)
def BatchNorm(name=None, decay=0.9, epsilon=1.0e-5):
    """Create a BatchNormalization layer.

    Batch normalization is applied over the last (right-most) dimension, so
    the layer can be used with both fully connected and convolutional layers
    (but only with data in NHWC format).
    """
    return ConstructionWrapper.create(BatchNormLayerImpl, epsilon=epsilon,
                                      decay=decay, name=name)
def DeltasScaling(factor, name=None):
    """Create a DeltasScaling layer.

    The forward pass is a no-op; during the backward pass the deltas flowing
    back are multiplied by the given factor.

    This makes it possible to invert the deltas and set up an adversarial
    branch of the network.
    """
    wrapper = ConstructionWrapper.create(DeltasScalingLayerImpl,
                                         factor=factor, name=name)
    return wrapper
def BatchNorm(name=None, decay=0.9, epsilon=1.0e-5):
    """Create a BatchNormalization layer.

    Normalizes over the last (right-most) dimension, and is therefore usable
    after both fully connected and convolutional layers (NHWC data only).
    """
    options = {'name': name, 'decay': decay, 'epsilon': epsilon}
    return ConstructionWrapper.create(BatchNormLayerImpl, **options)
Ejemplo n.º 15
0
def DeltasScaling(factor, name=None):
    """Create a DeltasScaling layer.

    Does nothing on the forward pass; on the backward pass it scales the
    deltas flowing back by ``factor``. Useful for inverting deltas to set up
    an adversarial branch of the network.
    """
    opts = dict(name=name, factor=factor)
    return ConstructionWrapper.create(DeltasScalingLayerImpl, **opts)
Ejemplo n.º 16
0
def test_generate_architecture():
    inp = ConstructionWrapper.create('InputLayerImpl')
    bar = ConstructionWrapper.create('FooLayerImpl', name='bar')
    baz = ConstructionWrapper.create('FooLayerImpl', name='baz')
    out = ConstructionWrapper.create('FooLayerImpl', name='out')
    _ = inp - 'foo' >> bar >> 'A' - out
    _ = inp - 'bar' >> baz >> 'B' - out

    expected = {
        'Input': {
            '@type': 'Input',
            '@outgoing_connections': {
                'foo': ['bar.default'],
                'bar': ['baz.default'],
            }
        },
        'bar': {
            '@type': 'Foo',
            '@outgoing_connections': {
                'default': ['out.A'],
            }
        },
        'baz': {
            '@type': 'Foo',
            '@outgoing_connections': {
                'default': ['out.B'],
            }
        },
        'out': {
            '@type': 'Foo',
            '@outgoing_connections': {}
        }
    }
    # Architecture generation must agree no matter which layer we start from.
    for wrapper in (inp, bar, baz):
        assert generate_architecture(wrapper.layer) == expected
Ejemplo n.º 17
0
def SigmoidCE(name=None):
    """Create a sigmoid layer with integrated Binomial Cross Entropy loss.

    The sigmoid activation function is applied to the 'default' input, and
    the resulting per-label probabilities are placed in 'probabilities'.

    A binary vector taken from the 'targets' input is used to compute the
    binomial cross-entropy loss; the resulting losses are stored in the
    'loss' output.

    WARNING:
        This layer does not compute derivatives wrt the 'targets' input.
        It also does not use the deltas coming in from the 'probabilities'.
    """
    wrapper = ConstructionWrapper.create(SigmoidCELayerImpl, name=name)
    return wrapper
Ejemplo n.º 18
0
def SigmoidCE(name=None):
    """Create a sigmoid layer with integrated Binomial Cross Entropy loss.

    Applies sigmoid to the 'default' input, storing the per-label
    probabilities in 'predictions'. The 'targets' input (a binary vector) is
    used to compute the binomial cross-entropy loss, which is stored in the
    'loss' output.

    WARNING:
        Derivatives wrt the 'targets' input are not computed, and incoming
        deltas from 'predictions' are not used.
    """
    return ConstructionWrapper.create(SigmoidCELayerImpl, name=name)
Ejemplo n.º 19
0
def SquaredError(name=None):
    """
    Create a SquaredError layer computing half of the element-wise squared
    difference between the `default` and `targets` inputs. The factor of
    half keeps it consistent with common machine learning texts and
    resources.

    Produces outputs named `predictions` and `loss`; the `loss` output can
    be connected to a ``Loss`` layer for typical regression training.

    Like ``SigmoidCE`` and ``SoftmaxCE``, this layer computes no gradients
    w.r.t. the `targets` input and ignores incoming deltas w.r.t. the
    `predictions` output.
    """
    wrapper = ConstructionWrapper.create(SquaredErrorLayerImpl, name=name)
    return wrapper
Ejemplo n.º 20
0
def SquaredError(name=None):
    """
    Create a SquaredError layer, which computes half of the squared
    difference between the inputs `default` and `targets` element-wise.
    Using half the squared difference is consistent with common machine
    learning texts and resources.

    Outputs are named `predictions` and `loss`. Connecting the `loss`
    output to a ``Loss`` layer gives typical network training for a
    regression task.

    This layer behaves like ``SigmoidCE`` and ``SoftmaxCE`` in that it does
    not compute gradients w.r.t. the `targets` input and ignores incoming
    deltas w.r.t. the `predictions` output.
    """
    return ConstructionWrapper.create(SquaredErrorLayerImpl, name=name)
def BinomialCrossEntropy(name=None):
    """Create a Binomial Cross Entropy Layer.

    Computes the Binomial Cross Entropy between outputs and **binary**
    targets. Cross entropy is by definition asymmetric, so the network
    outputs arrive on the 'default' input and the binary targets on the
    'targets' input. Deltas are calculated for the default inputs only.

    Only **binary** targets and outputs in the range 0 to 1 are supported;
    for outputs outside that range or non-binary targets the result is
    undefined.
    """
    wrapper = ConstructionWrapper.create(BinomialCrossEntropyLayerImpl,
                                         name=name)
    return wrapper
Ejemplo n.º 22
0
def SoftmaxFiddle(name=None):
    """Create a softmax layer with integrated Multinomial Cross Entropy loss.

    Applies softmax to the 'default' input and writes the per-class
    probabilities to 'predictions'. The 'targets' input (typically a one-hot
    vector) is used to compute the multinomial cross-entropy loss, which is
    stored in the 'loss' output.

    For pixel/voxel-wise classification, the `channel` dimension must be
    right-most (known as NHWC or NDHWC format).

    WARNING:
        No derivatives are computed wrt the 'targets' input, and the deltas
        coming in from 'predictions' are not used.
    """
    wrapper = ConstructionWrapper.create(SoftmaxFiddleLayerImpl, name=name)
    return wrapper
Ejemplo n.º 23
0
def SoftmaxCE(name=None):
    """Create a softmax layer with integrated Multinomial Cross Entropy loss.

    The softmax activation function is applied to the 'default' input, and
    the per-class probabilities are placed in 'probabilities'. Class indices
    (0-based) taken from the 'targets' input are used to compute the
    multinomial cross-entropy loss, which is stored in the 'loss' output.

    For pixel/voxel-wise classification, the `channel` dimension must be
    right-most (known as NHWC or NDHWC format).

    WARNING:
        This layer does not compute derivatives wrt the 'targets' input.
        It also does not use the deltas coming in from the 'probabilities'.
    """
    wrapper = ConstructionWrapper.create(SoftmaxCELayerImpl, name=name)
    return wrapper
Ejemplo n.º 24
0
def test_name_unconnected():
    first = ConstructionWrapper.create('FooLayerImpl', name='bar')
    second = ConstructionWrapper.create('FooLayerImpl', name='bar')
    # Unconnected layers keep their given name without any suffix.
    for wrapper in (first, second):
        assert wrapper.layer.name == 'bar'
Ejemplo n.º 25
0
def Clockwork(size, timing, activation='tanh', name=None):
    """Create a Clockwork layer."""
    return ConstructionWrapper.create(ClockworkLayerImpl, size=size,
                                      timing=timing, activation=activation,
                                      name=name)
Ejemplo n.º 26
0
def test_name_connected():
    first = ConstructionWrapper.create('FooLayerImpl', name='bar')
    second = ConstructionWrapper.create('FooLayerImpl', name='bar')
    _ = first >> second
    # Connecting two same-named layers disambiguates them with suffixes.
    assert first.layer.name == 'bar_1'
    assert second.layer.name == 'bar_2'
Ejemplo n.º 27
0
def ResidualLayer(size, name=None):
    """Create a Residual layer."""
    wrapper = ConstructionWrapper.create(ResidualLayerImpl, size=size,
                                         name=name)
    return wrapper
Ejemplo n.º 28
0
def Lstm(size, activation='tanh', name=None):
    """Create an LSTM layer."""
    opts = dict(size=size, name=name, activation=activation)
    return ConstructionWrapper.create(LstmLayerImpl, **opts)
Ejemplo n.º 29
0
def Recurrent(size, activation='tanh', name=None):
    """Create a Simple Recurrent layer."""
    opts = dict(size=size, name=name, activation=activation)
    return ConstructionWrapper.create(RecurrentLayerImpl, **opts)
Ejemplo n.º 30
0
def Mask(name=None):
    """Create a Mask layer."""
    wrapper = ConstructionWrapper.create(MaskLayerImpl, name=name)
    return wrapper
Ejemplo n.º 31
0
def Merge(name=None):
    """Create a layer merging two inputs into one along the last dim."""
    wrapper = ConstructionWrapper.create(MergeLayerImpl, name=name)
    return wrapper
Ejemplo n.º 32
0
def test_raises_on_invalid_layer_name():
    # A layer name containing spaces/periods must be rejected.
    with pytest.raises(NetworkValidationError):
        _ = ConstructionWrapper.create('layertype', name='also invalid.')
Ejemplo n.º 33
0
def L2Decay(name=None):
    """Add L2 regularization to the activations of a layer."""
    wrapper = ConstructionWrapper.create(L2DecayLayerImpl, name=name)
    return wrapper
Ejemplo n.º 34
0
def Merge(name=None):
    """Create a layer that merges two inputs into one along the last dim."""
    result = ConstructionWrapper.create(MergeLayerImpl, name=name)
    return result
Ejemplo n.º 35
0
def Loss(importance=1.0, name=None):
    """Create a Loss layer."""
    return ConstructionWrapper.create(LossLayerImpl, name=name,
                                      importance=importance)
Ejemplo n.º 36
0
def SquaredLoss(name=None):
    """Create a SquaredLoss layer computing half of the squared difference."""
    wrapper = ConstructionWrapper.create(SquaredLossLayerImpl, name=name)
    return wrapper
def SquaredDifference(name=None):
    """Create a Squared Difference layer."""
    wrapper = ConstructionWrapper.create(SquaredDifferenceLayerImpl,
                                         name=name)
    return wrapper
Ejemplo n.º 38
0
def Pooling2D(kernel_size, type='max', stride=(1, 1), padding=0, name=None):
    """Create a 2D Pooling layer."""
    return ConstructionWrapper.create(Pooling2DLayerImpl, name=name,
                                      padding=padding, stride=stride,
                                      type=type, kernel_size=kernel_size)
Ejemplo n.º 39
0
def test_raises_on_invalid_layer_type():
    # A type string with illegal characters must be rejected.
    with pytest.raises(NetworkValidationError):
        _ = ConstructionWrapper.create('not valid!')
Ejemplo n.º 40
0
def Recurrent(size, activation='tanh', name=None):
    """Create a Simple Recurrent layer."""
    wrapper = ConstructionWrapper.create(RecurrentLayerImpl, size=size,
                                         activation=activation, name=name)
    return wrapper
Ejemplo n.º 41
0
def test_constructor():
    wrapper = ConstructionWrapper.create('FooLayerImpl')
    # The 'LayerImpl' suffix is stripped to form the layer type.
    assert wrapper.layer.layer_type == 'Foo'
    assert repr(wrapper) == "<Layer: 'default' - Foo - 'default'>"
Ejemplo n.º 42
0
def Square(name=None):
    """Create a layer that squares its inputs elementwise."""
    wrapper = ConstructionWrapper.create(SquareLayerImpl, name=name)
    return wrapper
Ejemplo n.º 43
0
def Square(name=None):
    """Create an elementwise squaring layer."""
    result = ConstructionWrapper.create(SquareLayerImpl, name=name)
    return result