Example #1
def mask_layer(spec):
    layer = MaskLayerImpl(
        'MaskLayer', {
            'default': BufferStructure('T', 'B', 3, 2),
            'mask': BufferStructure('T', 'B', 1)
        }, NO_CON, NO_CON)
    return layer, spec
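
A note on the constructor arguments (a sketch, assuming brainstorm's BufferStructure semantics): the leading 'T' and 'B' stand for the time and batch axes, and the remaining integers are the feature dimensions, so BufferStructure('T', 'B', 3, 2) describes a time x batch x 3 x 2 buffer.

# Hypothetical illustration; the attribute names are assumptions here,
# though feature_shape and feature_size appear in the examples below.
s = BufferStructure('T', 'B', 3, 2)
assert s.feature_shape == (3, 2)   # trailing (feature) dimensions
assert s.feature_size == 6         # product of the feature dimensions
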
Example #2
def merge(spec):
    in_shapes = {'inputs_1': BufferStructure('T', 'B', 3, 2),
                 'inputs_2': BufferStructure('T', 'B', 3, 4)}

    layer = MergeLayerImpl('Merge',
                           in_shapes, NO_CON, NO_CON)
    return layer, spec
Example #3
def squared_difference_layer(spec):
    in_shapes = {'inputs_1': BufferStructure('T', 'B', 3, 2),
                 'inputs_2': BufferStructure('T', 'B', 3, 2)
                 }

    layer = SquaredDifferenceLayerImpl('SquaredDifferenceLayer',
                                       in_shapes, NO_CON, NO_CON)
    return layer, spec
Example #4
def highway_layer(spec):
    in_shapes = {
        'H': BufferStructure('T', 'B', 2, 3),
        'T': BufferStructure('T', 'B', 2, 3),
        'x': BufferStructure('T', 'B', 2, 3)
    }
    layer = HighwayLayerImpl('HighwayLayer', in_shapes, NO_CON, NO_CON)
    return layer, spec
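
For reference, a highway layer combines these three inputs with the standard highway-network gating formula; whether this implementation follows it exactly is an assumption, but the identical shapes above are consistent with it:

import numpy as np

H = np.random.rand(5, 4, 2, 3)       # candidate activations, (T, B, 2, 3)
T_gate = np.random.rand(5, 4, 2, 3)  # transform gate in [0, 1]
x = np.random.rand(5, 4, 2, 3)       # the carried input
y = H * T_gate + x * (1 - T_gate)    # elementwise highway combination
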
Example #5
def squared_error_layer(spec):
    in_shapes = {'default': BufferStructure('T', 'B', 3, 2),
                 'targets': BufferStructure('T', 'B', 3, 2)
                 }

    layer = SquaredErrorLayerImpl('SquaredErrorLayer',
                                  in_shapes, NO_CON, NO_CON)
    return layer, spec
Example #6
    def setup(self, kwargs, in_shapes):
        outputs = OrderedDict()
        outputs['loss'] = BufferStructure('T', 'B', 1)

        parameters = OrderedDict()
        internals = OrderedDict()
        internals['tmp'] = in_shapes['default']
        internals['dsq_activations'] = BufferStructure(
            *in_shapes['default'].shape, is_backward_only=True)

        return outputs, parameters, internals
Example #7
def test_combine_input_sizes_tuples():
    assert combine_buffer_structures([BufferStructure(1, 4)]) == \
           BufferStructure(1, 4)

    assert combine_buffer_structures([BufferStructure(4, 1),
                                      BufferStructure(4, 3),
                                      BufferStructure(4, 6)])\
        == BufferStructure(4, 10)

    assert combine_buffer_structures([BufferStructure(4, 3, 2),
                                      BufferStructure(4, 3, 3),
                                      BufferStructure(4, 3, 2)]) == \
        BufferStructure(4, 3, 7)
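
What this test pins down is concatenation along the last feature dimension: all leading dimensions must match and the trailing ones add up (1 + 3 + 6 = 10 and 2 + 3 + 2 = 7 above). A rough numpy analogue, purely for intuition:

import numpy as np

a, b, c = np.zeros((4, 1)), np.zeros((4, 3)), np.zeros((4, 6))
combined = np.concatenate([a, b, c], axis=-1)
assert combined.shape == (4, 10)
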
Example #8
def sigmoid_ce_layer(spec):
    time_steps = spec.get('time_steps', 3)
    batch_size = spec.get('batch_size', 2)
    feature_dim = (2, 3, 5)
    target_shape = (time_steps, batch_size) + feature_dim
    targets = np.random.randint(0, 2, target_shape)
    in_shapes = {'default': BufferStructure('T', 'B', *feature_dim),
                 'targets': BufferStructure('T', 'B', *target_shape[2:])}

    layer = SigmoidCELayerImpl('SigmoidCELayer', in_shapes, NO_CON,
                               NO_CON)

    spec['targets'] = targets
    return layer, spec
Example #9
    def setup(self, kwargs, in_shapes):
        self.activation = kwargs.get('activation', 'tanh')
        self.size = kwargs.get('size', in_shapes['default'].feature_size)

        if not isinstance(self.size, int):
            raise LayerValidationError(
                'size must be int but was {}'.format(self.size))

        in_size = self.in_shapes['default'].feature_size

        outputs = OrderedDict()
        outputs['default'] = BufferStructure('T', 'B', self.size, context_size=1)

        parameters = OrderedDict()
        parameters['W'] = BufferStructure(self.size, in_size)
        parameters['R'] = BufferStructure(self.size, self.size)
        parameters['bias'] = BufferStructure(self.size)
        parameters['timing'] = BufferStructure(self.size)

        internals = OrderedDict()
        internals['Ha'] = BufferStructure('T', 'B', self.size, context_size=1)
        internals['dHa'] = BufferStructure('T', 'B', self.size, context_size=1,
                                           is_backward_only=True)
        internals['dHb'] = BufferStructure('T', 'B', self.size, context_size=1,
                                           is_backward_only=True)

        return outputs, parameters, internals
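
The parameter buffers declared above fix the layer's parameter count; a quick sketch of the arithmetic (example values, names matching the setup above):

size, in_size = 4, 6
n_params = (size * in_size   # W: input-to-hidden weights
            + size * size    # R: recurrent weights
            + size           # bias
            + size)          # timing, one entry per unit
assert n_params == 24 + 16 + 4 + 4
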
Example #10
def test_layer_constructor():
    a = Connection('l', 'default', 'A', 'default')
    b = Connection('l', 'default', 'B', 'default')
    c = Connection('l', 'default', 'C', 'default')

    l = FullyConnectedLayerImpl('LayerName',
                                {'default': BufferStructure('T', 'B', 5)}, {c},
                                {a, b},
                                size=8)
    expected = {'default': BufferStructure('T', 'B', 8)}
    assert l.out_shapes == expected
    assert l.in_shapes == {'default': BufferStructure('T', 'B', 5)}
    assert l.incoming == {c}
    assert l.outgoing == {a, b}
    assert l.kwargs == {'size': 8}
Example #11
def fully_connected_layer_2d(spec):
    in_shapes = {'default': BufferStructure('T', 'B', 2, 3)}
    layer = FullyConnectedLayerImpl('FullyConnectedLayer', in_shapes,
                                    NO_CON, NO_CON,
                                    size=(3, 3, 1),
                                    activation=spec['activation'])
    return layer, spec
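
Note that size may be a shape tuple: a fully connected layer flattens all feature dimensions, so here 2 * 3 = 6 input features map to 3 * 3 * 1 = 9 output features. A plain numpy sketch of that flatten-multiply-reshape view (an illustration, not the layer's actual kernel):

import numpy as np

x = np.random.rand(5, 4, 2, 3)   # (T, B, 2, 3) input
W = np.random.rand(6, 9)         # hypothetical 6 -> 9 weight layout
y = (x.reshape(5, 4, 6) @ W).reshape(5, 4, 3, 3, 1)
assert y.shape == (5, 4, 3, 3, 1)
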
Example #12
def lstm_layer_2d(spec):
    layer = LstmLayerImpl('LstmLayer',
                          {'default': BufferStructure('T', 'B', 2, 2, 1)},
                          NO_CON, NO_CON,
                          size=3,
                          activation=spec['activation'])
    return layer, spec
Example #13
def rnn_layer_2d(spec):
    layer = RecurrentLayerImpl('RnnLayer',
                               {'default': BufferStructure('T', 'B', 2, 1, 2)},
                               NO_CON, NO_CON,
                               size=3,
                               activation=spec['activation'])
    return layer, spec
Example #14
def binomial_crossentropy_layer(spec):
    time_steps = spec.get('time_steps', 3)
    batch_size = spec.get('batch_size', 2)
    size = 5
    shape = (time_steps, batch_size, size)
    default = np.random.rand(*shape)
    targets = np.random.randint(0, 2, shape)
    in_shapes = {'default': BufferStructure('T', 'B', size),
                 'targets': BufferStructure('T', 'B', size)}

    layer = BinomialCrossEntropyLayerImpl('BinomialCrossEntropyError',
                                          in_shapes, NO_CON, NO_CON)

    spec['default'] = default
    spec['targets'] = targets
    return layer, spec
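
The loss this fixture exercises is the standard binomial (Bernoulli) cross-entropy. A plain numpy rendition of the formula, as a point of reference rather than the layer's actual kernel:

import numpy as np

def binomial_ce(y, t, eps=1e-12):
    # -[t*log(y) + (1-t)*log(1-y)], clipped away from 0 and 1 for safety
    y = np.clip(y, eps, 1 - eps)
    return -(t * np.log(y) + (1 - t) * np.log(1 - y))
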
Example #15
    def create(source_set, sink_set, layout, connections):
        def ensure_uniform(l):
            assert min(l) == max(l)
            return l[0]

        sorted_sources = sorted(source_set)
        flat_sources = list(flatten(sorted_sources))
        nesting = convert_to_nested_indices(sorted_sources)

        # get the buffer type for the hub and assert it's uniform
        structs = [
            BufferStructure.from_layout(get_by_path(layout, s))
            for s in flat_sources
        ]
        btype = ensure_uniform([s.buffer_type for s in structs])
        # max context size
        context_size = max([s.context_size for s in structs])

        hub = Hub(flat_sources, nesting, sorted(sink_set), btype, context_size)
        hub.setup(connections)
        hub.sizes = [structs[i].feature_size for i in hub.perm]
        hub.size = sum(hub.sizes)
        hub.is_backward_only = ensure_uniform(
            [structs[i].is_backward_only for i in hub.perm])
        return hub
Example #16
def create_buffer_views_from_layout(layout, buffers, hubs, existing_view=None):
    if '@slice' in layout:
        buffer_nr = layout['@hub']
        feature_slice = slice(*layout['@slice'])
        structure = BufferStructure.from_layout(layout)
        full_buffer = structure.create_from_buffer_hub(buffers[buffer_nr],
                                                       hubs[buffer_nr],
                                                       feature_slice)
    else:
        full_buffer = None

    if layout['@type'] == 'BufferView':
        names, child_buffers = [], []
        for n, sub_node in sorted(layout.items(), key=sort_by_index_key):
            if n.startswith('@'):
                continue
            if existing_view:
                assert n in existing_view
                c = create_buffer_views_from_layout(
                    sub_node, buffers, hubs, existing_view=existing_view[n])
            else:
                c = create_buffer_views_from_layout(sub_node, buffers, hubs)
            names.append(n)
            child_buffers.append(c)

        if existing_view:
            return existing_view.adjust(names, child_buffers, full_buffer)
        else:
            return BufferView(names, child_buffers, full_buffer)
    else:  # layout['@type'] == 'array':
        assert full_buffer is not None, layout
        return full_buffer
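
The layout nodes this function walks are plain nested dicts: keys starting with '@' carry metadata and every other key names a child node. A minimal hypothetical layout, inferred only from the keys used above:

layout = {
    '@type': 'BufferView',
    'default': {
        '@type': 'array',
        '@hub': 0,           # index into the buffers/hubs lists
        '@slice': (0, 6),    # feature slice within that hub
        # ...plus whatever shape metadata BufferStructure.from_layout reads
    },
}
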
Example #17
    def setup(self, kwargs, in_shapes):
        # 'inputs_1' and 'inputs_2' must have same shape
        f_shape1 = in_shapes['inputs_1'].feature_shape
        f_shape2 = in_shapes['inputs_2'].feature_shape
        if f_shape1 != f_shape2:
            raise LayerValidationError(
                "{}: inputs_1 and inputs_2 must have same feature shapes but "
                "got {} and {}".format(self.name, f_shape1, f_shape2))

        outputs = OrderedDict()
        outputs['default'] = BufferStructure('T', 'B', *f_shape1)

        internals = OrderedDict()
        feature_shape = self.in_shapes['inputs_1'].feature_shape
        internals['diff'] = BufferStructure('T', 'B', *feature_shape)
        return outputs, OrderedDict(), internals
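
Given the 'diff' internal declared above, the forward pass presumably computes the difference once and squares it; in plain numpy terms:

import numpy as np

a = np.random.rand(5, 4, 3, 2)   # inputs_1, shape (T, B, 3, 2)
b = np.random.rand(5, 4, 3, 2)   # inputs_2, same shape
diff = a - b                     # what the 'diff' internal would hold
out = diff ** 2                  # the 'default' output
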
Example #19
    def setup(self, kwargs, in_shapes):
        # 'default' and 'targets' must have same shape
        in_shape = in_shapes['default'].feature_shape
        tar_shape = in_shapes['targets'].feature_shape
        if in_shape != tar_shape:
            raise LayerValidationError(
                "{}: default and targets must have same feature shapes but "
                "got {} and {}".format(self.name, in_shape, tar_shape))

        outputs = OrderedDict()
        outputs['predictions'] = BufferStructure('T', 'B', *in_shape)
        outputs['loss'] = BufferStructure('T', 'B', *in_shape)

        internals = OrderedDict()
        internals['diff'] = BufferStructure('T', 'B', *in_shape)
        return outputs, OrderedDict(), internals
Example #20
def elementwise_layer(spec):
    layer = ElementwiseLayerImpl('Elementwise',
                                 {'default': BufferStructure('T', 'B', 3, 2)},
                                 NO_CON,
                                 NO_CON,
                                 activation=spec['activation'])
    return layer, spec
Example #21
def test_raises_on_unexpected_kwargs(LayerClass):
    with pytest.raises(LayerValidationError) as excinfo:
        l = LayerClass('LayerName', {'default': BufferStructure(5, )},
                       NO_CON,
                       NO_CON,
                       some_foo=16)
    assert 'some_foo' in excinfo.value.args[0]
Example #22
def convolution_layer_2d(spec, input_shape=(4, 4, 1),
                         num_filters=1, kernel_size=(2, 2), stride=(1, 1)):
    x = BufferStructure('T', 'B', *input_shape)
    layer = Convolution2DLayerImpl('Convolution2DLayer', {'default': x},
                                   NO_CON, NO_CON, num_filters=num_filters,
                                   kernel_size=kernel_size, stride=stride,
                                   activation=spec['activation'])
    return layer, spec
Example #23
    def setup(self, kwargs, in_shapes):
        in_shape = in_shapes['default'].feature_shape
        tar_shape = in_shapes['targets'].feature_shape

        if tar_shape != in_shape:
            raise LayerValidationError('input and targets must have the same '
                                       'shapes. But got {} != {}'
                                       .format(in_shape, tar_shape))

        outputs = OrderedDict()
        outputs['predictions'] = BufferStructure('T', 'B', *in_shape)
        outputs['loss'] = BufferStructure('T', 'B', *in_shape)

        internals = OrderedDict()
        internals['dcee'] = BufferStructure('T', 'B', *in_shape,
                                            is_backward_only=True)
        return outputs, OrderedDict(), internals
Example #24
def avgpooling_layer_2d(spec):
    layer = Pooling2DLayerImpl('Pooling2DLayer',
                               {'default':
                                BufferStructure('T', 'B', 4, 4, 1)},
                               NO_CON, NO_CON,
                               kernel_size=(2, 2), stride=(1, 1),
                               type="avg")
    return layer, spec
Example #25
def clockwork_layer_2d(spec):
    layer = ClockworkLayerImpl('ClockworkRnn',
                               {'default': BufferStructure('T', 'B', 2, 1, 2)},
                               NO_CON, NO_CON,
                               size=7,
                               activation=spec['activation'])
    spec['inits'] = {'timing': np.array([1, 1, 2, 2, 3, 3, 5])}
    return layer, spec
Example #26
    def setup(self, kwargs, in_shapes):
        if in_shapes['default'] != in_shapes['targets']:
            raise LayerValidationError("{}: default and targets must have the "
                                       "same shapes but got {} and {}"
                                       .format(self.name,
                                               in_shapes['default'],
                                               in_shapes['targets']))
        outputs = OrderedDict()
        outputs['default'] = BufferStructure('T', 'B', 1)

        feature_shape = in_shapes['default'].feature_shape
        internals = OrderedDict()
        internals['cee'] = BufferStructure('T', 'B', *feature_shape)
        internals['ceed'] = BufferStructure('T', 'B', *feature_shape,
                                            is_backward_only=True)

        return outputs, OrderedDict(), internals
Example #27
    def setup(self, kwargs, in_shapes):
        self.activation = kwargs.get('activation', 'tanh')
        assert 'num_filters' in kwargs, "num_filters must be specified " \
                                        "for ConvolutionLayer"
        assert 'kernel_size' in kwargs, "kernel_size must be specified " \
                                        "for ConvolutionLayer"
        num_filters = kwargs['num_filters']
        kernel_size = kwargs['kernel_size']
        stride = kwargs.get('stride', (1, 1))
        padding = kwargs.get('padding', 0)
        assert type(padding) is int and padding >= 0, \
            "Invalid padding: {}".format(padding)
        assert type(kernel_size) in [list, tuple] and \
            len(kernel_size) == 2, "Kernel size must be list or tuple of " \
                                   "length 2: {}".format(kernel_size)
        assert type(stride) in [list, tuple] and len(stride) == 2, \
            "Stride must be list or tuple of length 2: {}".format(stride)
        in_shape = self.in_shapes['default'].feature_shape
        assert stride[0] > 0 and stride[1] > 0, \
            "Invalid stride: {}".format(stride)
        assert isinstance(in_shape, tuple) and len(in_shape) == 3, \
            "ConvolutionLayer2D must have 3 dimensional input but input " \
            "shape was {}".format(in_shape)
        self.num_filters = num_filters
        self.kernel_size = tuple(kernel_size)
        self.stride = tuple(stride)
        self.padding = padding
        kernel_x, kernel_y = self.kernel_size
        num_input_maps = in_shape[2]

        output_height = ((in_shape[0] + 2 * padding - kernel_x) //
                         stride[0]) + 1
        output_width = ((in_shape[1] + 2 * padding - kernel_y) //
                        stride[1]) + 1
        out_shape = (output_height, output_width, num_filters)

        outputs = OrderedDict()
        outputs['default'] = BufferStructure('T', 'B', *out_shape)

        parameters = OrderedDict()
        parameters['W'] = BufferStructure(num_filters, kernel_x, kernel_y,
                                          num_input_maps)
        parameters['bias'] = BufferStructure(num_filters)

        internals = OrderedDict()
        return outputs, parameters, internals
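
The output-size arithmetic above is the standard convolution formula, out = (in + 2 * padding - kernel) // stride + 1. Plugging in the defaults from Example #22 (4 x 4 x 1 input, 2 x 2 kernel, stride 1, padding 0):

in_h = in_w = 4
kernel, stride, padding = (2, 2), (1, 1), 0
out_h = (in_h + 2 * padding - kernel[0]) // stride[0] + 1
out_w = (in_w + 2 * padding - kernel[1]) // stride[1] + 1
assert (out_h, out_w) == (3, 3)

The Pooling2D setup in Example #28 uses the same formula for its output shape.
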
Example #28
    def setup(self, kwargs, in_shapes):
        assert 'kernel_size' in kwargs, "kernel_size must be specified for " \
                                        "Pooling2D"
        assert 'type' in kwargs, "type must be specified for Pooling2D"
        kernel_size = kwargs['kernel_size']
        ptype = kwargs['type']
        padding = kwargs.get('padding', 0)
        stride = kwargs.get('stride', (1, 1))
        in_shape = self.in_shapes['default'].feature_shape
        assert ptype in ('max', 'avg')
        assert type(padding) is int and padding >= 0, \
            "Invalid padding: {}".format(padding)
        assert type(kernel_size) in [list, tuple] and \
            len(kernel_size) == 2, "Kernel size must be list or " \
                                   "tuple of length 2: {}".format(kernel_size)
        assert type(stride) in [list, tuple] and len(stride) == 2, \
            "Stride must be list or tuple of length 2: {}".format(stride)
        assert stride[0] > 0 and stride[1] > 0, \
            "Invalid stride: {}".format(stride)
        assert isinstance(in_shape, tuple) and len(in_shape) == 3, \
            "PoolingLayer2D must have 3 dimensional input but input " \
            "shape was %s" % in_shape

        self.kernel_size = tuple(kernel_size)
        self.type = ptype
        self.padding = padding
        self.stride = tuple(stride)
        output_height = ((in_shape[0] + 2 * padding - kernel_size[0]) //
                         stride[0]) + 1
        output_width = ((in_shape[1] + 2 * padding - kernel_size[1]) //
                        stride[1]) + 1
        assert output_height > 0 and output_width > 0, \
            "Evaluated output height and width must be positive but were " \
            "({}, {})".format(output_height, output_width)
        output_shape = (output_height, output_width, in_shape[2])

        outputs = OrderedDict()
        outputs['default'] = BufferStructure('T', 'B', *output_shape)

        internals = OrderedDict()
        if self.type == 'max':
            argmax_shape = outputs['default'].feature_shape
            internals['argmax'] = BufferStructure('T', 'B', *argmax_shape)
        return outputs, OrderedDict(), internals
Example #29
def clockwork_lstm_layer(spec):
    layer = ClockworkLstmLayerImpl('ClockworkLstm',
                                   {'default': BufferStructure('T', 'B', 3)},
                                   NO_CON, NO_CON,
                                   size=4,
                                   activation=spec['activation'])

    spec['inits'] = {'timing': np.array([1, 2, 2, 3])}
    return layer, spec
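
The 'timing' init supplies one clock period per hidden unit (size=4, hence four entries). In a clockwork architecture a unit updates only on time steps divisible by its period; a sketch of that idea, not this layer's actual code:

import numpy as np

timing = np.array([1, 2, 2, 3])
for t in range(1, 7):
    active = (t % timing) == 0   # boolean mask of units updating at step t
    # inactive units would simply carry their previous state forward
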
Example #30
def softmax_fiddle_layer(spec):
    time_steps = spec.get('time_steps', 3)
    batch_size = spec.get('batch_size', 2)
    feature_dim = (4, )
    target_shape = (time_steps, batch_size) + feature_dim
    targets = np.random.randint(0, 2, target_shape).astype(float)
    targets /= np.clip(targets.sum(2)[:, :, None], 1, 10000)
    in_shapes = {
        'default': BufferStructure('T', 'B', *feature_dim),
        'targets': BufferStructure('T', 'B', *target_shape[2:])
    }

    layer = SoftmaxFiddleLayerImpl('SoftmaxFiddleLayer', in_shapes, NO_CON,
                                   NO_CON)

    spec['targets'] = targets
    return layer, spec
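
The targets built here form a distribution over the feature axis: random 0/1 draws divided by their per-step sum, with np.clip guarding against all-zero rows (which then simply stay all-zero). A standalone check of that property:

import numpy as np

t = np.random.randint(0, 2, (3, 2, 4)).astype(float)
t /= np.clip(t.sum(2)[:, :, None], 1, 10000)
sums = t.sum(axis=2)
assert np.all(np.isclose(sums, 1.0) | (sums == 0.0))
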
Example #31
    def setup(self, kwargs, in_shapes):
        outputs = OrderedDict()
        outputs['loss'] = BufferStructure('T', 'B', 1)

        parameters = OrderedDict()
        internals = OrderedDict()
        internals['tmp'] = in_shapes['default']

        return outputs, parameters, internals