Example #1
    def fail_if_shape_invalid(self, input_shapes):
        n_input_layers = len(input_shapes)

        try:
            gate_shape = input_shapes[self.gate_index]
        except IndexError:
            raise LayerConnectionError(
                "Invalid index for gating layer. Number of input "
                "layers: {}. Gating layer index: {}"
                "".format(n_input_layers, self.gate_index))

        other_shapes = exclude_index(input_shapes, self.gate_index)
        if gate_shape and len(gate_shape) != 2:
            raise LayerConnectionError(
                "Output from the gating network should be 2-dimensional. "
                "Output shape from gating layer: {!r}"
                "".format(gate_shape))

        n_expected_networks = gate_shape[-1]
        # Note: -1 from all layers in order to exclude gating layer
        if n_expected_networks != (n_input_layers - 1):
            raise LayerConnectionError(
                "Gating layer can work only for combining only {} networks, "
                "got {} networks instead."
                "".format(n_expected_networks, (n_input_layers - 1)))

        for shape in other_shapes:
            if not shape.is_compatible_with(other_shapes[0]):
                raise LayerConnectionError(
                    "Output layer that has to be merged expect to "
                    "have the same shapes. Shapes: {!r}"
                    "".format(tf_utils.shape_to_tuple(other_shapes)))
Example #2
    def validate(self, input_shapes):
        # The axis value uses 0-based indexing, where index 0 points
        # to the batch dimension of the input. Shapes in neupy do not
        # store information about the batch, so we need to put a None
        # value in the 0th position.
        valid_shape = as_tuple(None, input_shapes[0])

        # Avoid using negative indices
        possible_axes = list(range(len(valid_shape)))
        concat_axis = possible_axes[self.axis]

        for input_shape in input_shapes[1:]:
            if len(input_shapes[0]) != len(input_shape):
                raise LayerConnectionError(
                    "Cannot concatenate layers, because inputs have "
                    "different number of dimensions. Shapes: {} and {}"
                    "".format(input_shapes[0], input_shape))

            for axis, axis_size in enumerate(input_shape, start=1):
                if axis != concat_axis and valid_shape[axis] != axis_size:
                    raise LayerConnectionError(
                        "Cannot concatenate layers, because some of them "
                        "don't match over dimension #{} (0-based indeces)."
                        "Shapes: {} and {}"
                        "".format(axis, input_shapes[0], input_shape))
Example #3
    def validate(self, input_shapes):
        n_input_layers = len(input_shapes)
        gating_layer_index = self.gating_layer_index

        try:
            gating_layer_shape = input_shapes[gating_layer_index]
        except IndexError:
            raise LayerConnectionError(
                "Invalid index for gating layer. Number of input "
                "layers: {}. Gating layer index: {}"
                "".format(n_input_layers, gating_layer_index))

        other_layers_shape = exclude_index(input_shapes, gating_layer_index)

        if len(gating_layer_shape) != 1:
            raise LayerConnectionError(
                "Output from the gating network should be vector. Output "
                "shape from gating layer: {!r}".format(gating_layer_shape))

        n_gating_weights = gating_layer_shape[0]
        # Note: -1 from all layers in order to exclude gating layer
        if n_gating_weights != (n_input_layers - 1):
            raise LayerConnectionError(
                "Gating layer can work only for combining only {} networks, "
                "got {} networks instead."
                "".format(n_gating_weights, (n_input_layers - 1)))

        if not all_equal(other_layers_shape):
            raise LayerConnectionError(
                "Output layer that has to be merged expect to have the "
                "same shapes. Shapes: {!r}".format(other_layers_shape))
Example #4
 def fail_if_shape_invalid(self, input_shape):
     if input_shape and input_shape.ndims != 3:
         clsname = self.__class__.__name__
         raise LayerConnectionError(
             "{} layer was expected input with three dimensions, "
             "but got input with {} dimensions instead. Layer: {}"
             "".format(clsname, input_shape.ndims, self))
Example #5
 def fail_if_shape_invalid(self, input_shape):
     if input_shape and input_shape.ndims != 4:
         raise LayerConnectionError(
             "Upscale layer should have an input value with 4 dimensions "
             "(batch, height, width, channel), got input with {} "
             "dimensions instead. Shape: {}"
             "".format(input_shape.ndims, input_shape))
Example #6
 def get_output_shape(self, input_shape):
     if not self.input_shape.is_compatible_with(input_shape):
         raise LayerConnectionError(
             "Input layer got unexpected input shape. "
             "Received shape: {}, Expected shape: {}"
             "".format(input_shape, self.input_shape))
     return self.input_shape.merge_with(input_shape)
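
The `is_compatible_with`/`merge_with` pair above comes from TensorFlow's `tf.TensorShape`; a quick sketch of how those two calls behave:

    import tensorflow as tf

    expected = tf.TensorShape([None, 3])  # unknown batch, 3 features
    received = tf.TensorShape([32, 3])

    # Compatible: None matches any concrete dimension
    assert expected.is_compatible_with(received)

    # merge_with keeps the most specific information from both shapes
    assert expected.merge_with(received).as_list() == [32, 3]

    # Mismatched known dimensions would make the layer raise
    assert not expected.is_compatible_with(tf.TensorShape([32, 4]))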
Example #7
 def validate(self, input_shapes):
     n_unique_shapes = len(set(input_shapes))
     if n_unique_shapes != 1:
         raise LayerConnectionError(
             "The `{}` layer expects all input values with the "
             "same shapes. Input shapes: {}"
             "".format(self, input_shapes))
Example #8
 def validate(self, input_shape):
     if len(input_shape) != 3:
         raise LayerConnectionError(
             "Pooling layer expects an input with 3 "
             "dimensions, got {} with shape {}"
             "".format(len(input_shape), input_shape)
         )
Example #9
    def output(self, input_value):
        if not self.input_shape:
            raise LayerConnectionError(
                "Layer `{}` doesn't have defined input shape. Probably "
                "it doesn't have an input layer.".format(self))

        half = self.n // 2
        squared_value = input_value ** 2

        n_samples = input_value.shape[0]
        channel = input_value.shape[1]
        height = input_value.shape[2]
        width = input_value.shape[3]

        zero = asfloat(0)
        # Allocate a zero-padded tensor with `half` extra channels on
        # each side and copy the squared values into the middle
        extra_channels = T.alloc(zero, n_samples, channel + 2 * half,
                                 height, width)
        squared_value = T.set_subtensor(
            extra_channels[:, half:half + channel, :, :],
            squared_value
        )
        scale = self.k

        # Sum squared activations over a window of `n` neighbouring channels
        for i in range(self.n):
            scale += self.alpha * squared_value[:, i:i + channel, :, :]

        scale = scale ** self.beta
        return input_value / scale
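
The Theano-specific padding above implements cross-channel local response normalization; the same arithmetic reads more directly in NumPy (a sketch assuming NCHW input and the usual k, alpha, beta, n hyperparameters):

    import numpy as np

    def local_response_norm(x, k=2.0, alpha=1e-4, beta=0.75, n=5):
        # x has shape (samples, channels, height, width)
        half = n // 2
        n_channels = x.shape[1]

        # Zero-pad the channel axis so every channel has a full window
        padded = np.pad(x ** 2, ((0, 0), (half, half), (0, 0), (0, 0)))

        # Sum squared activations over n neighbouring channels
        scale = k + alpha * sum(
            padded[:, i:i + n_channels] for i in range(n))

        return x / scale ** beta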
Example #10
    def add_layer(self, layer):
        """
        Add new layer into the graph.

        Parameters
        ----------
        layer : layer

        Returns
        -------
        bool
            Returns ``False`` if the layer has already been added into
            the graph and there is no need to add it again, and
            ``True`` if the layer is new and was added successfully.
        """
        if layer in self.forward_graph:
            return False

        for existed_layer in self.forward_graph:
            if existed_layer.name == layer.name:
                raise LayerConnectionError(
                    "Cannot connect {} layer. Layer with name {!r} has been "
                    "already defined in the graph.".format(layer, layer.name))

        self.forward_graph[layer] = []
        self.backward_graph[layer] = []

        return True
Example #11
    def validate(self, input_shape):
        ndim = len(input_shape)

        if ndim != 3:
            raise LayerConnectionError(
                "Layer `{}` expected input with 3 dimensions, got {}"
                "".format(self, ndim))
Example #12
    def get_output_shape(self, input_shape):
        if input_shape and input_shape.ndims != 4:
            raise LayerConnectionError(
                "Layer `{}` expected input with 4 dimensions, got {} instead. "
                "Shape: {}".format(self.name, input_shape.ndims, input_shape))

        return super(LocalResponseNorm, self).get_output_shape(input_shape)
Example #13
    def add_connection(self, from_layer, to_layer):
        """
        Add new directional connection between two layers.

        Parameters
        ----------
        from_layer : layer
        to_layer : layer

        Raises
        ------
        LayerConnectionError
            Raised if it's impossible to connect the two layers or if
            the new connection creates a cycle in the graph.

        Returns
        -------
        bool
            Returns ``False`` if connection has already been added into
            the graph, and ``True`` if connection was added successfully.
        """
        if from_layer is to_layer:
            raise LayerConnectionError("Cannot connect layer `{}` "
                                       "to itself".format(from_layer))

        self.add_layer(from_layer)
        self.add_layer(to_layer)

        forward_connections = self.forward_graph[from_layer]
        backward_connections = self.backward_graph[to_layer]

        if to_layer in forward_connections:
            # Layers have already been connected
            return False

        forward_connections.append(to_layer)
        backward_connections.append(from_layer)

        if is_cyclic(self.forward_graph):
            raise LayerConnectionError(
                "Cannot connect layer `{}` to `{}`, because this "
                "connection creates cycle in the graph."
                "".format(from_layer, to_layer))

        return True
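
The `is_cyclic` helper used above only needs to detect a back edge in the adjacency dictionary; a minimal version (shown for illustration, not neupy's actual implementation) is a depth-first search:

    def is_cyclic(graph):
        # graph maps each node to the list of nodes it points to
        visited, in_stack = set(), set()

        def dfs(node):
            visited.add(node)
            in_stack.add(node)
            for neighbour in graph.get(node, []):
                if neighbour in in_stack:
                    return True  # back edge found, hence a cycle
                if neighbour not in visited and dfs(neighbour):
                    return True
            in_stack.discard(node)
            return False

        return any(dfs(node) for node in graph if node not in visited)

    assert is_cyclic({'a': ['b'], 'b': ['a']})
    assert not is_cyclic({'a': ['b'], 'b': []})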
Example #14
    def validate(self, input_shapes):
        classname = self.__class__.__name__

        if not isinstance(input_shapes, list):
            raise LayerConnectionError("{} layer expected 2 inputs, got 1"
                                       "".format(classname))

        if len(input_shapes) != 2:
            n_inputs = len(input_shapes)
            raise LayerConnectionError("{} layer expected 2 inputs, got {}"
                                       "".format(classname, n_inputs))

        for input_shape in input_shapes:
            ndim = len(input_shape)

            if ndim != 1:
                # Shapes here exclude the batch dimension, hence the
                # +1 when reporting the dimensionality
                raise LayerConnectionError("Input layer to {} should be 2D, "
                                           "got {}D".format(classname, ndim + 1))
Example #15
    def validate(self, input_shape):
        n_input_dims = len(input_shape) + 1  # +1 for batch dimension
        clsname = self.__class__.__name__

        if n_input_dims < 3:
            raise LayerConnectionError(
                "{} layer was expected input with at least three "
                "dimensions, got input with {} dimensions instead"
                "".format(clsname, n_input_dims))
Example #16
    def get_output_shape(self, input_shape):
        input_shape = tf.TensorShape(input_shape)

        if input_shape and input_shape.ndims != 4:
            raise LayerConnectionError(
                "Group normalization layer expects 4 dimensional input, "
                "got {} instead. Input shape: {}, Layer: {}"
                "".format(input_shape.ndims, input_shape, self))

        n_channels = input_shape[3]

        if n_channels.value and n_channels % self.n_groups != 0:
            raise LayerConnectionError(
                "Cannot divide {} input channels into {} groups. "
                "Input shape: {}, Layer: {}".format(n_channels, self.n_groups,
                                                    input_shape, self))

        return super(GroupNorm, self).get_output_shape(input_shape)
Example #17
    def get_output_shape(self, input_shape):
        input_shape = tf.TensorShape(input_shape)

        if input_shape and input_shape.ndims != 4:
            raise LayerConnectionError(
                "DropBlock layer expects input with 4 dimensions, got {} "
                "with shape {}".format(len(input_shape), input_shape))

        return input_shape
Example #18
    def validate(self, input_shapes):
        valid_shape = as_tuple(None, input_shapes[0])

        for input_shape in input_shapes[1:]:
            for axis, axis_size in enumerate(input_shape, start=1):
                if axis != self.axis and valid_shape[axis] != axis_size:
                    raise LayerConnectionError(
                        "Cannot concatenate layers. Some of them don't "
                        "match over dimension #{} (0-based indeces)."
                        "".format(axis))
Example #19
def validate_graphs_before_combining(left_graph, right_graph):
    left_out_layers = left_graph.output_layers
    right_in_layers = right_graph.input_layers

    if len(left_out_layers) > 1 and len(right_in_layers) > 1:
        raise LayerConnectionError(
            "Cannot make many to many connection between graphs. One graph "
            "has {n_left_outputs} outputs (layer names: {left_names}) and "
            "the other one has {n_right_inputs} inputs (layer names: "
            "{right_names}). First graph: {left_graph}, Second graph: "
            "{right_graph}".format(
                left_graph=left_graph,
                n_left_outputs=len(left_out_layers),
                left_names=[layer.name for layer in left_out_layers],
                right_graph=right_graph,
                n_right_inputs=len(right_in_layers),
                right_names=[layer.name for layer in right_in_layers],
            ))

    left_out_shapes = as_tuple(left_graph.output_shape)
    right_in_shapes = as_tuple(right_graph.input_shape)

    for left_layer, left_out_shape in zip(left_out_layers, left_out_shapes):
        right = zip(right_in_layers, right_in_shapes)

        for right_layer, right_in_shape in right:
            if left_out_shape.is_compatible_with(right_in_shape):
                continue

            raise LayerConnectionError(
                "Cannot connect layer `{left_name}` to layer `{right_name}`, "
                "because output shape ({left_out_shape}) of the first layer "
                "is incompatible with the input shape ({right_in_shape}) "
                "of the second layer. First layer: {left_layer}, Second "
                "layer: {right_layer}".format(
                    left_layer=left_layer,
                    left_name=left_layer.name,
                    left_out_shape=left_out_shape,
                    right_layer=right_layer,
                    right_name=right_layer.name,
                    right_in_shape=right_in_shape,
                ))
Example #20
    def get_output_shape(self, *input_shapes):
        input_shapes = [tf.TensorShape(shape) for shape in input_shapes]
        first_shape = input_shapes[0]

        if len(input_shapes) < 2:
            raise LayerConnectionError(
                "Layer `{}` expected multiple inputs. Input shapes: {}"
                "".format(self.name, tf_utils.shape_to_tuple(input_shapes)))

        if any(shape.ndims is None for shape in input_shapes):
            return tf.TensorShape(None)

        for shape in input_shapes:
            if not shape.is_compatible_with(first_shape):
                formatted_shapes = tf_utils.shape_to_tuple(input_shapes)
                raise LayerConnectionError(
                    "Input shapes to the `{}` layer have incompatible shapes. "
                    "Input shapes: {}, Layer: {}"
                    "".format(self.name, formatted_shapes, self))

        return first_shape
Example #21
    def get_output_shape(self, input_shape):
        input_shape = tf.TensorShape(input_shape)

        if input_shape and max(self.alpha_axes) >= input_shape.ndims:
            max_axis_index = input_shape.ndims - 1

            raise LayerConnectionError(
                "Cannot specify alpha for the axis #{}. Maximum "
                "available axis is {} (0-based indices)."
                "".format(max(self.alpha_axes), max_axis_index))

        return super(PRelu, self).get_output_shape(input_shape)
Example #22
    def create_variables(self, input_shape):
        input_shape = tf.TensorShape(input_shape)

        if input_shape.ndims is None:
            raise WeightInitializationError(
                "Cannot initialize variables for the batch normalization "
                "layer, because input shape is undefined. Layer: {}"
                "".format(self))

        if self.axes is None:
            # If ndims == 4 then axes = (0, 1, 2)
            # If ndims == 2 then axes = (0,)
            self.axes = tuple(range(input_shape.ndims - 1))

        if any(axis >= input_shape.ndims for axis in self.axes):
            raise LayerConnectionError(
                "Batch normalization cannot be applied over one of "
                "the axis, because input has only {} dimensions. Layer: {}"
                "".format(input_shape.ndims, self))

        parameter_shape = tuple([
            input_shape[axis].value if axis not in self.axes else 1
            for axis in range(input_shape.ndims)
        ])

        if any(parameter is None for parameter in parameter_shape):
            unknown_dim_index = parameter_shape.index(None)

            raise WeightInitializationError(
                "Cannot create variables for batch normalization, because "
                "input has unknown dimension #{} (0-based indices). "
                "Input shape: {}, Layer: {}".format(unknown_dim_index,
                                                    input_shape, self))

        self.input_shape = input_shape
        self.running_mean = self.variable(value=self.running_mean,
                                          shape=parameter_shape,
                                          name='running_mean',
                                          trainable=False)

        self.running_inv_std = self.variable(value=self.running_inv_std,
                                             shape=parameter_shape,
                                             name='running_inv_std',
                                             trainable=False)

        self.gamma = self.variable(value=self.gamma,
                                   name='gamma',
                                   shape=parameter_shape)

        self.beta = self.variable(value=self.beta,
                                  name='beta',
                                  shape=parameter_shape)
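
The `parameter_shape` computation keeps a broadcastable size of 1 on every normalized axis and the real size everywhere else; the same idea with plain tuples (a sketch, not neupy's code):

    def batch_norm_parameter_shape(input_shape, axes):
        # Axes listed in `axes` are averaged over, so gamma and beta
        # only need size 1 there; the other axes keep their size
        return tuple(
            1 if axis in axes else input_shape[axis]
            for axis in range(len(input_shape)))

    # NHWC input normalized over batch, height and width
    assert batch_norm_parameter_shape(
        (32, 28, 28, 16), axes=(0, 1, 2)) == (1, 1, 1, 16)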
Example #23
    def get_output_shape(self, input_shape):
        input_shape = tf.TensorShape(input_shape)

        if self.n_units is None:
            return input_shape

        if input_shape and input_shape.ndims != 2:
            raise LayerConnectionError(
                "Input shape expected to have 2 dimensions, got {} instead. "
                "Shape: {}".format(input_shape.ndims, input_shape))

        n_samples = input_shape[0]
        return tf.TensorShape((n_samples, self.n_units))
Example #24
    def get_output_shape(self, *input_shapes):
        input_shapes = [tf.TensorShape(shape) for shape in input_shapes]
        # The axis value uses 0-based indexing, where index 0 points
        # to the batch dimension of the input. TensorFlow shapes
        # already carry the batch dimension in the 0th position.
        valid_shape = input_shapes[0]

        if any(shape.ndims is None for shape in input_shapes):
            return tf.TensorShape(None)

        # Avoid using negative indices
        possible_axes = list(range(len(valid_shape)))
        concat_axis = possible_axes[self.axis]

        for input_shape in input_shapes[1:]:
            if len(valid_shape) != len(input_shape):
                raise LayerConnectionError(
                    "Cannot concatenate layers, because inputs have "
                    "different number of dimensions. Shapes: {} and {}"
                    "".format(valid_shape, input_shape))

            for axis, axis_size in enumerate(input_shape):
                if axis != concat_axis and valid_shape[axis] != axis_size:
                    raise LayerConnectionError(
                        "Cannot concatenate layers, because some of them "
                        "don't match over dimension #{} (0-based indeces). "
                        "Shapes: {} and {}"
                        "".format(axis, valid_shape, input_shape))

        output_shape = input_shapes.pop(0)
        output_shape = [dim.value for dim in output_shape.dims]

        for input_shape in input_shapes:
            output_shape[self.axis] += input_shape[self.axis]

        return tf.TensorShape(output_shape)
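
Stripped of the TensorFlow wrappers, the concatenation rule is: every dimension except `axis` must match, and the output sums the sizes along `axis`. A plain-tuple sketch of that rule:

    def concat_output_shape(shapes, axis):
        output = list(shapes[0])
        for shape in shapes[1:]:
            for i, (a, b) in enumerate(zip(output, shape)):
                if i != axis and a != b and None not in (a, b):
                    raise ValueError(
                        "shapes do not match over dimension #{}".format(i))
            output[axis] += shape[axis]
        return tuple(output)

    # Concatenating two NHWC shapes over the channel axis
    assert concat_output_shape(
        [(None, 28, 28, 16), (None, 28, 28, 32)], axis=3) == (None, 28, 28, 48)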
Example #25
def merge(left_graph, right_graph, combine=False):
    """
    Merges two graphs into a single one. When ``combine=False``, no new
    connections are created. When ``combine=True``, the output layers
    from the ``left_graph`` will be connected to the input layers of
    the ``right_graph``.

    Parameters
    ----------
    left_graph : layer, network
    right_graph : layer, network

    combine : bool
        Defaults to ``False``.
    """
    if combine:
        validate_graphs_before_combining(left_graph, right_graph)

    forward_graph = OrderedDict()

    for key, value in left_graph.forward_graph.items():
        # Copy the lists stored inside of the dictionary,
        # but do not copy the values inside of each list
        forward_graph[key] = copy.copy(value)

    for key, values in right_graph.forward_graph.items():
        if key in forward_graph:
            for value in values:
                if value not in forward_graph[key]:
                    forward_graph[key].append(value)
        else:
            forward_graph[key] = copy.copy(values)

    if combine:
        for left_out_layer in left_graph.output_layers:
            for right_in_layer in right_graph.input_layers:
                forward_graph[left_out_layer].append(right_in_layer)

    if is_cyclic(forward_graph):
        raise LayerConnectionError(
            "Cannot define connection between layers, because it creates "
            "cycle in the graph. Left graph: {}, Right graph: {}"
            "".format(left_graph, right_graph))

    return LayerGraph(forward_graph)
Example #26
def repeat(network_or_layer, n):
    """
    Function copies input `n - 1` times and connects everything in sequential
    order.

    Parameters
    ----------
    network_or_layer : network or layer
        Layer or network (connection of layers).

    n : int
        Number of times input should be replicated.

    Examples
    --------
    >>> from neupy.layers import *
    >>>
    >>> block = Conv((3, 3, 32)) >> Relu() >> BN()
    >>> block
    <unknown> -> [... 3 layers ...] -> (?, ?, ?, 32)
    >>>
    >>> repeat(block, n=5)
    <unknown> -> [... 15 layers ...] -> (?, ?, ?, 32)
    """
    # Check the type first, since comparing a non-integer with `<=`
    # would raise a TypeError before we could report a clean error
    if not isinstance(n, int) or n <= 0:
        raise ValueError("The `n` parameter should be a positive integer, "
                         "got {} instead".format(n))

    if n == 1:
        return network_or_layer

    input_shape = network_or_layer.input_shape
    output_shape = network_or_layer.output_shape

    if not input_shape.is_compatible_with(output_shape):
        raise LayerConnectionError(
            "Cannot connect network/layer to its copy, because input "
            "shape is incompatible with the output shape. Input shape: {}, "
            "Output shape: {}".format(input_shape, output_shape))

    new_networks = [copy.deepcopy(network_or_layer) for _ in range(n - 1)]
    return join(network_or_layer, *new_networks)
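
The compatibility guard matters because every copy has to feed the next one; a short `tf.TensorShape` check mirroring it:

    import tensorflow as tf

    # A block mapping (?, ?, ?, 32) to (?, ?, ?, 32) can be chained
    same = tf.TensorShape([None, None, None, 32])
    assert same.is_compatible_with(same)

    # A block mapping 32 channels to 64 cannot feed its own copy
    wider = tf.TensorShape([None, None, None, 64])
    assert not same.is_compatible_with(wider)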
Example #27
 def fail_if_shape_invalid(self, input_shape):
     if input_shape and input_shape.ndims != 4:
         raise LayerConnectionError(
             "Pooling layer expects an input with 4 "
             "dimensions, got {} with shape {}. Layer: {}"
             "".format(len(input_shape), input_shape, self))
Example #28
    def connect_layers(self, from_layers, to_layers):
        """
        Connect two layers together and update other layers
        in the graph.

        Parameters
        ----------
        from_layers : layer or list of layers
        to_layers : layer or list of layers

        Raises
        ------
        LayerConnectionError
            Raised if the graph cannot connect the two layers.

        Returns
        -------
        bool
            Returns ``False`` if connection has already been added into
            the graph, and ``True`` if connection was added successfully.
        """
        if not isinstance(from_layers, (list, tuple)):
            from_layers = [from_layers]

        if not isinstance(to_layers, (list, tuple)):
            to_layers = [to_layers]

        connections_added = []
        do_not_have_shapes = True

        for from_layer in from_layers:
            if from_layer.input_shape or from_layer.output_shape:
                do_not_have_shapes = False

            for to_layer in to_layers:
                connection_added = self.add_connection(from_layer, to_layer)
                connections_added.append(connection_added)

        if not any(connections_added):
            return False

        if do_not_have_shapes:
            return True

        # Layer has an input shape which means that we can
        # propagate this information through the graph and
        # set up input shape for layers that don't have it.
        layers = copy.copy(from_layers)
        forward_graph = self.forward_graph

        # We need to know whether all of the input
        # layers have a defined input shape
        all_inputs_has_shape = all(layer.input_shape
                                   for layer in self.input_layers)

        while layers:
            current_layer = layers.pop()
            next_layers = forward_graph[current_layer]

            for next_layer in next_layers:
                next_inp_shape = next_layer.input_shape
                current_out_shape = current_layer.output_shape
                expect_one_input = does_layer_expect_one_input(next_layer)

                if not next_inp_shape and expect_one_input:
                    next_layer.input_shape = current_out_shape
                    next_layer.initialize()

                elif not expect_one_input and all_inputs_has_shape:
                    input_shapes = []
                    for incoming_layer in self.backward_graph[next_layer]:
                        input_shapes.append(incoming_layer.output_shape)

                    if None not in input_shapes:
                        next_layer.input_shape = input_shapes
                        next_layer.initialize()

                    else:
                        # Some of the previous layers still don't have
                        # an input shape. Put the current layer at the
                        # end of the stack and check it again later
                        layers.insert(0, current_layer)

                elif expect_one_input and next_inp_shape != current_out_shape:
                    raise LayerConnectionError(
                        "Cannot connect `{}` to the `{}`. Output shape "
                        "from one layer is equal to {} and input shape "
                        "to the next one is equal to {}".format(
                            current_layer,
                            next_layer,
                            current_out_shape,
                            next_inp_shape,
                        ))

            layers.extend(next_layers)

        return True
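
At its core, the propagation loop above walks the forward graph and pushes every known output shape into the layers that follow it; a simplified standalone sketch (with a hypothetical toy `Layer` class, not neupy's) looks like this:

    class Layer:
        def __init__(self, name, output_shape=None):
            self.name = name
            self.input_shape = None
            self.output_shape = output_shape

        def initialize(self):
            # In this toy version every layer keeps the shape unchanged
            if self.output_shape is None:
                self.output_shape = self.input_shape

    def propagate_shapes(forward_graph, start_layers):
        # Push known output shapes forward, layer by layer
        queue = list(start_layers)
        while queue:
            layer = queue.pop()
            for successor in forward_graph[layer]:
                if successor.input_shape is None:
                    successor.input_shape = layer.output_shape
                    successor.initialize()
                    queue.append(successor)

    inp, hidden = Layer('input', output_shape=(None, 10)), Layer('hidden')
    propagate_shapes({inp: [hidden], hidden: []}, [inp])
    assert hidden.output_shape == (None, 10)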
Example #29
 def validate(self, input_shape):
     # input_shape excludes the batch dimension, so 2 dimensions
     # here correspond to the 3 dimensions named in the message
     if len(input_shape) < 2:
         raise LayerConnectionError(
             "Transpose expects input with at least 3 dimensions.")
Example #30
 def fail_if_shape_invalid(self, input_shape):
     if input_shape and max(self.perm) >= input_shape.ndims:
         raise LayerConnectionError(
             "Cannot apply transpose operation to the "
             "input. Permuntation: {}, Input shape: {}"
             "".format(self.perm, input_shape))