Example #1
    def process(self, ellLayers):
        """Appends the ELL equivalent of the current layer to ellLayers."""

        # Note that a single CNTK Dense function block is equivalent to the following 3 ELL layers:
        # - FullyConnectedLayer
        # - BiasLayer
        # - ActivationLayer. This layer is sometimes missing, depending on activation type.
        #
        # Therefore, make sure the output padding characteristics of the last layer reflect the next layer's
        # padding requirements.

        weightsParameter = utilities.find_parameter_by_name(
            self.layer.parameters, 'W', 0)
        biasParameter = utilities.find_parameter_by_name(
            self.layer.parameters, 'b', 1)
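        # (The trailing 0 and 1 are fallback indices, used when the 'W'/'b'
        # name lookup fails.)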
        weightsTensor = converters.get_tensor_from_cntk_dense_weight_parameter(
            weightsParameter)
        biasVector = converters.get_vector_from_cntk_trainable_parameter(
            biasParameter)

        # Create the ell.neural.LayerParameters for the various ELL layers
        firstLayerParameters = ell.neural.LayerParameters(
            self.layer.ell_inputShape, self.layer.ell_inputPaddingParameters, self.layer.ell_outputShapeMinusPadding,
            ell.neural.NoPadding(), ell.nodes.PortType.smallReal)
        middleLayerParameters = ell.neural.LayerParameters(
            self.layer.ell_outputShapeMinusPadding, ell.neural.NoPadding(),
            self.layer.ell_outputShapeMinusPadding, ell.neural.NoPadding(), ell.nodes.PortType.smallReal)
        lastLayerParameters = ell.neural.LayerParameters(
            self.layer.ell_outputShapeMinusPadding, ell.neural.NoPadding(),
            self.layer.ell_outputShape, self.layer.ell_outputPaddingParameters, ell.nodes.PortType.smallReal)

        layerParameters = firstLayerParameters

        internalNodes = utilities.get_model_layers(self.layer.block_root)
        activationType = utilities.get_ell_activation_type(internalNodes)
        isSoftmaxActivation = utilities.is_softmax_activation(internalNodes)
        hasActivation = isSoftmaxActivation or activationType is not None

        # Create the ELL fully connected layer
        ellLayers.append(ell.neural.FullyConnectedLayer(
            layerParameters, weightsTensor))

        # Create the ELL bias layer
        if hasActivation:
            layerParameters = middleLayerParameters
        else:
            layerParameters = lastLayerParameters
        ellLayers.append(ell.neural.BiasLayer(layerParameters, biasVector))

        # Create the ELL activation layer
        if hasActivation:
            layerParameters = lastLayerParameters

            # Special case: if this is softmax activation, create an ELL
            # Softmax layer. Otherwise, insert an ELL ActivationLayer.
            if isSoftmaxActivation:
                ellLayers.append(ell.neural.SoftmaxLayer(layerParameters))
            else:
                ellLayers.append(ell.neural.ActivationLayer(
                    layerParameters, activationType))
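For reference, the three ELL layers appended above compute a standard fully connected transform. A minimal NumPy sketch of that computation follows (illustrative only; dense_reference and its arguments are hypothetical, not part of the importer):

    import numpy as np

    def dense_reference(x, weights, bias, activation=None):
        # FullyConnectedLayer then BiasLayer: affine transform
        y = weights @ x + bias
        if activation == 'softmax':
            # SoftmaxLayer: subtract the max for numerical stability
            e = np.exp(y - np.max(y))
            return e / e.sum()
        if activation is not None:
            # ActivationLayer: elementwise function, e.g. np.tanh
            return activation(y)
        return y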
Example #2
    def process(self, ellLayers):
        """Appends the ELL representation of the current layer to ellLayers."""

        # Note that a single CNTK Batch Normalization layer is equivalent to the following 3 ELL layers:
        # - BatchNormalizationLayer
        # - ScalingLayer
        # - BiasLayer
        #
        # Therefore, make sure the output padding characteristics of the last layer reflect the next layer's
        # padding requirements.

        scaleVector = converters.get_vector_from_cntk_trainable_parameter(
            self.scale)
        biasVector = converters.get_vector_from_cntk_trainable_parameter(
            self.bias)
        meanVector = converters.get_vector_from_cntk_trainable_parameter(
            self.mean)
        varianceVector = converters.get_vector_from_cntk_trainable_parameter(
            self.variance)

        # Create the ell.neural.LayerParameters for the various ELL layers
        firstLayerParameters = ell.neural.LayerParameters(
            self.layer.ell_inputShape, self.layer.ell_inputPaddingParameters,
            self.layer.ell_outputShapeMinusPadding, ell.neural.NoPadding(),
            ell.nodes.PortType.smallReal)
        middleLayerParameters = ell.neural.LayerParameters(
            self.layer.ell_outputShapeMinusPadding,
            ell.neural.NoPadding(), self.layer.ell_outputShapeMinusPadding,
            ell.neural.NoPadding(), ell.nodes.PortType.smallReal)
        lastLayerParameters = ell.neural.LayerParameters(
            self.layer.ell_outputShapeMinusPadding, ell.neural.NoPadding(),
            self.layer.ell_outputShape, self.layer.ell_outputPaddingParameters,
            ell.nodes.PortType.smallReal)

        # Create the layers
        ellLayers.append(
            ell.neural.BatchNormalizationLayer(
                firstLayerParameters, meanVector, varianceVector, self.epsilon,
                ell.neural.EpsilonSummand.variance))
        ellLayers.append(
            ell.neural.ScalingLayer(middleLayerParameters, scaleVector))
        ellLayers.append(ell.neural.BiasLayer(lastLayerParameters, biasVector))
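For reference, the three layers appended above jointly compute the standard batch-normalization transform; ell.neural.EpsilonSummand.variance indicates that epsilon is added to the variance before the square root. A minimal NumPy sketch, illustrative only:

    import numpy as np

    def batch_norm_reference(x, mean, variance, scale, bias, epsilon):
        # BatchNormalizationLayer: normalize, adding epsilon to the variance
        normalized = (x - mean) / np.sqrt(variance + epsilon)
        # ScalingLayer then BiasLayer: per-channel affine transform
        return normalized * scale + bias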
Example #3
    def process(self, ellLayers):
        """Appends the ELL representation of the current layer to ellLayers."""

        biasVector = converters.get_vector_from_cntk_trainable_parameter(
            self.layer.parameters[0])

        # Create the ell.neural.LayerParameters for the ELL layer
        layerParameters = ell.neural.LayerParameters(
            self.layer.ell_inputShape, self.layer.ell_inputPaddingParameters, self.layer.ell_outputShape,
            self.layer.ell_outputPaddingParameters, ell.nodes.PortType.smallReal)

        # Create the ELL bias layer
        ellLayers.append(ell.neural.BiasLayer(layerParameters, biasVector))
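This single layer adds one bias value per channel. Assuming ELL's (rows, columns, channels) tensor layout, the equivalent NumPy expression is a simple broadcast add (illustrative only):

    import numpy as np

    # x: (rows, columns, channels), biasVector: (channels,)
    y = x + biasVector  # broadcasts one bias value per channel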
Example #4
    def process(self, ellLayers):
        """Helper to convert a convolutional layer to the ELL equivalent."""

        # Note that a single CNTK Convolutional function block is equivalent to the following 3 ELL layers:
        # - ConvolutionalLayer
        # - BiasLayer. This layer is sometimes missing, depending on whether bias is included.
        # - ActivationLayer. This layer is sometimes missing, depending on activation type.
        #
        # Therefore, make sure the output padding characteristics of the last layer reflect the next layer's
        # padding requirements.

        weightsTensor = converters.get_tensor_from_cntk_convolutional_weight_parameter(
            self.weights_parameter)

        internalNodes = utilities.get_model_layers(self.layer.block_root)
        activationType = utilities.get_ell_activation_type(internalNodes)
        isSoftmaxActivation = utilities.is_softmax_activation(internalNodes)
        hasActivation = isSoftmaxActivation or activationType is not None
        hasBias = self.bias_parameter is not None

        # Create the ell.neural.LayerParameters for the various ELL layers
        onlyLayerParameters = ell.neural.LayerParameters(
            self.layer.ell_inputShape, self.layer.ell_inputPaddingParameters,
            self.layer.ell_outputShape, self.layer.ell_outputPaddingParameters,
            ell.nodes.PortType.smallReal)
        firstLayerParameters = ell.neural.LayerParameters(
            self.layer.ell_inputShape, self.layer.ell_inputPaddingParameters,
            self.layer.ell_outputShapeMinusPadding, ell.neural.NoPadding(),
            ell.nodes.PortType.smallReal)
        middleLayerParameters = ell.neural.LayerParameters(
            self.layer.ell_outputShapeMinusPadding,
            ell.neural.NoPadding(), self.layer.ell_outputShapeMinusPadding,
            ell.neural.NoPadding(), ell.nodes.PortType.smallReal)
        lastLayerParameters = ell.neural.LayerParameters(
            self.layer.ell_outputShapeMinusPadding, ell.neural.NoPadding(),
            self.layer.ell_outputShape, self.layer.ell_outputPaddingParameters,
            ell.nodes.PortType.smallReal)

        # Choose the layer parameters for the convolutional layer. If there is
        # a bias or activation, the convolution is the first of two or more
        # layers; otherwise, it is the only layer.
        if hasActivation or hasBias:
            layerParameters = firstLayerParameters
        else:
            layerParameters = onlyLayerParameters

        # Fill in the convolutional parameters
        weightsShape = self.weights_parameter.shape
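        # CNTK lays convolutional weights out as (filters, channels, rows,
        # columns); the indexing below assumes a square receptive field and a
        # symmetric stride, reading a single value from each.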
        receptiveField = weightsShape[2]
        stride = self.attributes['strides'][2]

        filterBatchSize = layerParameters.outputShape.channels

        convolutionalParameters = ell.neural.ConvolutionalParameters(
            receptiveField, stride, self.convolution_method, filterBatchSize)

        # Create the ELL convolutional layer
        ellLayers.append(
            ell.neural.ConvolutionalLayer(layerParameters,
                                          convolutionalParameters,
                                          weightsTensor))

        # Create the ELL bias layer
        if hasBias:
            if hasActivation:
                layerParameters = middleLayerParameters
            else:
                layerParameters = lastLayerParameters
            biasVector = converters.get_vector_from_cntk_trainable_parameter(
                self.bias_parameter)
            ellLayers.append(ell.neural.BiasLayer(layerParameters, biasVector))

        # Create the ELL activation layer
        if hasActivation:
            layerParameters = lastLayerParameters

            # Special case: if this is softmax activation, create an ELL
            # Softmax layer. Otherwise, insert an ELL ActivationLayer.
            if isSoftmaxActivation:
                ellLayers.append(ell.neural.SoftmaxLayer(layerParameters))
            else:
                ellLayers.append(
                    ell.neural.ActivationLayer(layerParameters,
                                               activationType))
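For reference, a minimal NumPy sketch of the unpadded convolution the ELL ConvolutionalLayer performs, assuming the CNTK (filters, channels, rows, columns) weight layout noted above (illustrative only; conv2d_reference is hypothetical):

    import numpy as np

    def conv2d_reference(x, weights, stride):
        # x: (channels, rows, columns), weights: (filters, channels, k, k)
        f, c, k, _ = weights.shape
        outRows = (x.shape[1] - k) // stride + 1
        outCols = (x.shape[2] - k) // stride + 1
        out = np.zeros((f, outRows, outCols))
        for i in range(outRows):
            for j in range(outCols):
                patch = x[:, i * stride:i * stride + k,
                          j * stride:j * stride + k]
                # Dot every filter against the current receptive field
                out[:, i, j] = np.tensordot(
                    weights, patch, axes=([1, 2, 3], [0, 1, 2]))
        return out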