Example #1
    def process(self, ellLayers):
        """Appends the ELL equivalent of the current layer to ellLayers."""

        # Note that a single CNTK Dense function block is equivalent to the following 3 ELL layers:
        # - FullyConnectedLayer
        # - BiasLayer
        # - ActivationLayer. This layer is sometimes missing, depending on activation type.
        #
        # Therefore, make sure the output padding characteristics of the last layer reflect the next layer's
        # padding requirements.

        weightsParameter = utilities.find_parameter_by_name(
            self.layer.parameters, 'W', 0)
        biasParameter = utilities.find_parameter_by_name(
            self.layer.parameters, 'b', 1)
        weightsTensor = converters.get_float_tensor_from_cntk_dense_weight_parameter(
            weightsParameter)
        biasVector = converters.get_float_vector_from_cntk_trainable_parameter(
            biasParameter)

        # Create the ell.LayerParameters for the various ELL layers
        firstLayerParameters = ell.LayerParameters(
            self.layer.ell_inputShape, self.layer.ell_inputPaddingParameters,
            self.layer.ell_outputShapeMinusPadding, ell.NoPadding())
        middleLayerParameters = ell.LayerParameters(
            self.layer.ell_outputShapeMinusPadding, ell.NoPadding(),
            self.layer.ell_outputShapeMinusPadding, ell.NoPadding())
        lastLayerParameters = ell.LayerParameters(
            self.layer.ell_outputShapeMinusPadding, ell.NoPadding(),
            self.layer.ell_outputShape, self.layer.ell_outputPaddingParameters)
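        # Only the first of the three layers consumes the block's input
        # padding, and only the last produces the block's output padding;
        # the intermediate results carry no padding at all.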

        layerParameters = firstLayerParameters

        internalNodes = utilities.get_model_layers(self.layer.block_root)
        activationType = utilities.get_ell_activation_type(internalNodes)

        # Create the ELL fully connected layer
        ellLayers.append(ell.FloatFullyConnectedLayer(
            layerParameters, weightsTensor))

        # Create the ELL bias layer
        if utilities.is_softmax_activation(internalNodes) or activationType is not None:
            layerParameters = middleLayerParameters
        else:
            layerParameters = lastLayerParameters
        ellLayers.append(ell.FloatBiasLayer(layerParameters, biasVector))

        # Create the ELL activation layer
        if utilities.is_softmax_activation(internalNodes) or activationType is not None:
            layerParameters = lastLayerParameters

            # Special case: if this is softmax activation, create an ELL Softmax layer.
            # Else, insert an ELL ActivationLayer
            if utilities.is_softmax_activation(internalNodes):
                ellLayers.append(ell.FloatSoftmaxLayer(layerParameters))
            elif activationType is not None:
                ellLayers.append(ell.FloatActivationLayer(
                    layerParameters, activationType))
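
A minimal sketch of how these process() methods are typically driven, assuming the importer has already built a converted_layers list of wrapped CNTK layers (the list and the predictor constructor name are assumptions, not confirmed API):

    ellLayers = []
    for converted in converted_layers:  # hypothetical list of converter objects
        converted.process(ellLayers)    # each call appends one to three ELL layers

    # Assumed constructor name; the actual ELL binding may differ.
    predictor = ell.FloatNeuralNetworkPredictor(ellLayers)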
Example #2
    def __init__(self, layer):
        if not layer.is_block:
            raise ValueError(
                "Error: Convolution layer node is not in block node")

        self.op_name = 'Convolution'
        # initialize weights and input characteristics
        self.input_parameter = layer.arguments[0]
        self.weights_parameter = utilities.find_parameter_by_name(
            layer.parameters, 'W', 0)
        self.bias_parameter = utilities.find_parameter_by_name(
            layer.parameters, 'b', 1)

        # Get the hyper-parameters for the convolution.
        # They are on the convolution node inside this block.
        convolution_nodes = depth_first_search(
            layer.block_root,
            lambda x: utilities.op_name_equals(x, 'Convolution'))

        self.attributes = convolution_nodes[0].attributes
        self.convolution_method = 0
        self.input_shape = self.input_parameter.shape

        super().__init__(layer)
        nodes = utilities.get_model_layers(layer.block_root)
        if utilities.is_softmax_activation(nodes):
            self.additional_layer_text = 'softmax'
        else:
            activation_type = utilities.get_cntk_activation_name(nodes)
            if activation_type:
                self.additional_layer_text = activation_type
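
For context, a plausible implementation of the utilities.find_parameter_by_name helper used in the examples above (an assumption about its behavior, not the library's actual code): look the parameter up by name first, then fall back to the given positional index, since CNTK sometimes leaves parameters unnamed.

    def find_parameter_by_name(parameters, name, index=0):
        for parameter in parameters:
            if parameter.name == name:
                return parameter
        # Fallback for unnamed parameters: assume a stable positional layout.
        return parameters[index]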
Example #3
    def __repr__(self):
        """Prints summary info about this layer."""

        label = self.op_name
        nodes = utilities.get_model_layers(self.layer.block_root)
        if utilities.is_softmax_activation(nodes):
            label += "(softmax)"
        else:
            activation_type = utilities.get_activation_type(nodes)
            if activation_type is not None:
                label += "(" + utilities.ell_activation_type_to_string(
                    activation_type) + ")"

        return " ".join(
            (label, ": ",
             utilities.ell_shape_to_string(self.layer.ell_inputShape), " -> ",
             utilities.ell_shape_to_string(self.layer.ell_outputShape),
             "| input padding",
             str(self.layer.ell_inputPaddingParameters.paddingSize),
             " output padding",
             str(self.layer.ell_outputPaddingParameters.paddingSize)))
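
Hypothetical usage: calling repr() on a converted layer (converted_layer and the shapes below are invented for illustration) yields a one-line summary such as:

    print(repr(converted_layer))
    # Dense(relu) :  1x1x512  ->  1x1x10 | input padding 0  output padding 0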
Example #4
    def process(self, ellLayers):
        """Helper to convert a convolutional layer to the ELL equivalent."""

        # Note that a single CNTK Convolutional function block is equivalent to the following 3 ELL layers:
        # - ConvolutionalLayer
        # - BiasLayer
        # - ActivationLayer. This layer is sometimes missing, depending on activation type.
        #
        # Therefore, make sure the output padding characteristics of the last layer reflect the next layer's
        # padding requirements.

        weightsTensor = converters.get_float_tensor_from_cntk_convolutional_weight_parameter(
            self.weights_parameter)
        biasVector = converters.get_float_vector_from_cntk_trainable_parameter(
            self.bias_parameter)

        # Create the ELL.LayerParameters for the various ELL layers
        firstLayerParameters = ELL.LayerParameters(
            self.layer.ell_inputShape, self.layer.ell_inputPaddingParameters,
            self.layer.ell_outputShapeMinusPadding, ELL.NoPadding())
        middleLayerParameters = ELL.LayerParameters(
            self.layer.ell_outputShapeMinusPadding, ELL.NoPadding(),
            self.layer.ell_outputShapeMinusPadding, ELL.NoPadding())
        lastLayerParameters = ELL.LayerParameters(
            self.layer.ell_outputShapeMinusPadding, ELL.NoPadding(),
            self.layer.ell_outputShape, self.layer.ell_outputPaddingParameters)

        layerParameters = firstLayerParameters

        # Fill in the convolutional parameters
        weightsShape = self.weights_parameter.shape
        receptiveField = weightsShape[2]
        stride = self.attributes['strides'][2]

        filterBatchSize = layerParameters.outputShape.channels

        internalNodes = utilities.get_model_layers(self.layer.block_root)
        activationType = utilities.get_ell_activation_type(internalNodes)

        convolutionalParameters = ELL.ConvolutionalParameters(
            receptiveField, stride, self.convolution_method, filterBatchSize)

        # Create the ELL convolutional layer
        ellLayers.append(
            ELL.FloatConvolutionalLayer(layerParameters,
                                        convolutionalParameters,
                                        weightsTensor))

        # Create the ELL bias layer
        isSoftmaxActivation = utilities.is_softmax_activation(internalNodes)
        hasActivation = isSoftmaxActivation or activationType is not None
        if hasActivation:
            layerParameters = middleLayerParameters
        else:
            layerParameters = lastLayerParameters
        ellLayers.append(ELL.FloatBiasLayer(layerParameters, biasVector))

        # Create the ELL activation layer
        if hasActivation:
            layerParameters = lastLayerParameters

            # Special case: if this is softmax activation, create an ELL Softmax layer.
            # Else, insert an ELL ActivationLayer
            if isSoftmaxActivation:
                ellLayers.append(ELL.FloatSoftmaxLayer(layerParameters))
            else:
                ellLayers.append(
                    ELL.FloatActivationLayer(layerParameters, activationType))
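
A worked illustration of the hyper-parameter extraction above, with assumed values (not from the source): for CNTK weights of shape (filters, channels, rows, columns) = (16, 3, 3, 3) and a strides attribute of (1, 2, 2), the code reads:

    weightsShape = (16, 3, 3, 3)
    receptiveField = weightsShape[2]   # 3: filter rows (square filters assumed)
    stride = (1, 2, 2)[2]              # 2: column stride
    # filterBatchSize comes from the output shape's channel count (16 here).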