Example #1
    def __init__(self, layer):
        if not layer.is_block:
            raise ValueError(
                "Error: Convolution layer node is not in block node")

        self.op_name = 'Convolution'
        # initialize weights and input characteristics
        self.input_parameter = layer.arguments[0]
        self.weights_parameter = utilities.find_parameter_by_name(
            layer.parameters, 'W', 0)
        self.bias_parameter = utilities.find_parameter_by_name(
            layer.parameters, 'b', 1)

        # Get the hyper-parameters for the convolution.
        # They are on the convolution node inside this block.
        convolution_nodes = depth_first_search(
            layer.block_root,
            lambda x: utilities.op_name_equals(x, 'Convolution'))

        self.attributes = convolution_nodes[0].attributes
        self.convolution_method = 0
        self.input_shape = self.input_parameter.shape

        super().__init__(layer)
        nodes = utilities.get_model_layers(layer.block_root)
        if utilities.is_softmax_activation(nodes):
            self.additional_layer_text = 'softmax'
        else:
            activation_type = utilities.get_cntk_activation_name(nodes)
            if activation_type:
                self.additional_layer_text = activation_type
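The hyper-parameters captured from the inner Convolution node live in its attributes dictionary. A minimal sketch of reading them, where 'conv' stands for an instance of the wrapper class above and the key names are assumptions about what CNTK exposes on Convolution nodes:

# Hypothetical: 'conv' is an instance of the wrapper class defined above; the
# attribute keys are assumptions about CNTK's Convolution node attributes.
strides = conv.attributes.get('strides')
auto_padding = conv.attributes.get('autoPadding')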
Example #2
    def process(self, ellLayers):
        """Appends the ELL equivalent of the current layer to ellLayers."""

        # Note that a single CNTK Dense function block is equivalent to the following 3 ELL layers:
        # - FullyConnectedLayer
        # - BiasLayer
        # - ActivationLayer. This layer is sometimes missing, depending on activation type.
        #
        # Therefore, make sure the output padding characteristics of the last layer reflect the next layer's
        # padding requirements.

        weightsParameter = utilities.find_parameter_by_name(
            self.layer.parameters, 'W', 0)
        biasParameter = utilities.find_parameter_by_name(
            self.layer.parameters, 'b', 1)
        weightsTensor = converters.get_tensor_from_cntk_dense_weight_parameter(
            weightsParameter)
        biasVector = converters.get_vector_from_cntk_trainable_parameter(
            biasParameter)

        # Create the ell.neural.LayerParameters for the various ELL layers
        firstLayerParameters = ell.neural.LayerParameters(
            self.layer.ell_inputShape, self.layer.ell_inputPaddingParameters, self.layer.ell_outputShapeMinusPadding,
            ell.neural.NoPadding(), ell.nodes.PortType.smallReal)
        middleLayerParameters = ell.neural.LayerParameters(
            self.layer.ell_outputShapeMinusPadding, ell.neural.NoPadding(),
            self.layer.ell_outputShapeMinusPadding, ell.neural.NoPadding(), ell.nodes.PortType.smallReal)
        lastLayerParameters = ell.neural.LayerParameters(
            self.layer.ell_outputShapeMinusPadding, ell.neural.NoPadding(),
            self.layer.ell_outputShape, self.layer.ell_outputPaddingParameters, ell.nodes.PortType.smallReal)

        layerParameters = firstLayerParameters

        internalNodes = utilities.get_model_layers(self.layer.block_root)
        activationType = utilities.get_ell_activation_type(internalNodes)

        # Create the ELL fully connected layer
        ellLayers.append(ell.neural.FullyConnectedLayer(
            layerParameters, weightsTensor))

        # Create the ELL bias layer
        if utilities.is_softmax_activation(internalNodes) or activationType is not None:
            layerParameters = middleLayerParameters
        else:
            layerParameters = lastLayerParameters
        ellLayers.append(ell.neural.BiasLayer(layerParameters, biasVector))

        # Create the ELL activation layer
        if utilities.is_softmax_activation(internalNodes) or activationType is not None:
            layerParameters = lastLayerParameters

            # Special case: if this is softmax activation, create an ELL Softmax layer.
            # Else, insert an ELL ActivationLayer.
            if utilities.is_softmax_activation(internalNodes):
                ellLayers.append(ell.neural.SoftmaxLayer(layerParameters))
            elif activationType is not None:
                ellLayers.append(ell.neural.ActivationLayer(
                    layerParameters, activationType))
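In a full import, a driver loop typically calls process() on each converted layer wrapper in order, accumulating everything into one list of ELL layers. A minimal usage sketch, where 'converted_layers' is an assumed list of such wrapper objects:

# Hypothetical driver: each wrapper appends its ELL equivalent(s) to a shared
# list that later becomes the ELL neural network model.
ellLayers = []
for layer_wrapper in converted_layers:
    layer_wrapper.process(ellLayers)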
Example #3
    def __init__(self, layer):
        self.op_name = 'BatchNormalization'

        self.scale = utilities.find_parameter_by_name(layer.parameters,
                                                      'scale', 0)
        self.bias = utilities.find_parameter_by_name(layer.parameters, 'bias',
                                                     1)
        self.mean = utilities.find_parameter_by_name(layer.constants,
                                                     'aggregate_mean', 0)
        self.variance = utilities.find_parameter_by_name(
            layer.constants, 'aggregate_variance', 1)

        # The default CNTK epsilon
        self.epsilon = 1e-5

        super().__init__(layer)
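The stored parameters correspond to the standard batch-normalization formula. A minimal NumPy sketch of the computation these values imply, for illustration only (not the ELL implementation):

import numpy as np

def batch_norm(x, scale, bias, mean, variance, epsilon=1e-5):
    # y = scale * (x - mean) / sqrt(variance + epsilon) + bias, applied per
    # channel using the aggregate statistics gathered during training.
    return scale * (x - mean) / np.sqrt(variance + epsilon) + bias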
Example #4
    def clone_cntk_layer(self, feature):
        """Returns a clone of the CNTK layer for per-layer forward prop validation"""
        weightsParameter = utilities.find_parameter_by_name(
            self.layer.parameters, 'W', 0)
        biasParameter = utilities.find_parameter_by_name(
            self.layer.parameters, 'b', 1)

        internalNodes = utilities.get_model_layers(self.layer.block_root)
        activationType = utilities.get_cntk_activation_op(internalNodes)

        includeBias = biasParameter is not None
        layer = Dense(self.layer.shape, activation=activationType, bias=includeBias)(feature)

        layer.parameters[0].value = weightsParameter.value
        if includeBias:
            layer.parameters[1].value = biasParameter.value
        return layer
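During per-layer validation, a clone like this is evaluated on the same input that is fed to the ELL layer and the outputs are compared. A standalone sketch of building and evaluating such a CNTK Dense layer, where the shape, activation, and random input are illustrative assumptions:

import numpy as np
import cntk
from cntk.layers import Dense

# Build a Dense layer with an explicit activation and bias, then evaluate it on
# random data; shapes and values here are illustrative only.
feature = cntk.input_variable((10,))
dense = Dense(5, activation=cntk.relu, bias=True)(feature)
output = dense.eval({feature: np.random.rand(1, 10).astype(np.float32)})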
Example #5
    def __init__(self, layer):
        if layer.is_block:
            raise ValueError(
                "Error: Binary Convolution layer node is in block node")

        self.op_name = 'BinaryConvolution'

        # Convolution function (assumed to be part of a Binary Convolution layer)
        # - Weights is 4-dimensional (filters, channels, rows, columns)
        # - Input is 3-dimensional (channels, rows, columns)
        # - Bias is a separate layer and not processed by this class
        # - Activation is a separate layer and not processed by this class
        if len(layer.inputs[0].shape) == 3:
            self.input_parameter = layer.inputs[0]
            weights_input = layer.inputs[1]
        else:
            self.input_parameter = layer.inputs[1]
            weights_input = layer.inputs[0]

        self.weights_parameter = utilities.find_parameter_by_name(
            weights_input.owner.parameters, 'filter')
        self.attributes = layer.attributes

        # Determine the binarization method used for weights based on the
        # name attributes of the UserFunctions defined in the custom_functions.py
        # used during training.
        # Until we can find a better heuristic, assume that the custom function names
        # don't change across models.
        function_name = weights_input.owner.name
        if function_name == 'Sign':
            self.convolution_method = ell.neural.BinaryConvolutionMethod.bitwise
            self.weights_scale = ell.neural.BinaryWeightsScale.none
        else:
            raise ValueError("Error: unrecognized binarization function: " +
                             function_name)

        self.input_shape = self.input_parameter.shape

        super().__init__(layer)
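The 'Sign' function detected above implies plain +1/-1 weight binarization with no per-filter scaling (BinaryWeightsScale.none). An illustrative NumPy sketch of that binarization, not the actual UserFunction from custom_functions.py:

import numpy as np

def binarize_weights(weights):
    # Map each weight to +1 or -1; with no weights scaling the binarized
    # values are used as-is, without a per-filter scale factor.
    return np.where(weights >= 0, 1.0, -1.0)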
Example #6
    def __init__(self, layer):
        self.op_name = 'PReLU'
        super().__init__(layer)
        self.prelu_parameter = utilities.find_parameter_by_name(
            self.layer.parameters, 'prelu', 0)
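For reference, the 'prelu' parameter found above holds the learned slope for negative inputs. A minimal NumPy sketch of the PReLU activation itself, for illustration only:

import numpy as np

def prelu(x, alpha):
    # PReLU: identity for positive inputs, learned slope 'alpha' for negative inputs.
    return np.where(x > 0, x, alpha * x)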