Code example #1
# Imports assumed from the surrounding ELL importer module:
import os
from cntk import load_model
import cntk_layers
import cntk_utilities
import ell
import logger

def predictor_from_cntk_model(modelFile, plotModel=False):
    """Loads a CNTK model and returns an ell.neural.NeuralNetworkPredictor"""

    _logger = logger.get()
    _logger.info("Loading...")
    z = load_model(modelFile)
    _logger.info("\nFinished loading.")

    if plotModel:
        filename = os.path.join(os.path.dirname(modelFile), os.path.basename(modelFile) + ".svg")
        cntk_utilities.plot_model(z, filename)

    _logger.info("Pre-processing...")
    modelLayers = cntk_utilities.get_model_layers(z)

    # Get the relevant CNTK layers that we will convert to ELL
    layersToConvert = cntk_layers.get_filtered_layers_list(modelLayers)
    _logger.info("\nFinished pre-processing.")

    predictor = None

    try:
        # Create a list of ELL layers from the CNTK layers
        ellLayers = cntk_layers.convert_cntk_layers_to_ell_layers(
            layersToConvert)
        # Create an ELL neural network predictor from the layers
        predictor = ell.neural.NeuralNetworkPredictor(ellLayers)
    except BaseException as exception:
        _logger.error("Error occurred attempting to convert CNTK layers to ELL layers: " + str(exception))
        # Re-raise with the original traceback intact.
        raise

    return predictor
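
A minimal usage sketch (hedged: the model path is a placeholder, and it assumes ELL's Python bindings and the importer modules above are importable):

# "myModel.cntk" is a placeholder path to a trained CNTK model file.
predictor = predictor_from_cntk_model("myModel.cntk", plotModel=True)
# The returned ell.neural.NeuralNetworkPredictor feeds into ELL's model
# tooling; consult the ELL documentation for the exact follow-on API.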
Code example #2
    def __init__(self, layer):
        if not layer.is_block:
            raise ValueError(
                "Convolution layer node is not a block node")

        self.op_name = 'Convolution'
        # Initialize weights and input characteristics.
        self.input_parameter = layer.arguments[0]
        self.weights_parameter = utilities.find_parameter_by_name(
            layer.parameters, 'W', 0)
        self.bias_parameter = utilities.find_parameter_by_name(
            layer.parameters, 'b', 1)

        # Get the hyper-parameters for the convolution.
        # They are on the convolution node inside this block.
        convolution_nodes = depth_first_search(
            layer.block_root,
            lambda x: utilities.op_name_equals(x, 'Convolution'))

        self.attributes = convolution_nodes[0].attributes
        # Convolution method selector, passed to ell.neural.ConvolutionalParameters in process().
        self.convolution_method = 0
        self.input_shape = self.input_parameter.shape

        super().__init__(layer)
        nodes = utilities.get_model_layers(layer.block_root)
        if utilities.is_softmax_activation(nodes):
            self.additional_layer_text = 'softmax'
        else:
            activation_type = utilities.get_cntk_activation_name(nodes)
            if activation_type:
                self.additional_layer_text = activation_type
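
Both parameter lookups above go through utilities.find_parameter_by_name. A plausible sketch of such a helper, assuming it prefers a name match and falls back to a positional index (an illustration only, not ELL's actual implementation):

def find_parameter_by_name(parameters, name, index=None):
    # Prefer an exact name match ('W' or 'b' on CNTK block parameters).
    for parameter in parameters:
        if parameter.name == name:
            return parameter
    # Fall back to the caller-supplied position when the names are absent.
    if index is not None and 0 <= index < len(parameters):
        return parameters[index]
    return None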
Code example #3
    def process(self, ellLayers):
        """Appends the ELL equivalent of the current layer to ellLayers."""

        # Note that a single CNTK Dense function block is equivalent to the following 3 ELL layers:
        # - FullyConnectedLayer
        # - BiasLayer
        # - ActivationLayer. This layer is sometimes missing, depending on activation type.
        #
        # Therefore, make sure the output padding characteristics of the last layer reflect the next layer's
        # padding requirements.

        weightsParameter = utilities.find_parameter_by_name(
            self.layer.parameters, 'W', 0)
        biasParameter = utilities.find_parameter_by_name(
            self.layer.parameters, 'b', 1)
        weightsTensor = converters.get_tensor_from_cntk_dense_weight_parameter(
            weightsParameter)
        biasVector = converters.get_vector_from_cntk_trainable_parameter(
            biasParameter)

        # Create the ell.neural.LayerParameters for the various ELL layers
        firstLayerParameters = ell.neural.LayerParameters(
            self.layer.ell_inputShape, self.layer.ell_inputPaddingParameters, self.layer.ell_outputShapeMinusPadding,
            ell.neural.NoPadding(), ell.nodes.PortType.smallReal)
        middleLayerParameters = ell.neural.LayerParameters(
            self.layer.ell_outputShapeMinusPadding, ell.neural.NoPadding(),
            self.layer.ell_outputShapeMinusPadding, ell.neural.NoPadding(), ell.nodes.PortType.smallReal)
        lastLayerParameters = ell.neural.LayerParameters(
            self.layer.ell_outputShapeMinusPadding, ell.neural.NoPadding(),
            self.layer.ell_outputShape, self.layer.ell_outputPaddingParameters, ell.nodes.PortType.smallReal)

        layerParameters = firstLayerParameters

        internalNodes = utilities.get_model_layers(self.layer.block_root)
        activationType = utilities.get_ell_activation_type(internalNodes)
        isSoftmaxActivation = utilities.is_softmax_activation(internalNodes)
        hasActivation = isSoftmaxActivation or activationType is not None

        # Create the ELL fully connected layer
        ellLayers.append(ell.neural.FullyConnectedLayer(
            layerParameters, weightsTensor))

        # Create the ELL bias layer
        if hasActivation:
            layerParameters = middleLayerParameters
        else:
            layerParameters = lastLayerParameters
        ellLayers.append(ell.neural.BiasLayer(layerParameters, biasVector))

        # Create the ELL activation layer
        if hasActivation:
            layerParameters = lastLayerParameters

            # Special case: if this is a softmax activation, create an ELL Softmax layer.
            # Otherwise, insert an ELL ActivationLayer.
            if isSoftmaxActivation:
                ellLayers.append(ell.neural.SoftmaxLayer(layerParameters))
            else:
                ellLayers.append(ell.neural.ActivationLayer(
                    layerParameters, activationType))
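
The three LayerParameters objects above encode the pattern announced in the comment: only the first ELL layer of the fused block consumes the padded input, only the last re-applies the output padding the next layer expects, and every intermediate hand-off uses NoPadding with the unpadded output shape. Which parameters the bias and activation layers receive therefore depends solely on which of them ends up last.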
Code example #4
    def __init__(self, layer):
        if not layer.is_block:
            raise ValueError("Dense node is not a block node")

        self.op_name = 'Dense'
        super().__init__(layer)
        internalNodes = utilities.get_model_layers(self.layer.block_root)
        self.additional_layer_text = utilities.get_cntk_activation_name(internalNodes)
Code example #5
    def clone_cntk_layer(self, feature):
        """Returns a clone of the CNTK layer for per-layer forward prop validation"""
        weightsParameter = utilities.find_parameter_by_name(
            self.layer.parameters, 'W', 0)
        biasParameter = utilities.find_parameter_by_name(
            self.layer.parameters, 'b', 1)

        internalNodes = utilities.get_model_layers(self.layer.block_root)
        activationType = utilities.get_cntk_activation_op(internalNodes)

        includeBias = biasParameter is not None
        layer = Dense(self.layer.shape, activation=activationType, bias=includeBias)(feature)

        layer.parameters[0].value = weightsParameter.value
        if includeBias:
            layer.parameters[1].value = biasParameter.value
        return layer
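
A hedged sketch of how this clone might be driven during validation; converter is a placeholder name for an instance of the importer class above, and batching conventions can differ between CNTK versions:

import cntk
import numpy as np

# Build a CNTK input variable matching the original layer's input shape.
feature = cntk.input_variable(converter.layer.arguments[0].shape)
clone = converter.clone_cntk_layer(feature)

# Forward-propagate one random sample; the result can be compared against
# the converted ELL layer's output for the same input.
sample = np.random.rand(*converter.layer.arguments[0].shape).astype(np.float32)
reference = clone.eval({feature: [sample]})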
Code example #6
    def clone_cntk_layer(self, feature):
        """Returns a clone of the CNTK layer for per-layer forward prop validation"""

        nodes = utilities.get_model_layers(self.layer.block_root)
        activation = utilities.get_cntk_activation_op(nodes)

        weightsShape = self.weights_parameter.shape
        pad = self.attributes['autoPadding'][0] or (
            self.attributes['autoPadding'][1] and self.attributes['autoPadding'][2])
        bias = (self.bias_parameter is not None)

        layer = Convolution((weightsShape[2], weightsShape[3]), weightsShape[0],
                            pad=pad, activation=activation, bias=bias)(feature)

        layer.parameters[0].value = self.weights_parameter.value
        if bias:
            layer.parameters[1].value = self.bias_parameter.value
        return layer
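
Note the indexing: CNTK lays convolution weights out as (filter_count, input_channels, kernel_rows, kernel_columns), so weightsShape[0] is the filter count and (weightsShape[2], weightsShape[3]) is the receptive field the clone is rebuilt with. The pad flag is derived from CNTK's per-axis autoPadding list, treating the layer as padded when the spatial axes request padding.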
Code example #7
File: cntk_full_model_test.py Project: n-gineer/ELL
    def run(self):
        self.report = open("report.md", "w")
        self.report.write("# Comparison Results\n")
        self.report.write("**model**: %s\n\n" % (self.model_file))
        if self.image_file is not None:
            self.image = self.load_image(self.image_file)
            self.report.write("**image**: %s\n\n" % (self.image_file))

        self.cntk_model = cntk.load_model(self.model_file)
        modelLayers = cntk_utilities.get_model_layers(self.cntk_model)
        # Get the relevant CNTK layers that we will convert to ELL
        layersToConvert = cntk_layers.get_filtered_layers_list(modelLayers)
        self.logger.info(
            "----------------------------------------------------------------------------------"
        )
        if self.layers:
            for layer in layersToConvert:
                self.compare_layer(layer)
        else:
            self.compare_model(layersToConvert)

        self.print_top_result()
        self.report.close()
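
One robustness note on this driver: report.md is opened at the top of run() and only closed at the end because compare_layer and compare_model write to it along the way; wrapping the body in a try/finally (or a with block) would keep the report from being left open if a comparison raises.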
Code example #8
    def process(self, ellLayers):
        """Helper to convert a convolutional layer to the ELL equivalent."""

        # Note that a single CNTK Convolutional function block is equivalent to the following 3 ELL layers:
        # - ConvolutionalLayer
        # - BiasLayer. This layer is sometimes missing, depending on whether bias is included.
        # - ActivationLayer. This layer is sometimes missing, depending on activation type.
        #
        # Therefore, make sure the output padding characteristics of the last layer reflect the next layer's
        # padding requirements.

        weightsTensor = converters.get_tensor_from_cntk_convolutional_weight_parameter(
            self.weights_parameter)

        internalNodes = utilities.get_model_layers(self.layer.block_root)
        activationType = utilities.get_ell_activation_type(internalNodes)
        isSoftmaxActivation = utilities.is_softmax_activation(internalNodes)
        hasActivation = isSoftmaxActivation or activationType is not None
        hasBias = self.bias_parameter is not None

        # Create the ell.neural.LayerParameters for the various ELL layers
        onlyLayerParameters = ell.neural.LayerParameters(
            self.layer.ell_inputShape, self.layer.ell_inputPaddingParameters,
            self.layer.ell_outputShape, self.layer.ell_outputPaddingParameters,
            ell.nodes.PortType.smallReal)
        firstLayerParameters = ell.neural.LayerParameters(
            self.layer.ell_inputShape, self.layer.ell_inputPaddingParameters,
            self.layer.ell_outputShapeMinusPadding, ell.neural.NoPadding(),
            ell.nodes.PortType.smallReal)
        middleLayerParameters = ell.neural.LayerParameters(
            self.layer.ell_outputShapeMinusPadding,
            ell.neural.NoPadding(), self.layer.ell_outputShapeMinusPadding,
            ell.neural.NoPadding(), ell.nodes.PortType.smallReal)
        lastLayerParameters = ell.neural.LayerParameters(
            self.layer.ell_outputShapeMinusPadding, ell.neural.NoPadding(),
            self.layer.ell_outputShape, self.layer.ell_outputPaddingParameters,
            ell.nodes.PortType.smallReal)

        # Choose the layer parameters for the convolutional layer. If there is
        # bias or activation, then the convolution is the first of two or more,
        # otherwise it is the only layer
        if hasActivation or hasBias:
            layerParameters = firstLayerParameters
        else:
            layerParameters = onlyLayerParameters

        # Fill in the convolutional parameters
        weightsShape = self.weights_parameter.shape
        receptiveField = weightsShape[2]
        stride = self.attributes['strides'][2]

        filterBatchSize = layerParameters.outputShape.channels

        convolutionalParameters = ell.neural.ConvolutionalParameters(
            receptiveField, stride, self.convolution_method, filterBatchSize)

        # Create the ELL convolutional layer
        ellLayers.append(
            ell.neural.ConvolutionalLayer(layerParameters,
                                          convolutionalParameters,
                                          weightsTensor))

        # Create the ELL bias layer
        if hasBias:
            if hasActivation:
                layerParameters = middleLayerParameters
            else:
                layerParameters = lastLayerParameters
            biasVector = converters.get_vector_from_cntk_trainable_parameter(
                self.bias_parameter)
            ellLayers.append(ell.neural.BiasLayer(layerParameters, biasVector))

        # Create the ELL activation layer
        if hasActivation:
            layerParameters = lastLayerParameters

            # Special case: if this is a softmax activation, create an ELL Softmax layer.
            # Otherwise, insert an ELL ActivationLayer.
            if isSoftmaxActivation:
                ellLayers.append(ell.neural.SoftmaxLayer(layerParameters))
            else:
                ellLayers.append(
                    ell.neural.ActivationLayer(layerParameters,
                                               activationType))
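
Two layout assumptions are worth calling out here. First, the CNTK convolution weight tensor is ordered (filter_count, input_channels, kernel_rows, kernel_columns), so weightsShape[2] supplies the receptive field. Second, a single receptiveField and stride are handed to ell.neural.ConvolutionalParameters, so the importer evidently targets square kernels with equal row and column strides; filterBatchSize comes straight from the output channel count, meaning all filters are processed as one batch.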