Example #1
    def _find_layer_config(self, layer_name: str):
        for idx, layer in enumerate(self._model_config['layers']):
            layer_name_ = layer['name'] if is_functional_model(self._model) \
                else layer['config']['name']
            if layer_name_ == layer_name:
                return idx, layer
        return None, None
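The branch on is_functional_model reflects a real difference in how tf.keras serializes the two model kinds. A minimal sketch of that difference (TF 2.x behavior; not part of the original snippet):

import tensorflow as tf

seq = tf.keras.Sequential([tf.keras.layers.Dense(4, name='d')])
print(sorted(seq.get_config()['layers'][0]))
# ['class_name', 'config'] -> the name only lives at layer['config']['name']

inp = tf.keras.Input(shape=(4,), name='in')
func = tf.keras.Model(inp, tf.keras.layers.Dense(4, name='d')(inp))
print(sorted(func.get_config()['layers'][1]))
# ['class_name', 'config', 'inbound_nodes', 'name'] -> 'name' at the top level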
Example #2
def apply_insert_after(model):
    converter = TFModelConverterFactory.create(model)
    transformations = TFTransformationLayout()
    qconfig = QuantizerConfig(num_bits=8,
                              mode=QuantizationMode.SYMMETRIC,
                              signedness_to_force=None,
                              per_channel=False)

    functional_model = is_functional_model(model)
    for i, layer in enumerate(model.layers):
        original_node_name = layer.name

        if functional_model:
            _, layer_info = converter.get_layer_info_for_node(
                original_node_name)
            instance_idx = layer_info.instance_idx
        else:
            instance_idx = 0

        fake_quantize_name = f'FakeQuantize_{i}/{original_node_name}'
        fake_quantize_layer = FakeQuantize(
            TFQuantizerSpec.from_config(qconfig, narrow_range=False, half_range=False),
            name=fake_quantize_name)

        transformations.register(
            TFInsertionCommand(
                target_point=commands.TFAfterLayer(original_node_name,
                                                   instance_idx=instance_idx,
                                                   output_port_id=0),
                callable_object=fake_quantize_layer,
                priority=TransformationPriority.QUANTIZATION_PRIORITY))

    transformer = TFModelTransformer(model)
    transformed_model = transformer.transform(transformations)
    return transformed_model
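A minimal way to exercise apply_insert_after, assuming the NNCF imports used by the snippet are in scope (the toy model below is hypothetical):

import tensorflow as tf

model = tf.keras.Sequential([
    tf.keras.layers.Dense(16, activation='relu', input_shape=(8,)),
    tf.keras.layers.Dense(4),
])
quantized = apply_insert_after(model)
quantized.summary()  # a FakeQuantize layer now follows each original layer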
Example #3
    def _replace_config(self, layer_name: str, replace_layer_config: Dict):
        replace_layer_name = replace_layer_config['config']['name']
        if is_functional_model(self._model):
            if 'name' not in replace_layer_config:
                replace_layer_config['name'] = replace_layer_name
            self._replace_functional(layer_name, replace_layer_config)
        else:
            self._replace_sequential(layer_name, replace_layer_config)

        self._update_layer_mapping(layer_name, replace_layer_name)
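replace_layer_config follows the dict layout produced by tf.keras.utils.serialize_keras_object, which is why the name is read from ['config']['name'] and, for functional models, mirrored to a top-level 'name' key. A quick illustration (TF 2.x):

import tensorflow as tf

cfg = tf.keras.utils.serialize_keras_object(tf.keras.layers.Dense(4, name='d'))
print(cfg['class_name'])      # 'Dense'
print(cfg['config']['name'])  # 'd'
print('name' in cfg)          # False -- hence the manual copy above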
Example #4
    def create(model) -> TFModelConverter:
        func_model = is_functional_model(model)
        seq_model = is_sequential_model(model)

        if not func_model and not seq_model:
            raise RuntimeError('Only sequential or functional models are supported')

        if func_model:
            converter = FunctionalConverter(model)
        else:
            converter = SequentialConverter(model)
        return converter
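is_functional_model and is_sequential_model are NNCF helpers; a plausible stand-in, assuming they reduce to type checks on the Keras model classes:

import tensorflow as tf

def is_sequential_model(model) -> bool:
    return isinstance(model, tf.keras.Sequential)

def is_functional_model(model) -> bool:
    # Functional models are non-Sequential tf.keras.Model instances built as
    # graph networks (subclassed models have no static layer config).
    # _is_graph_network is a private TF attribute; this check is an assumption.
    return isinstance(model, tf.keras.Model) \
           and not isinstance(model, tf.keras.Sequential) \
           and getattr(model, '_is_graph_network', False)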
Example #5
    def _shared_insert_layers(self, target_points: List[TargetPoint], layers: List[Callable]):
        functional_model = is_functional_model(self._model)
        if functional_model:
            for layer in self._model_config['input_layers']:
                for tp in target_points:
                    if isinstance(tp, TFBeforeLayer) and tp.layer_name == layer[0]:
                        raise RuntimeError(f'Insertion before input layer: {tp.layer_name} is not supported')

        layer_configs = []
        for layer in layers:
            config = tf.keras.utils.serialize_keras_object(layer)
            if functional_model:
                config['name'] = config['config']['name']
                config['inbound_nodes'] = []
                for i, tp in enumerate(target_points):
                    if isinstance(tp, TFAfterLayer):
                        config['inbound_nodes'].append([[tp.layer_name, tp.instance_idx, tp.output_port_id, {}]])
                    elif isinstance(tp, TFBeforeLayer):
                        idx, input_layer_cfg = self._find_layer_config(tp.layer_name)
                        inbound = [input_layer_cfg['inbound_nodes'][tp.instance_idx][tp.input_port_id]]
                        config['inbound_nodes'].append(inbound)
                        self._model_config['layers'][idx]['inbound_nodes'][tp.instance_idx][tp.input_port_id] = \
                                [config['name'], i, 0, {}]
                    else:
                        raise TypeError(
                            f'Insertion transform does not support {tp.type} target point type')

            layer_configs.append(config)

        for config in layer_configs:
            for i, tp in enumerate(target_points):
                if functional_model and isinstance(tp, TFAfterLayer):
                    layer_out_ports = set()
                    replace_layer_name = config['name']
                    for layer in self._model_config['layers']:
                        for inbound_node in layer['inbound_nodes']:
                            self._process_insertion_after(inbound_node, tp.layer_name, tp.instance_idx,
                                                          layer_out_ports, replace_layer_name, i)

                    self._insert_after_model_outputs(tp.layer_name, tp.instance_idx, layer_out_ports,
                                                     replace_layer_name, i)
                    if len(layer_out_ports) > 1:
                        raise RuntimeError(f'Insertion after layer ({tp.layer_name}) with '
                                           'multiple ports is not supported')

            # The new config still has to be added to the layer list for both
            # model kinds; for functional configs the connectivity was already
            # rewired via 'inbound_nodes' above.
            layer_name = target_points[0].layer_name
            self._insert_layer_after_sequential(layer_name, config)
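The [layer_name, instance_idx, output_port_id, {}] quadruples written above are Keras's serialized inbound-node records; a sketch of where they live in a functional config (TF 2.x):

import tensorflow as tf

inputs = tf.keras.Input(shape=(4,), name='in')
body = tf.keras.layers.Dense(4, name='body')(inputs)
cfg = tf.keras.Model(inputs, tf.keras.layers.Dense(2, name='head')(body)).get_config()

print(cfg['layers'][2]['inbound_nodes'])
# [[['body', 0, 0, {}]]] -> one node, one input: [layer, node_idx, tensor_idx, kwargs]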
Example #6
    def _insert_layers_after(self, layer_name: str, instance_idx: int, output_port_id: int,
                             layers: List):
        functional_model = is_functional_model(self._model)

        layer_configs = []
        for layer in layers:
            config = tf.keras.utils.serialize_keras_object(layer)
            if functional_model:
                config['name'] = config['config']['name']
                config['inbound_nodes'] = [[[layer_name, instance_idx, output_port_id, {}]]]
            layer_configs.append(config)

        for config in layer_configs:
            if functional_model:
                self._insert_layer_after_functional(layer_name, instance_idx, config)
            else:
                self._insert_layer_after_sequential(layer_name, config)
Example #7
def apply_insert_before(model):
    converter = TFModelConverterFactory.create(model)

    transformations = TFTransformationLayout()
    qconfig = QuantizerConfig(num_bits=8,
                              mode=QuantizationMode.SYMMETRIC,
                              signedness_to_force=None,
                              per_channel=False)

    functional_model = is_functional_model(model)
    for i, layer in enumerate(model.layers):
        # Insertion before input layer is not supported
        if isinstance(layer, layers.InputLayer):
            continue

        original_node_name = layer.name
        if functional_model:
            _, layer_info = converter.get_layer_info_for_node(
                original_node_name)
            instance_idx = layer_info.instance_idx
        else:
            instance_idx = 0

        inputs = [layer.input] if isinstance(layer.input, tf.Tensor) else layer.input

        for port, _ in enumerate(inputs):
            fake_quantize_name = f'FakeQuantize_{i}.{port}/{original_node_name}'
            fake_quantize_layer = FakeQuantize(
                TFQuantizerSpec.from_config(qconfig, narrow_range=False, half_range=False),
                name=fake_quantize_name)

            transformations.register(
                TFInsertionCommand(
                    target_point=commands.TFBeforeLayer(
                        original_node_name,
                        instance_idx=instance_idx,
                        input_port_id=port),
                    callable_object=fake_quantize_layer,
                    priority=TransformationPriority.QUANTIZATION_PRIORITY))

    transformer = TFModelTransformer(model)
    transformed_model = transformer.transform(transformations)
    return transformed_model
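As in Example #2, a hedged usage sketch, this time with a functional model so instance_idx actually comes from the converter (assumes the NNCF imports used by the snippet are in scope):

import tensorflow as tf

inputs = tf.keras.Input(shape=(8,))
x = tf.keras.layers.Dense(16, activation='relu')(inputs)
model = tf.keras.Model(inputs, tf.keras.layers.Dense(4)(x))

quantized = apply_insert_before(model)  # the InputLayer itself is skipped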
Example #8
    def transform(self, transformation_layout: TFTransformationLayout):
        """ Applies transformations to the Keras model.

        :param transformation_layout: List of transformations
        :return: The transformed Keras model
        """
        layer_weights_map = {layer.name: self._get_layer_weights(layer) for layer in self._model.layers}

        for transform in transformation_layout.transformations:
            self._apply_transformation(transform)

        if is_functional_model(self._model):
            transformed_model = tf.keras.Model.from_config(self._model_config, self._custom_objects)
        else:
            transformed_model = tf.keras.Sequential.from_config(self._model_config, self._custom_objects)

        # Weights are restored by layer name, so layers added by the
        # transformations keep their freshly initialized weights.
        for layer in transformed_model.layers:
            weights = layer_weights_map.get(layer.name)
            if weights:
                self._set_layer_weights(layer, weights)

        return transformed_model
Example #9
    def _insert_layers_before(self, layer_name: str, instance_idx: int, input_port_id: int,
                              layers: List):
        functional_model = is_functional_model(self._model)

        if functional_model:
            for layer in self._model_config['input_layers']:
                if layer_name == layer[0]:
                    raise RuntimeError(f'Insertion before input layer: {layer_name} is not supported')

        layer_configs = []
        idx, input_layer_cfg = self._find_layer_config(layer_name)
        for layer in layers:
            config = tf.keras.utils.serialize_keras_object(layer)
            if functional_model:
                config['name'] = config['config']['name']
                config['inbound_nodes'] = [input_layer_cfg['inbound_nodes'][instance_idx][input_port_id]]
                self._model_config['layers'][idx]['inbound_nodes'][instance_idx][input_port_id] = \
                    [config['name'], 0, 0, {}]
            layer_configs.append(config)

        for config in layer_configs:
            self._model_config['layers'].insert(idx, config)
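The net effect of the rewiring above, using the names from the Example #5 sketch (a hypothetical 'fq' layer inserted before 'head' at instance 0, port 0):

# Before:  head.inbound_nodes == [[['body', 0, 0, {}]]]
# After:   fq.inbound_nodes   == [['body', 0, 0, {}]]   (head's old input record)
#          head.inbound_nodes == [[['fq', 0, 0, {}]]]   (now fed by fq)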