Code Example #1
File: quantize.py Project: codemogroup/Interview-Bot
  def _GetReluAndAddOperations(self, context, op):
    """Looks up a Relu* and Add operations in given context.

    Args:
      context: Context where to look for operations.
      op: Operation to quantize.

    Returns:
      A triplet (Operation, Operation, string): the first element is the end
      point operation, the second is the Add operation (optional), and the
      third is the string context where the Add operation was found (optional).

    Raises:
      ValueError: When operations cannot be found.
    """
    activation_op = common.GetEndpointActivationOp(self.graph, context)
    if activation_op:
      return activation_op, None, None

    if '/' in context:
      # If no activation op is there, look for them one level up.
      add_context = re.search(r'^(.*)/([^/]+)', context).group(1)
      activation_op = common.GetEndpointActivationOp(self.graph, add_context)
    if not activation_op:
      # Still no Relu, can happen on the top layer, just find the next node up,
      # make sure it is BiasAdd.
      consumers = [c for outp in op.outputs for c in outp.consumers()]
      if len(consumers) != 1 or consumers[0].type != 'BiasAdd':
        raise ValueError('Failed to quantize op: %s, %s' % (op.name, op.type))
      return consumers[0], None, None
    if add_context:
      add_op = self.GetOperationByNameDontThrow(add_context + '/Add')
      return activation_op, add_op, add_context
    else:
      raise ValueError('Failed to quantize op: %s, %s' % (op.name, op.type))
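The "one level up" lookup above strips the last path component from the op's context with a regex. A minimal standalone sketch of that step (the scope name below is hypothetical, not taken from the project):

import re

context = 'tower_0/resnet_block/conv1'         # hypothetical scope name
match = re.search(r'^(.*)/([^/]+)', context)
add_context = match.group(1) if match else ''  # parent scope, one level up
print(add_context)                             # -> 'tower_0/resnet_block'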
Code Example #2
def _FoldUnfusedBatchNorms(graph, is_training, freeze_batch_norm_delay):
    """Finds unfused batch norm layers and folds them into preceding layers.

    Folding only affects the following layers: Conv2D, fully connected, depthwise
    convolution.

    Args:
      graph: Graph to walk and modify.
      is_training: Bool, True if training.
      freeze_batch_norm_delay: How many steps to wait before freezing moving mean
        and variance and using them for batch normalization.

    Raises:
      ValueError: When batch norm folding fails.
    """
    input_to_ops_map = input_to_ops.InputToOps(graph)

    for bn in common.BatchNormGroups(graph):
        has_scaling = _HasScaling(graph, input_to_ops_map, bn)

        if not _IsValidUnfusedBatchNorm(graph, bn):
            continue

        # The mangling code intimately depends on BatchNorm node's internals.
        original_op, folded_op = _CreateFoldedOp(
            graph,
            bn,
            has_scaling=has_scaling,
            freeze_batch_norm_delay=freeze_batch_norm_delay,
            is_training=is_training)

        activation = common.GetEndpointActivationOp(graph, bn)
        if activation:
            nodes_modified_count = common.RerouteTensor(
                folded_op.outputs[0],
                original_op.outputs[0],
                can_modify=[activation])
            if nodes_modified_count != 1:
                raise ValueError('Unexpected inputs to op: %s' %
                                 activation.name)
            continue

        # Treat consumer ops in bypass modules differently since they have Add
        # operations instead of Relu* above.
        add_bypass_ctx = re.search(r'^(.*)/([^/]+)', bn).group(1)
        add_bypass = graph.get_operation_by_name(add_bypass_ctx + '/Add')
        nodes_modified_count = common.RerouteTensor(folded_op.outputs[0],
                                                    original_op.outputs[0],
                                                    can_modify=[add_bypass])
        if nodes_modified_count != 1:
            raise ValueError('Unexpected inputs to op: %s' % add_bypass.name)
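For context, the arithmetic behind batch norm folding can be sketched outside the graph: the per-channel scale gamma / sqrt(variance + eps) is multiplied into the preceding layer's weights, and beta - mean * scale becomes the new bias. A minimal NumPy sketch (illustrative only, not library code; the function name, HWIO layout, and eps default are assumptions):

import numpy as np

def fold_bn_into_conv(weights, gamma, beta, moving_mean, moving_var, eps=1e-3):
    """Fold per-channel batch norm parameters into an HWIO conv kernel.

    weights has shape [H, W, in_ch, out_ch]; the other arguments are vectors
    of length out_ch. Returns the folded (weights, bias) pair.
    """
    scale = gamma / np.sqrt(moving_var + eps)  # per output channel
    folded_weights = weights * scale           # broadcasts over the last axis
    folded_bias = beta - moving_mean * scale
    return folded_weights, folded_bias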
Code Example #3
def FoldBatchNorms(graph):
    """Finds batch norm layers in the graph, folds them into preceding layers.

    Folding only affects the following layers: Conv2D, fully connected, depthwise
    convolution.

    Args:
      graph: Graph to walk and modify.

    Raises:
      ValueError: When batch norm folding fails.
    """
    # Fail immediately when the graph contains unsupported fused batch norm ops.
    if any(op for op in graph.get_operations() if op.type == 'FusedBatchNorm'):
        raise ValueError('Fused batch norm is not supported')

    input_to_ops_map = input_to_ops.InputToOps(graph)

    for bn in common.BatchNormGroups(graph):
        has_scaling = _HasScaling(graph, input_to_ops_map, bn)

        # The mangling code intimately depends on BatchNorm node's internals.
        original_op, folded_op = _CreateFoldedOp(graph,
                                                 bn,
                                                 has_scaling=has_scaling)

        activation = common.GetEndpointActivationOp(graph, bn)
        if activation:
            nodes_modified_count = graph_editor.reroute_ts(
                [folded_op.outputs[0]], [original_op.outputs[0]],
                can_modify=[activation])
            if nodes_modified_count != 1:
                raise ValueError('Unexpected inputs to op: %s' %
                                 activation.name)
            continue

        # Treat consumer ops in bypass modules differently since they have Add
        # operations instead of Relu* above.
        add_bypass_ctx = re.search(r'^(.*)/([^/]+)', bn).group(1)
        add_bypass = graph.get_operation_by_name(add_bypass_ctx + '/Add')
        nodes_modified_count = graph_editor.reroute_ts(
            [folded_op.outputs[0]], [original_op.outputs[0]],
            can_modify=[add_bypass])
        if nodes_modified_count != 1:
            raise ValueError('Unexpected inputs to op: %s' % add_bypass.name)
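A minimal usage sketch, assuming TF 1.x with tf.contrib available; the model-building step is left as a placeholder, and in practice this rewrite is normally reached indirectly via tf.contrib.quantize.create_training_graph rather than by calling FoldBatchNorms directly:

import tensorflow as tf  # TF 1.x

g = tf.Graph()
with g.as_default():
    images = tf.placeholder(tf.float32, [None, 224, 224, 3])
    # ... build conv / batch norm / activation layers here ...

FoldBatchNorms(g)  # raises ValueError if a FusedBatchNorm op is present
                   # or if folding fails for any layer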
Code Example #4
def _FoldUnfusedBatchNorms(graph, is_training, freeze_batch_norm_delay):
    """Finds unfused batch norm layers and folds them into preceding layers.

    Folding only affects the following layers: Conv2D, fully connected, depthwise
    convolution.

    Args:
      graph: Graph to walk and modify.
      is_training: Bool, True if training.
      freeze_batch_norm_delay: How many steps to wait before freezing moving mean
        and variance and using them for batch normalization.

    Raises:
      ValueError: When batch norm folding fails.
    """
    input_to_ops_map = input_to_ops.InputToOps(graph)

    for bn in common.BatchNormGroups(graph):
        has_scaling = _HasScaling(graph, input_to_ops_map, bn)

        if not _IsValidUnfusedBatchNorm(graph, bn):
            continue

        print("found unfused batchnarm")
        raise Exception("Not Implemented")

        # The mangling code intimately depends on BatchNorm node's internals.
        original_op, folded_op = _CreateFoldedOp(
            graph,
            bn,
            has_scaling=has_scaling,
            freeze_batch_norm_delay=freeze_batch_norm_delay,
            is_training=is_training)

        activation = common.GetEndpointActivationOp(graph, bn)
        if activation:
            nodes_modified_count = common.RerouteTensor(
                folded_op.outputs[0],
                original_op.outputs[0],
                can_modify=[activation])
            if nodes_modified_count != 1:
                raise ValueError('Unexpected inputs to op: %s' %
                                 activation.name)
            continue

        # Treat consumer ops in bypass modules differently since they have Add
        # operations instead of Relu* above.
        # Make sure the correct scope is selected for the bypass Add: if the
        # batch norm lives at scope 'str1/str2', the bypass Add is at scope
        # 'str1'; if the batch norm is at the top-level scope 'str1', the
        # bypass Add is at scope ''. If there is no batch norm, there is no
        # bypass Add.
        add_bypass_ctx = ''
        if bn:
            try:
                add_bypass_ctx = re.search(r'^(.*)/([^/]+)', bn).group(1)
            except AttributeError:
                add_bypass_ctx = ''

        if add_bypass_ctx:
            add_bypass_ctx = add_bypass_ctx + '/'

        add_bypass = graph.get_operation_by_name(add_bypass_ctx + 'Add')
        nodes_modified_count = common.RerouteTensor(folded_op.outputs[0],
                                                    original_op.outputs[0],
                                                    can_modify=[add_bypass])
        if nodes_modified_count != 1:
            raise ValueError('Unexpected inputs to op: %s' % add_bypass.name)
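An illustrative sketch (not part of the project) of the bypass-scope rule spelled out in the comment above: a batch norm at scope 'str1/str2' has its bypass Add looked up at 'str1/Add', while a top-level batch norm at scope 'str1' resolves to plain 'Add'. The function and scope names below are hypothetical:

import re

def bypass_add_name(bn_scope):
    """Return the name of the bypass Add op for a given batch norm scope."""
    match = re.search(r'^(.*)/([^/]+)', bn_scope)
    ctx = match.group(1) + '/' if match else ''
    return ctx + 'Add'

print(bypass_add_name('tower_0/block_1/BatchNorm'))  # -> 'tower_0/block_1/Add'
print(bypass_add_name('BatchNorm'))                  # -> 'Add'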