Example 1
0
def _get_base_op_hander_dicts():
    """Builds the default op_handler mapping shared by the regularizers.

    Returns:
      A collections.defaultdict mapping op type names to op handler
      instances; unknown op types fall back to a fresh GroupingOpHandler.
    """
    handlers = {
        'ConcatV2': concat_op_handler.ConcatOpHandler(),
        'DepthwiseConv2dNative':
            depthwise_convolution_op_handler.DepthwiseConvolutionOpHandler(),
    }
    # Ops whose outputs do not pass channel groups through from their inputs.
    for op_type in ('DepthToSpace', 'ExpandDims', 'Transpose'):
        handlers[op_type] = (
            output_non_passthrough_op_handler.OutputNonPassthroughOpHandler())
    # Ops treated as leaves of channel-group propagation.
    for op_type in ('RandomUniform', 'Reshape', 'Shape', 'SpaceToDepth',
                    'StridedSlice', 'TensorArrayGatherV3'):
        handlers[op_type] = leaf_op_handler.LeafOpHandler()
    return collections.defaultdict(
        grouping_op_handler.GroupingOpHandler, handlers)
Example 2
0
def _get_base_op_hander_dicts():
    """Returns the base op_hander_dict for all regularizers."""
    # Op types not listed below fall back to a fresh GroupingOpHandler.
    base_dict = collections.defaultdict(grouping_op_handler.GroupingOpHandler)
    base_dict['ConcatV2'] = concat_op_handler.ConcatOpHandler()
    base_dict['DepthwiseConv2dNative'] = (
        depthwise_convolution_op_handler.DepthwiseConvolutionOpHandler())
    # Ops whose outputs do not pass channel groups through from their inputs.
    for op_type in ('DepthToSpace', 'ExpandDims', 'Transpose'):
        base_dict[op_type] = (
            output_non_passthrough_op_handler.OutputNonPassthroughOpHandler())
    # Ops treated as leaves of channel-group propagation.
    for op_type in ('RandomUniform', 'Reshape', 'Shape', 'SpaceToDepth',
                    'StridedSlice', 'TensorArrayGatherV3'):
        base_dict[op_type] = leaf_op_handler.LeafOpHandler()
    for resize_method in RESIZE_OP_NAMES:
        # Resize* ops, second input might be a tensor which will result in an error.
        # Restrict grouping to input 0 only.
        base_dict[resize_method] = grouping_op_handler.GroupingOpHandler([0])
    return base_dict
Example 3
0
    def testAssignGrouping_DepthMultiplier(self):
        """Tests per-slice grouping when depth_multiplier > 1."""
        # Only the relu1 slice already belongs to a group.
        self.op_group_dict = {
            self.relu1_op_slice: self.relu1_op_group,
        }

        # Invoke the handler under test.
        handler = depthwise_convolution_op_handler.DepthwiseConvolutionOpHandler(
        )
        handler.assign_grouping(self.dwise_conv2_op, self.mock_op_reg_manager)

        # The manager is expected to look up OpSlices in this order.
        expected_lookups = [
            # Checking for ops to process.
            mock.call(self.relu1_op),
            mock.call(self.conv2_op),
            # Reslicing.
            mock.call(self.relu1_op),
            mock.call(self.dwise_conv2_op),
            # Refreshing slice data.
            mock.call(self.relu1_op),
            # Group depthwise convolution.
            mock.call(self.dwise_conv2_op),
        ]
        self.mock_op_reg_manager.get_op_slices.assert_has_calls(
            expected_lookups)

        # With depth_multiplier=2, depthwise output slice i is grouped with
        # input slice i // 2.
        expected_groupings = []
        for i in range(10):
            dwise_slice = getattr(
                self, 'dwise_conv2_op_slice_%d_%d' % (i, i + 1))
            input_slice = getattr(
                self, 'relu1_op_slice_%d_%d' % (i // 2, i // 2 + 1))
            expected_groupings.append(mock.call([dwise_slice, input_slice]))
        self.mock_op_reg_manager.group_op_slices.assert_has_calls(
            expected_groupings)

        # Only the downstream conv is queued for processing; nothing deferred.
        self.mock_op_reg_manager.process_ops.assert_called_once_with(
            [self.conv2_op])
        self.mock_op_reg_manager.process_ops_last.assert_not_called()
Example 4
0
    def testAssignGrouping_NoDepthMultiplier(self):
        """Tests grouping when depth_multiplier=1.

        With a depth multiplier of 1 the depthwise convolution has a single
        OpSlice, so it is grouped with its input as a whole rather than
        slice-by-slice.
        """
        # Repeat setUp, but with depth_multiplier=1.  Unfortunately, this involves
        # rebuilding the graph from scratch.
        # NOTE(review): relies on TF 1.x APIs (reset_default_graph, tf.contrib).
        tf.reset_default_graph()

        # This tests a Conv2D -> SeparableConv2D -> Conv2D chain of ops.
        with tf.contrib.framework.arg_scope(self._batch_norm_scope()):
            inputs = tf.zeros([2, 4, 4, 3])
            c1 = layers.conv2d(inputs,
                               num_outputs=5,
                               kernel_size=3,
                               scope='conv1')
            c2 = layers.separable_conv2d(c1,
                                         num_outputs=8,
                                         kernel_size=3,
                                         depth_multiplier=1,
                                         scope='conv2')
            layers.conv2d(c2, num_outputs=6, kernel_size=3, scope='conv3')

        g = tf.get_default_graph()

        # Declare OpSlice and OpGroup for ops of interest.
        self.dwise_conv2_op = g.get_operation_by_name(
            'conv2/separable_conv2d/depthwise')
        # One slice covering all 5 channels, since depth_multiplier=1.
        self.dwise_conv2_op_slice = orm.OpSlice(self.dwise_conv2_op,
                                                orm.Slice(0, 5))

        self.conv2_op = g.get_operation_by_name('conv2/separable_conv2d')
        self.conv2_op_slice = orm.OpSlice(self.conv2_op, orm.Slice(0, 8))

        self.relu1_op = g.get_operation_by_name('conv1/Relu')
        self.relu1_op_slice = orm.OpSlice(self.relu1_op, orm.Slice(0, 5))
        self.relu1_op_group = orm.OpGroup(self.relu1_op_slice)

        self.conv3_op = g.get_operation_by_name('conv3/Conv2D')
        self.conv3_op_slice = orm.OpSlice(self.conv3_op, orm.Slice(0, 6))

        # Create mock OpRegularizerManager with custom mapping of OpSlice and
        # OpGroup.
        self.mock_op_reg_manager = mock.create_autospec(
            orm.OpRegularizerManager)

        self.op_slice_dict = {
            self.dwise_conv2_op: [self.dwise_conv2_op_slice],
            self.conv2_op: [self.conv2_op_slice],
            self.relu1_op: [self.relu1_op_slice],
            self.conv3_op: [self.conv3_op_slice],
        }

        def get_op_slices(op):
            # Stand-in for OpRegularizerManager.get_op_slices.
            return self.op_slice_dict.get(op)

        def get_op_group(op_slice):
            # Stand-in for OpRegularizerManager.get_op_group.
            return self.op_group_dict.get(op_slice)

        self.mock_op_reg_manager.get_op_slices.side_effect = get_op_slices
        self.mock_op_reg_manager.get_op_group.side_effect = get_op_group
        self.mock_op_reg_manager.is_source_op.return_value = False
        self.mock_op_reg_manager.ops = [
            self.relu1_op, self.dwise_conv2_op, self.conv2_op, self.conv3_op
        ]

        # All neighbor ops have groups.
        self.op_group_dict = {
            self.relu1_op_slice: self.relu1_op_group,
        }

        # Call handler to assign grouping.
        handler = depthwise_convolution_op_handler.DepthwiseConvolutionOpHandler(
        )
        handler.assign_grouping(self.dwise_conv2_op, self.mock_op_reg_manager)

        # Verify manager looks up OpSlice for ops of interest.
        self.mock_op_reg_manager.get_op_slices.assert_has_calls(
            # Checking for ops to process.
            [
                mock.call(self.relu1_op),
                mock.call(self.conv2_op),
                # Initial slice data.
                mock.call(self.dwise_conv2_op),
                mock.call(self.relu1_op),
                # Reslicing.
                mock.call(self.relu1_op),
                mock.call(self.dwise_conv2_op),
                # Refreshing slice data.
                mock.call(self.relu1_op),
                # Group depthwise convolution.
                mock.call(self.dwise_conv2_op)
            ])

        # Verify manager groups batch norm with inputs and outputs.
        # Exactly one grouping: whole depthwise slice with the whole input slice.
        self.mock_op_reg_manager.group_op_slices.assert_called_once_with(
            [self.dwise_conv2_op_slice, self.relu1_op_slice])

        # Verify manager does not process any additional ops.
        self.mock_op_reg_manager.process_ops.assert_called_once_with(
            [self.conv2_op])
        self.mock_op_reg_manager.process_ops_last.assert_not_called()
Example 5
0
    def __init__(self,
                 ops,
                 gamma_threshold,
                 regularizer_decorator: Type[
                     generic_regularizers.OpRegularizer] = None,
                 decorator_parameters=None,
                 force_group=None,
                 regularizer_blacklist=None):
        """Creates a GammaFlopsRegularizer object.

        Args:
          ops: List of tf.Operation. An OpRegularizer is created for every op
            in `ops` and, recursively, for every op they depend on via data
            dependency. Typically `ops` holds a single tf.Operation — the
            output of the network.
          gamma_threshold: Float scalar used as the 'gamma_threshold' for all
            GammaL1Regularizer instances created by this class.
          regularizer_decorator: OpRegularizer decorator class to wrap the
            source handler with.
          decorator_parameters: Dict of parameters for the decorator factory;
            None unless the decorator requires parameters.
          force_group: List of regexes for ops that should be force-grouped.
            Each regex corresponds to a separate group; use the '|' operator
            to put several patterns in one regex. See op_regularizer_manager
            for more detail.
          regularizer_blacklist: List of regexes for ops that must not be
            regularized. See op_regularizer_manager for more detail.
        """
        # Batch-norm gammas act as the regularization sources.
        source_op_handler = batch_norm_source_op_handler.BatchNormSourceOpHandler(
            gamma_threshold)
        if regularizer_decorator:
            source_op_handler = op_handler_decorator.OpHandlerDecorator(
                source_op_handler, regularizer_decorator, decorator_parameters)

        # Op types not listed below fall back to a fresh GroupingOpHandler.
        op_handler_dict = collections.defaultdict(
            grouping_op_handler.GroupingOpHandler)
        op_handler_dict['FusedBatchNorm'] = source_op_handler
        op_handler_dict['FusedBatchNormV2'] = source_op_handler
        op_handler_dict['ConcatV2'] = concat_op_handler.ConcatOpHandler()
        op_handler_dict['DepthwiseConv2dNative'] = (
            depthwise_convolution_op_handler.DepthwiseConvolutionOpHandler())
        for op_type in ('Conv2D', 'DepthToSpace', 'MatMul', 'Transpose',
                        'ExpandDims'):
            op_handler_dict[op_type] = (
                output_non_passthrough_op_handler.OutputNonPassthroughOpHandler())
        for op_type in ('TensorArrayGatherV3', 'RandomUniform', 'Reshape'):
            op_handler_dict[op_type] = leaf_op_handler.LeafOpHandler()

        self._manager = orm.OpRegularizerManager(
            ops,
            op_handler_dict,
            force_group=force_group,
            regularizer_blacklist=regularizer_blacklist)
        self._calculator = cost_calculator.CostCalculator(
            self._manager, resource_function.flop_function)
Example 6
0
    def __init__(self,
                 ops,
                 threshold,
                 l1_fraction=0,
                 regularizer_decorator: Type[
                     generic_regularizers.OpRegularizer] = None,
                 decorator_parameters=None,
                 force_group=None,
                 regularizer_blacklist=None,
                 convert_to_variable=True):
        """Creates a GroupLassoFlopsRegularizer object.

        Args:
          ops: List of tf.Operation. An OpRegularizer is created for every op
            in `ops` and, recursively, for every op they depend on via data
            dependency. Typically `ops` holds a single tf.Operation — the
            output of the network.
          threshold: Float scalar used as the 'threshold' for all regularizer
            instances created by this class.
          l1_fraction: Relative weight of L1 in L1 + L2 regularization.
          regularizer_decorator: OpRegularizer decorator class to wrap the
            source handlers with.
          decorator_parameters: Dict of parameters for the decorator factory;
            None unless the decorator requires parameters.
          force_group: List of regexes for ops that should be force-grouped.
            Each regex corresponds to a separate group; use the '|' operator
            to put several patterns in one regex. See op_regularizer_manager
            for more detail.
          regularizer_blacklist: List of regexes for ops that must not be
            regularized. See op_regularizer_manager for more detail.
          convert_to_variable: If `True` convert to variable in the
            `GroupLassoBaseOpHandler`. If your graph creates variables outside
            of `tf.get_variable`, set to `False`.
        """
        # Source handlers: op types that own the regularized weights.
        conv2d_handler = conv2d_source_op_handler.Conv2DSourceOpHandler(
            threshold, l1_fraction, convert_to_variable)
        conv2d_transpose_handler = (
            conv2d_transpose_source_op_handler.Conv2DTransposeSourceOpHandler(
                threshold, l1_fraction, convert_to_variable))
        matmul_handler = matmul_source_op_handler.MatMulSourceOpHandler(
            threshold, l1_fraction, convert_to_variable)
        if regularizer_decorator:
            # Wrap every source handler with the requested decorator.
            def decorate(handler):
                return op_handler_decorator.OpHandlerDecorator(
                    handler, regularizer_decorator, decorator_parameters)
            conv2d_handler = decorate(conv2d_handler)
            conv2d_transpose_handler = decorate(conv2d_transpose_handler)
            matmul_handler = decorate(matmul_handler)

        # Op types not listed below fall back to a fresh GroupingOpHandler.
        op_handler_dict = collections.defaultdict(
            grouping_op_handler.GroupingOpHandler)
        op_handler_dict['Conv2D'] = conv2d_handler
        op_handler_dict['Conv2DBackpropInput'] = conv2d_transpose_handler
        op_handler_dict['MatMul'] = matmul_handler
        op_handler_dict['ConcatV2'] = concat_op_handler.ConcatOpHandler()
        op_handler_dict['DepthwiseConv2dNative'] = (
            depthwise_convolution_op_handler.DepthwiseConvolutionOpHandler())
        for op_type in ('DepthToSpace', 'Transpose'):
            op_handler_dict[op_type] = (
                output_non_passthrough_op_handler.OutputNonPassthroughOpHandler())
        for op_type in ('RandomUniform', 'Reshape', 'Shape',
                        'TensorArrayGatherV3', 'StridedSlice'):
            op_handler_dict[op_type] = leaf_op_handler.LeafOpHandler()

        self._manager = orm.OpRegularizerManager(
            ops,
            op_handler_dict,
            force_group=force_group,
            regularizer_blacklist=regularizer_blacklist)
        self._calculator = cost_calculator.CostCalculator(
            self._manager, resource_function.flop_function)
Example 7
0
    def __init__(self,
                 ops,
                 gamma_threshold,
                 hardware,
                 batch_size=1,
                 regularizer_decorator: Type[
                     generic_regularizers.OpRegularizer] = None,
                 decorator_parameters=None,
                 force_group=None,
                 regularizer_blacklist=None) -> None:
        """Creates a GammaLatencyRegularizer object.

        Latency cost and regularization loss are calculated for the given
        hardware platform.

        Args:
          ops: List of tf.Operation. An OpRegularizer is created for every op
            in `ops` and, recursively, for every op they depend on via data
            dependency. Typically `ops` holds a single tf.Operation — the
            output of the network.
          gamma_threshold: Float scalar used as the 'gamma_threshold' for all
            GammaL1Regularizer instances created by this class.
          hardware: String name of the hardware platform to target. Must be a
            key from resource_function.PEAK_COMPUTE.
          batch_size: Integer batch size to calculate cost/loss for.
          regularizer_decorator: A string, the name of the regularizer
            decorators to use. Supported decorators are listed in
            op_regularizer_decorator.SUPPORTED_DECORATORS.
          decorator_parameters: Dict of parameters for the decorator factory;
            None unless the decorator requires parameters.
          force_group: List of regexes for ops that should be force-grouped.
            Each regex corresponds to a separate group; use the '|' operator
            to put several patterns in one regex. See op_regularizer_manager
            for more detail.
          regularizer_blacklist: List of regexes for ops that must not be
            regularized. See op_regularizer_manager for more detail.
        """
        # Batch-norm gammas act as the regularization sources.
        source_op_handler = batch_norm_source_op_handler.BatchNormSourceOpHandler(
            gamma_threshold)
        if regularizer_decorator:
            source_op_handler = op_handler_decorator.OpHandlerDecorator(
                source_op_handler, regularizer_decorator, decorator_parameters)

        # Op types not listed below fall back to a fresh GroupingOpHandler.
        op_handler_dict = collections.defaultdict(
            grouping_op_handler.GroupingOpHandler)
        op_handler_dict['FusedBatchNorm'] = source_op_handler
        op_handler_dict['FusedBatchNormV2'] = source_op_handler
        op_handler_dict['ConcatV2'] = concat_op_handler.ConcatOpHandler()
        op_handler_dict['DepthwiseConv2dNative'] = (
            depthwise_convolution_op_handler.DepthwiseConvolutionOpHandler())
        op_handler_dict['TensorArrayGatherV3'] = leaf_op_handler.LeafOpHandler()
        for op_type in ('Conv2D', 'DepthToSpace', 'MatMul', 'Transpose'):
            op_handler_dict[op_type] = (
                output_non_passthrough_op_handler.OutputNonPassthroughOpHandler())

        self._manager = orm.OpRegularizerManager(
            ops,
            op_handler_dict,
            force_group=force_group,
            regularizer_blacklist=regularizer_blacklist)
        self._calculator = cost_calculator.CostCalculator(
            self._manager,
            resource_function.latency_function_factory(hardware, batch_size))
        self._hardware = hardware