Example #1
0
    def testSimpleOpGetRegularizer(self, use_batch_norm, use_partitioner,
                                   scope):
        """Checks the alive vectors of the Conv2D and Relu ops under `scope`.

        Args:
          use_batch_norm: A Boolean. Indicates if batch norm should be used.
          use_partitioner: A Boolean. Indicates if a fixed_size_partitioner
            should be used.
          scope: A String with the scope to test.
        """
        arg_sc = self._batch_norm_scope() if use_batch_norm else []
        var_partitioner = (
            tf.fixed_size_partitioner(2) if use_partitioner else None)
        with tf.contrib.framework.arg_scope(arg_sc):
            with tf.variable_scope(tf.get_variable_scope(),
                                   partitioner=var_partitioner):
                final_op = op_regularizer_stub.build_model()

        manager = orm.OpRegularizerManager(
            [final_op], op_regularizer_stub.MOCK_REG_DICT)
        expected_alive = op_regularizer_stub.expected_alive()
        with self.test_session():
            # Both the conv and its relu must report the same alive pattern.
            for op_suffix in ('/Conv2D', '/Relu'):
                regularizer = manager.get_regularizer(
                    _get_op(scope + op_suffix))
                self.assertAllEqual(expected_alive[scope],
                                    regularizer.alive_vector.eval())
Example #2
0
    def testGroupingOps(self, tested_op):
        """Verifies a grouping op ORs together the alive vectors of its inputs."""
        threshold, vec_size = 0.5, 11
        image = tf.constant(0.5, shape=[1, 17, 19, 3])

        conv1 = layers.conv2d(image, 5, [1, 1], padding='SAME', scope='conv1')
        conv2 = layers.conv2d(image, 5, [1, 1], padding='SAME', scope='conv2')
        res = tested_op(conv1, conv2)
        # Random regularization vectors, one per conv scope.
        reg = {name: np.random.random(vec_size) for name in ('conv1', 'conv2')}

        def regularizer(conv_op, manager=None):
            del manager  # unused
            for prefix in ['conv1', 'conv2']:
                if conv_op.name.startswith(prefix):
                    return op_regularizer_stub.OpRegularizerStub(
                        reg[prefix], reg[prefix] > threshold)

        op_reg_manager = orm.OpRegularizerManager([res.op],
                                                  {'Conv2D': regularizer})
        with self.test_session():
            alive = op_reg_manager.get_regularizer(res.op).alive_vector.eval()
            expected = np.logical_or(reg['conv1'] > threshold,
                                     reg['conv2'] > threshold)
            self.assertAllEqual(alive, expected)
Example #3
0
 def __init__(self, ops, gamma_threshold):
     """Builds a gamma-L1 OpRegularizerManager and delegates to the FLOP base.

     Args:
       ops: Ops handed to the OpRegularizerManager (presumably the output
         ops of the network — confirm against callers).
       gamma_threshold: Threshold forwarded to GammaL1RegularizerFactory.
     """
     factory = gamma_l1_regularizer.GammaL1RegularizerFactory(gamma_threshold)
     # Both regular and depthwise convolutions use the same gamma factory.
     regularizer_factories = {
         'Conv2D': factory.create_regularizer,
         'DepthwiseConv2dNative': factory.create_regularizer,
     }
     opreg_manager = op_regularizer_manager.OpRegularizerManager(
         ops, regularizer_factories)
     super(GammaFlopsRegularizer,
           self).__init__(opreg_manager, bilinear_cost_utils.flop_coeff)
Example #4
0
 def __init__(self, ops, threshold):
     """Builds a group-lasso OpRegularizerManager and delegates to the FLOP base.

     Args:
       ops: Ops handed to the OpRegularizerManager.
       threshold: Threshold forwarded to ConvGroupLassoRegularizerFactory.
     """
     # One group-lasso factory serves both convolution op types.
     factory = conv_group_lasso_regularizer.ConvGroupLassoRegularizerFactory(
         threshold)
     opreg_manager = op_regularizer_manager.OpRegularizerManager(
         ops, {
             'Conv2D': factory.create_regularizer,
             'Conv2DBackpropInput': factory.create_regularizer,
         })
     super(GroupLassoFlopsRegularizer,
           self).__init__(opreg_manager, bilinear_cost_utils.flop_coeff)
Example #5
0
    def testGetRegularizerForConcatWithNone(self, test_concat, depth):
        """Concat with one None-regularized input: that slice is fully alive."""
        image = tf.constant(0.0, shape=[1, 17, 19, 3])
        conv2 = layers.conv2d(image, 5, [1, 1], padding='SAME', scope='conv2')
        # other_input has None as regularizer.
        other_input = tf.add(
            tf.identity(tf.constant(3.0, shape=[1, 17, 19, depth])), 3.0)
        concat = tf.concat([other_input, conv2], 3)
        output = tf.add(concat, concat, name='output_out')
        queried_op = concat.op if test_concat else output.op
        manager = orm.OpRegularizerManager(
            [output.op], op_regularizer_stub.MOCK_REG_DICT)
        expected_alive = op_regularizer_stub.expected_alive()

        with self.test_session():
            alive = manager.get_regularizer(queried_op).alive_vector.eval()
            # The leading `depth` channels come from the unregularized input
            # and must all be alive; the rest follow conv2's pattern.
            self.assertAllEqual([True] * depth, alive[:depth])
            self.assertAllEqual(expected_alive['conv2'], alive[depth:])
Example #6
0
    def testConcatOpGetRegularizer(self, use_batch_norm, use_partitioner):
        """conv4's ops report the OR of conv4's and concat's alive vectors."""
        arg_sc = self._batch_norm_scope() if use_batch_norm else []
        var_partitioner = (
            tf.fixed_size_partitioner(2) if use_partitioner else None)
        with tf.contrib.framework.arg_scope(arg_sc):
            with tf.variable_scope(tf.get_variable_scope(),
                                   partitioner=var_partitioner):
                final_op = op_regularizer_stub.build_model()
        manager = orm.OpRegularizerManager(
            [final_op], op_regularizer_stub.MOCK_REG_DICT)
        expected_alive = op_regularizer_stub.expected_alive()

        expected = np.logical_or(expected_alive['conv4'],
                                 expected_alive['concat'])
        with self.test_session():
            # The conv and its relu must agree on the combined alive pattern.
            for op_name in ('conv4/Conv2D', 'conv4/Relu'):
                regularizer = manager.get_regularizer(_get_op(op_name))
                self.assertAllEqual(expected, regularizer.alive_vector.eval())