def testSimpleOpGetRegularizer(self, use_batch_norm, use_partitioner, scope):
  """Tests the alive pattern of the conv and relu ops.

  Args:
    use_batch_norm: A Boolean. Indicates if batch norm should be used.
    use_partitioner: A Boolean. Indicates if a fixed_size_partitioner should
      be used.
    scope: A String with the scope to test.
  """
  sc = self._batch_norm_scope() if use_batch_norm else []
  partitioner = tf.fixed_size_partitioner(2) if use_partitioner else None
  with tf.contrib.framework.arg_scope(sc):
    with tf.variable_scope(tf.get_variable_scope(), partitioner=partitioner):
      # Stub model defined outside this file; provides the ops queried below.
      final_op = op_regularizer_stub.build_model()
  op_reg_manager = orm.OpRegularizerManager(
      [final_op], op_regularizer_stub.MOCK_REG_DICT)
  expected_alive = op_regularizer_stub.expected_alive()
  with self.test_session():
    # The conv and its relu should share the same alive vector for `scope`.
    conv_reg = op_reg_manager.get_regularizer(
        _get_op(scope + '/Conv2D'))
    self.assertAllEqual(expected_alive[scope],
                        conv_reg.alive_vector.eval())
    relu_reg = op_reg_manager.get_regularizer(_get_op(scope + '/Relu'))
    self.assertAllEqual(expected_alive[scope],
                        relu_reg.alive_vector.eval())
def testGetRegularizerForConcatWithNone(self, test_concat, depth):
  """Checks the regularizer of a concat where one input has no regularizer."""
  input_image = tf.constant(0.0, shape=[1, 17, 19, 3])
  regularized = layers.conv2d(input_image, 5, [1, 1], padding='SAME',
                              scope='conv2')
  # This branch has None as its regularizer.
  unregularized = tf.add(
      tf.identity(tf.constant(3.0, shape=[1, 17, 19, depth])), 3.0)
  concat = tf.concat([unregularized, regularized], 3)
  output = tf.add(concat, concat, name='output_out')
  if test_concat:
    queried_op = concat.op
  else:
    queried_op = output.op
  manager = orm.OpRegularizerManager(
      [output.op], op_regularizer_stub.MOCK_REG_DICT)
  expected_alive = op_regularizer_stub.expected_alive()
  with self.test_session():
    alive = manager.get_regularizer(queried_op).alive_vector.eval()
    # Channels from the unregularized input are all alive; the rest follow
    # the stub's expected pattern for conv2.
    self.assertAllEqual([True] * depth, alive[:depth])
    self.assertAllEqual(expected_alive['conv2'], alive[depth:])
def testGetRegularizerForConcatWithNone(self, test_concat, depth):
  """Tests concat regularization when one concatenated input is unregularized."""
  base = tf.constant(0.0, shape=[1, 17, 19, 3])
  conv_out = layers.conv2d(base, 5, [1, 1], padding='SAME', scope='conv2')
  # The identity/add chain below carries None as its regularizer.
  plain_branch = tf.add(
      tf.identity(tf.constant(3.0, shape=[1, 17, 19, depth])), 3.0)
  joined = tf.concat([plain_branch, conv_out], 3)
  output = tf.add(joined, joined, name='output_out')
  target_op = joined.op if test_concat else output.op
  reg_manager = orm.OpRegularizerManager([output.op],
                                         op_regularizer_stub.MOCK_REG_DICT)
  expected_alive = op_regularizer_stub.expected_alive()
  with self.test_session():
    alive_vec = reg_manager.get_regularizer(target_op).alive_vector.eval()
    # First `depth` channels (unregularized branch) must be fully alive.
    self.assertAllEqual([True] * depth, alive_vec[:depth])
    self.assertAllEqual(expected_alive['conv2'], alive_vec[depth:])
def testConcatOpGetRegularizer(self, use_batch_norm, use_partitioner):
  """Checks that conv4's regularizer is the union of conv4 and concat."""
  scope_args = self._batch_norm_scope() if use_batch_norm else []
  if use_partitioner:
    partitioner = tf.fixed_size_partitioner(2)
  else:
    partitioner = None
  with tf.contrib.framework.arg_scope(scope_args):
    with tf.variable_scope(tf.get_variable_scope(), partitioner=partitioner):
      final_op = op_regularizer_stub.build_model()
  manager = orm.OpRegularizerManager(
      [final_op], op_regularizer_stub.MOCK_REG_DICT)
  stub_alive = op_regularizer_stub.expected_alive()
  # conv4 feeds a concat, so its alive vector is OR-ed with the concat's.
  expected = np.logical_or(stub_alive['conv4'], stub_alive['concat'])
  with self.test_session():
    for op_name in ('conv4/Conv2D', 'conv4/Relu'):
      regularizer = manager.get_regularizer(_get_op(op_name))
      self.assertAllEqual(expected, regularizer.alive_vector.eval())
def testConcatOpGetRegularizer(self, use_batch_norm, use_partitioner):
  """Verifies the alive vector of a conv whose output joins a concat."""
  arg_sc = [] if not use_batch_norm else self._batch_norm_scope()
  part = None if not use_partitioner else tf.fixed_size_partitioner(2)
  with tf.contrib.framework.arg_scope(arg_sc):
    with tf.variable_scope(tf.get_variable_scope(), partitioner=part):
      model_output = op_regularizer_stub.build_model()
  reg_manager = orm.OpRegularizerManager([model_output],
                                         op_regularizer_stub.MOCK_REG_DICT)
  alive_map = op_regularizer_stub.expected_alive()
  # Union of the conv4 pattern and the concat pattern.
  union_alive = np.logical_or(alive_map['conv4'], alive_map['concat'])
  with self.test_session():
    conv_regularizer = reg_manager.get_regularizer(_get_op('conv4/Conv2D'))
    self.assertAllEqual(union_alive, conv_regularizer.alive_vector.eval())
    relu_regularizer = reg_manager.get_regularizer(_get_op('conv4/Relu'))
    self.assertAllEqual(union_alive, relu_regularizer.alive_vector.eval())
def testSimpleOpGetRegularizer(self, use_batch_norm, use_partitioner, scope):
  """Tests the alive pattern of the conv and relu ops.

  Args:
    use_batch_norm: A Boolean. Indicates if batch norm should be used.
    use_partitioner: A Boolean. Indicates if a fixed_size_partitioner should
      be used.
    scope: A String with the scope to test.
  """
  sc = self._batch_norm_scope() if use_batch_norm else []
  partitioner = tf.fixed_size_partitioner(2) if use_partitioner else None
  with tf.contrib.framework.arg_scope(sc):
    with tf.variable_scope(tf.get_variable_scope(), partitioner=partitioner):
      # Stub model defined outside this file; provides the ops queried below.
      final_op = op_regularizer_stub.build_model()
  op_reg_manager = orm.OpRegularizerManager([final_op],
                                            op_regularizer_stub.MOCK_REG_DICT)
  expected_alive = op_regularizer_stub.expected_alive()
  with self.test_session():
    # The conv and its relu should share the same alive vector for `scope`.
    conv_reg = op_reg_manager.get_regularizer(_get_op(scope + '/Conv2D'))
    self.assertAllEqual(expected_alive[scope], conv_reg.alive_vector.eval())
    relu_reg = op_reg_manager.get_regularizer(_get_op(scope + '/Relu'))
    self.assertAllEqual(expected_alive[scope], relu_reg.alive_vector.eval())