def testConfigurableConv2DAlternateOpSuffixes(self):
  """Both 'Conv2D' and 'ConfigurableConv2D' op suffixes are accepted.

  ConfigurableConv2D accepts either suffix in the parameterization keys so
  it stays compatible with structures learned using keras.layers.Conv2D as
  well as ones learned using ConfigurableConv2D itself.
  """
  # 'Conv2D' suffix variant.
  conv2d_suffix_parameterization = {
      'conv1/Conv2D': 1,
  }
  first_layer = ops.ConfigurableConv2D(
      parameterization=conv2d_suffix_parameterization,
      filters=10,
      kernel_size=3,
      name='conv1')
  self.assertEqual(first_layer(self.inputs).shape.as_list()[-1], 1)

  # 'ConfigurableConv2D' suffix variant.
  configurable_suffix_parameterization = {
      'conv2/ConfigurableConv2D': 2,
  }
  second_layer = ops.ConfigurableConv2D(
      parameterization=configurable_suffix_parameterization,
      filters=10,
      kernel_size=3,
      name='conv2')
  self.assertEqual(second_layer(self.inputs).shape.as_list()[-1], 2)

  # Supplying both suffix variants for the same op is ambiguous: only one
  # variant may appear in the parameterization, otherwise a KeyError is
  # raised when the layer is applied.
  ambiguous_parameterization = {
      'conv3/Conv2D': 1,
      'conv3/ConfigurableConv2D': 2
  }
  with self.assertRaises(KeyError):
    _ = ops.ConfigurableConv2D(
        parameterization=ambiguous_parameterization,
        filters=10,
        kernel_size=3,
        name='conv3')(self.inputs)
def testParameterizeNamesWithSlashes(self):
  """Parameterization keys work for layer names containing slashes."""
  # A name with single slashes maps directly to the parameterization key.
  slash_layer = ops.ConfigurableConv2D(
      parameterization={
          'name/with/slashes/ConfigurableConv2D': 1,
      },
      filters=10,
      kernel_size=3,
      name='name/with/slashes')
  slash_out = slash_layer(self.inputs)
  self.assertEqual(slash_out.shape.as_list()[-1], 1)

  # Runs of multiple consecutive slashes are preserved in the key.
  multislash_layer = ops.ConfigurableConv2D(
      parameterization={'name//with///multislash/ConfigurableConv2D': 2},
      filters=10,
      kernel_size=3,
      name='name//with///multislash')
  multislash_out = multislash_layer(self.inputs)
  self.assertEqual(multislash_out.shape.as_list()[-1], 2)

  # When Keras calls tf.variable_scope with N trailing slashes,
  # tf.variable_scope will create a scope with N-1 trailing slashes.
  trailing_slash_layer = ops.ConfigurableConv2D(
      parameterization={'name/ends/with/slashes//ConfigurableConv2D': 3},
      filters=10,
      kernel_size=3,
      name='name/ends/with/slashes///')
  trailing_slash_out = trailing_slash_layer(self.inputs)
  self.assertEqual(3, trailing_slash_out.shape.as_list()[-1])
def testParameterizeDuplicateNames(self):
  """Duplicate and default layer names resolve to uniquified scope names.

  Keras de-duplicates repeated layer names by appending a numeric suffix
  (e.g. 'conv1' then 'conv1_1'); layers built without an explicit name get
  the default 'configurable_conv2d' name, likewise uniquified. The
  parameterization keys below target those uniquified names, so each of the
  four layers picks up a distinct filter count.

  NOTE(review): this test depends on the exact construction order of the
  four layers (the name counters advance per instantiation) and on the
  first-use order at call time — do not reorder the statements.
  """
  parameterization = {
      'conv1/ConfigurableConv2D': 1,
      'conv1_1/ConfigurableConv2D': 2,
      'configurable_conv2d/ConfigurableConv2D': 3,
      'configurable_conv2d_1/ConfigurableConv2D': 4,
  }
  # Two layers deliberately share the explicit name 'conv1'; the second is
  # expected to be uniquified to 'conv1_1'.
  conv1 = ops.ConfigurableConv2D(
      parameterization=parameterization,
      filters=10,
      kernel_size=3,
      name='conv1')
  conv1_1 = ops.ConfigurableConv2D(
      parameterization=parameterization,
      filters=10,
      kernel_size=3,
      name='conv1')
  # Two layers with no explicit name: 'configurable_conv2d' and
  # 'configurable_conv2d_1' respectively.
  conv_default_name = ops.ConfigurableConv2D(
      parameterization=parameterization, filters=10, kernel_size=3)
  conv_default_name_1 = ops.ConfigurableConv2D(
      parameterization=parameterization, filters=10, kernel_size=3)
  # Each layer's output channel count matches its parameterization entry.
  out = conv1(self.inputs)
  self.assertEqual(1, out.shape.as_list()[-1])
  out = conv1_1(self.inputs)
  self.assertEqual(2, out.shape.as_list()[-1])
  out = conv_default_name(self.inputs)
  self.assertEqual(3, out.shape.as_list()[-1])
  out = conv_default_name_1(self.inputs)
  self.assertEqual(4, out.shape.as_list()[-1])
def testConfigurableConv2DFunctionality(self):
  """Without a parameterization, ConfigurableConv2D matches keras Conv2D."""
  actual = ops.ConfigurableConv2D(filters=5, kernel_size=2)(self.inputs)
  reference = keras_layers.Conv2D(filters=5, kernel_size=2)(self.inputs)
  self.assertAllEqual(actual.shape, reference.shape)
  # The op is registered in the default graph under the default layer name.
  graph_op_names = [
      op.name for op in tf.get_default_graph().get_operations()
  ]
  self.assertIn('configurable_conv2d/ConfigurableConv2D', graph_op_names)
def testConfigurableConv2DParameterization(self):
  """The parameterization overrides `filters` for default and custom names."""
  # Layer built with its default name.
  default_named_layer = ops.ConfigurableConv2D(
      parameterization={'configurable_conv2d/ConfigurableConv2D': 1},
      filters=10,
      kernel_size=3)
  default_named_out = default_named_layer(self.inputs)
  self.assertEqual(1, default_named_out.shape.as_list()[-1])

  # Layer built with an explicit custom name.
  custom_named_layer = ops.ConfigurableConv2D(
      parameterization={'conv2/ConfigurableConv2D': 2},
      filters=10,
      kernel_size=3,
      name='conv2')
  custom_named_out = custom_named_layer(self.inputs)
  self.assertEqual(2, custom_named_out.shape.as_list()[-1])
def testStrictness(self):
  """`is_strict` controls whether unused parameterization entries raise."""
  parameterization = {
      'unused_conv/Conv2D': 2,
  }
  lenient_layer = ops.ConfigurableConv2D(
      parameterization=parameterization,
      is_strict=False,
      filters=10,
      kernel_size=3)
  strict_layer = ops.ConfigurableConv2D(
      parameterization=parameterization,
      is_strict=True,
      filters=10,
      kernel_size=3)
  # With is_strict=False, entries that match no op in this layer are
  # ignored and the layer keeps its constructor-specified filter count.
  out = lenient_layer(self.inputs)
  self.assertEqual(10, out.shape.as_list()[-1])
  # With is_strict=True, every entry in the parameterization must be
  # consumed by the layer, so the unused entry triggers a KeyError.
  with self.assertRaises(KeyError):
    out = strict_layer(self.inputs)