def __call__(self, shape, dtype="float32"):
    """ Call function for the ICNR initializer.

    Parameters
    ----------
    shape: tuple or list
        The required resized shape for the output tensor
    dtype: str
        The data type for the tensor

    Returns
    -------
    tensor
        The modified kernel weights
    """
    shape = list(shape)
    # Deserialize a config dict FIRST so every path gets a callable
    # initializer. Previously this happened after the scale == 1 early
    # return, so a serialized initializer would be called as a dict and
    # raise TypeError on that path.
    if isinstance(self.initializer, dict):
        self.initializer = initializers.deserialize(self.initializer)
    if self.scale == 1:
        # No upscaling requested: defer to the wrapped initializer,
        # forwarding dtype for consistency with the main path below
        # (it was silently dropped before).
        return self.initializer(shape, dtype)
    # Initialize at reduced output depth; the spatial resize + space-to-depth
    # below replicates these values across each scale x scale block.
    new_shape = shape[:3] + [shape[3] // (self.scale**2)]
    var_x = self.initializer(new_shape, dtype)
    # Move kernel axes so the tensor is image-like for the spatial resize
    var_x = K.permute_dimensions(var_x, [2, 0, 1, 3])
    var_x = self._resize_nearest_neighbour(
        var_x, (shape[0] * self.scale, shape[1] * self.scale))
    var_x = self._space_to_depth(var_x)
    # Restore the original kernel axis ordering
    var_x = K.permute_dimensions(var_x, [1, 2, 0, 3])
    logger.debug("Output: %s", var_x)
    return var_x
def test_serialization_deserialization(self, cls_name, expected_cls):
    """Round-trip an initializer through serialize/deserialize.

    Checks that the recreated object has the expected class and that its
    serialized config matches the original's.
    """
    original = initializers.get(cls_name)
    self.assertIsInstance(original, expected_cls)
    serialized = initializers.serialize(original)
    rebuilt = initializers.deserialize(serialized)
    self.assertIsInstance(rebuilt, expected_cls)
    self.assertEqual(serialized, initializers.serialize(rebuilt))
def test_load_external_variance_scaling_v2(self):
    """Deserialize an externally produced VarianceScaling config.

    The config uses 'normal' for the distribution; the deserialized
    initializer is expected to report 'truncated_normal'.
    """
    payload = {
        'config': {
            'scale': 1.0,
            'mode': 'fan_avg',
            'distribution': 'normal',
            'seed': None
        },
        'class_name': 'VarianceScaling'
    }
    loaded = initializers.deserialize(payload)
    self.assertEqual(loaded.distribution, 'truncated_normal')
def test_load_external_variance_scaling_v2(self):
    """Deserialize an externally produced VarianceScaling config.

    A 'normal' distribution in the incoming config should come back as
    'truncated_normal' on the deserialized initializer.
    """
    payload = {
        "config": {
            "scale": 1.0,
            "mode": "fan_avg",
            "distribution": "normal",
            "seed": None,
        },
        "class_name": "VarianceScaling",
    }
    loaded = initializers.deserialize(payload)
    self.assertEqual(loaded.distribution, "truncated_normal")
def __call__(self, shape, dtype='float32'):  # tf needs partition_info=None
    """Build ICNR-style kernel weights.

    Parameters
    ----------
    shape: tuple or list
        The required kernel shape for the output tensor
    dtype: str
        The data type for the tensor

    Returns
    -------
    tensor
        Kernel weights where values are replicated across each
        scale x scale block via the resize + space_to_depth round trip.
    """
    shape = list(shape)
    # Deserialize a config dict FIRST so every path gets a callable
    # initializer. Previously this happened after the scale == 1 early
    # return, so a serialized initializer would be called as a dict and
    # raise TypeError on that path. Also use isinstance over type(...) is.
    if isinstance(self.initializer, dict):
        self.initializer = initializers.deserialize(self.initializer)
    if self.scale == 1:
        # No upscaling requested: defer to the wrapped initializer,
        # forwarding dtype (it was silently dropped before) for
        # consistency with the main path below.
        return self.initializer(shape, dtype)
    # Initialize at reduced output depth; the spatial resize + space_to_depth
    # below fills the full depth with replicated values.
    new_shape = shape[:3] + [shape[3] // (self.scale ** 2)]
    var_x = self.initializer(new_shape, dtype)
    # Reorder kernel axes so the tensor is NHWC image-like for the resize
    var_x = tf.transpose(var_x, perm=[2, 0, 1, 3])
    var_x = tf.image.resize_nearest_neighbor(
        var_x,
        size=(shape[0] * self.scale, shape[1] * self.scale),
        align_corners=True)
    var_x = tf.space_to_depth(var_x, block_size=self.scale, data_format='NHWC')
    # Restore the original kernel axis ordering
    var_x = tf.transpose(var_x, perm=[1, 2, 0, 3])
    return var_x
def __call__(self, shape, dtype='float32'):  # tf needs partition_info=None
    """Build ICNR-style kernel weights.

    Parameters
    ----------
    shape: tuple or list
        The required kernel shape for the output tensor
    dtype: str
        The data type for the tensor

    Returns
    -------
    tensor
        Kernel weights where values are replicated across each
        scale x scale block via the resize + space_to_depth round trip.
    """
    shape = list(shape)
    # Deserialize a config dict FIRST so every path gets a callable
    # initializer. Previously this happened after the scale == 1 early
    # return, so a serialized initializer would be called as a dict and
    # raise TypeError on that path.
    if isinstance(self.initializer, dict):
        self.initializer = initializers.deserialize(self.initializer)
    if self.scale == 1:
        # No upscaling requested: defer to the wrapped initializer,
        # forwarding dtype (it was silently dropped before) for
        # consistency with the main path below.
        return self.initializer(shape, dtype)
    # Initialize at reduced output depth; the spatial resize + space_to_depth
    # below fills the full depth with replicated values.
    new_shape = shape[:3] + [shape[3] // (self.scale ** 2)]
    var_x = self.initializer(new_shape, dtype)
    var_x = tf.transpose(var_x, perm=[2, 0, 1, 3])
    # Resize to `size` with nearest-neighbour interpolation; align_corners=True
    # aligns the centres of the 4 corner pixels of the input and output
    # tensors, preserving the values at the corner pixels.
    var_x = tf.image.resize_nearest_neighbor(
        var_x,
        size=(shape[0] * self.scale, shape[1] * self.scale),
        align_corners=True)
    var_x = tf.space_to_depth(var_x, block_size=self.scale, data_format='NHWC')
    # Restore the original kernel axis ordering
    var_x = tf.transpose(var_x, perm=[1, 2, 0, 3])
    return var_x