import tensorflow as tf

# `base_layer`, `unique_layer_name`, `getshape`, `variableFromSettings` and
# `GLOBAL` are assumed to be provided by the surrounding project/module.


def _make_unique_name(self, name_uid_map=None, avoid_names=None,
                      namespace='', zero_based=False):
    """Derive a unique snake_case layer name from the host class name."""
    base_name = base_layer.to_snake_case(self.__class__.__name__)
    name = base_layer.unique_layer_name(
        base_name, name_uid_map=name_uid_map, avoid_names=avoid_names,
        namespace=namespace, zero_based=zero_based)
    return (name, base_name)
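# Illustrative sketch only (hypothetical host class `MyConv`; the numeric
# suffix depends on TF's name-uid bookkeeping and the `zero_based` flag):
#
#   name, base_name = my_conv_instance._make_unique_name()
#   # e.g. name == "my_conv_1", base_name == "my_conv"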
def conv2d(inputs, filters, kernel_size, strides, padding, name="conv2d",
           rate=1, scope=None, normalizer_fn=None, return_preact=False,
           **args):
    """2-D convolution with a sampled filter.

    `scope` is accepted for slim-style API compatibility but unused.
    """
    # Pull the activation out of the kwargs so it is applied after the
    # (optional) normalizer instead of being forwarded to tf.nn.conv2d.
    activation = False
    if "activation_fn" in args:
        activation_fn = args["activation_fn"]
        del args["activation_fn"]
        activation = True

    # Bias is not supported here: reject it if requested and drop the key so
    # it is never forwarded to tf.nn.conv2d. (The original guarded the assert
    # with `and args["use_bias"]`, which made the assert always fail when
    # entered and leaked the key into the conv kwargs when use_bias was False.)
    if "use_bias" in args:
        assert not args["use_bias"], "conv2d: use_bias is not supported"
        del args["use_bias"]

    # The kernel comes from variableFromSettings, so a kernel_initializer
    # would never be used.
    if "kernel_initializer" in args:
        del args["kernel_initializer"]

    # Map Keras-style data_format strings to the tf.nn.conv2d spelling.
    if "data_format" in args:
        args["data_format"] = ("NCHW" if args["data_format"] == "channels_first"
                               else "NHWC")
    else:
        args["data_format"] = "NHWC"

    # kernel
    if not isinstance(kernel_size, list):
        kernel_size = [kernel_size] * 2

    # data_format dependent num_channels, strides, dilations
    if args["data_format"] == "NHWC":
        num_channels = getshape(inputs)[-1]
        if not isinstance(strides, list):
            strides = [1] + [strides] * 2 + [1]
        dilations = [1, rate, rate, 1]
    else:
        # NCHW: channels live at index 1, not 0 (index 0 is the batch dim).
        num_channels = getshape(inputs)[1]
        if not isinstance(strides, list):
            strides = [1, 1] + [strides] * 2
        dilations = [1, 1, rate, rate]

    # Sample the filter variable inside a uniquely named scope.
    with tf.variable_scope(unique_layer_name(name, zero_based=True)):
        weight_s, weight_p = variableFromSettings(
            kernel_size + [num_channels, filters])

        # conv with sampled filter
        conv_args = {
            "input": inputs,
            "strides": strides,
            "padding": padding,
            "dilations": dilations,
            **args,
        }
        conv = tf.nn.conv2d(filter=weight_s, name=name, **conv_args)
        if normalizer_fn is not None:
            conv = normalizer_fn(conv, name="bn")

        # Give the activation a chance to do preparatory work (via the
        # project-level GLOBAL hook) before it is applied.
        if activation and activation_fn is not None:
            if "activation_prepare" in GLOBAL:
                preact = GLOBAL["activation_prepare"](
                    weight_s, weight_p, inputs, conv, conv_args)
                if return_preact:
                    return preact
            conv = activation_fn(conv)

    return conv
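# Minimal usage sketch, assuming TF 1.x graph mode and that
# `variableFromSettings`/`GLOBAL` are configured by the surrounding project;
# the shapes and hyperparameters below are illustrative, not from the original.
if __name__ == "__main__":
    x = tf.placeholder(tf.float32, [None, 32, 32, 3])  # NHWC input
    y = conv2d(x, filters=64, kernel_size=3, strides=1,
               padding="SAME", rate=1, activation_fn=tf.nn.relu)
    print(y)  # -> Tensor with shape (?, 32, 32, 64)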