def _maybe_squeeze_and_excite(
    params, input_filters_or_mask, inner_activation, gating_activation,
    enabled):
    """Generate a squeeze-and-excite layer or identity function.

    Args:
        params: Model-construction parameters forwarded to
            `_squeeze_and_excite`.
        input_filters_or_mask: Filter count (or mask) for the SE block input.
        inner_activation: Activation used inside the SE block.
        gating_activation: Activation used for the SE gate.
        enabled: Either a plain bool or a `schema.OneOf` whose choices are
            bools, controlling whether the SE block is present.

    Returns:
        A squeeze-and-excite layer, an identity layer, or a switch over
        per-choice layers when `enabled` is a `schema.OneOf`.

    Raises:
        ValueError: If `enabled` is neither a bool nor a `schema.OneOf`.
    """

    def _build_se():
        return _squeeze_and_excite(
            params=params,
            input_filters_or_mask=input_filters_or_mask,
            inner_activation=inner_activation,
            gating_activation=gating_activation)

    # Explicit bool type checks guard against bad configurations such as
    # enabled=OneOf([False, True, 42]).
    if isinstance(enabled, bool):
        return _build_se() if enabled else layers.Identity()
    if isinstance(enabled, schema.OneOf):
        branches = [
            _build_se() if choice else layers.Identity()
            for choice in enabled.choices
        ]
        return layers.maybe_switch_v2(enabled.mask, branches)
    raise ValueError('Unsupported value for "enabled": {}'.format(enabled))
def _conv(params, input_filters_or_mask, output_filters_or_mask, kernel_size,
          strides=(1, 1), activation=None, use_batch_norm=True):
    """Conv2D + batch norm + activation, optionally searching over kernel size.

    Args:
        params: Model-construction parameters forwarded to
            `_conv_with_fixed_kernel`.
        input_filters_or_mask: Filter count (or mask) for the input.
        output_filters_or_mask: Filter count (or mask) for the output.
        kernel_size: A fixed kernel size, or a `schema.OneOf` over candidate
            kernel sizes to search across.
        strides: Conv2D strides; defaults to (1, 1).
        activation: Optional activation applied after batch norm.
        use_batch_norm: Whether to include batch normalization.

    Returns:
        A conv layer for a fixed kernel size, or a switch over one conv layer
        per candidate kernel size when `kernel_size` is a multi-choice OneOf.
    """
    is_oneof = isinstance(kernel_size, schema.OneOf)
    if is_oneof and len(kernel_size.choices) > 1:
        # One conv branch per candidate kernel size, selected by the mask.
        branches = [
            _conv_with_fixed_kernel(
                params=params,
                input_filters_or_mask=input_filters_or_mask,
                output_filters_or_mask=output_filters_or_mask,
                kernel_size=candidate,
                strides=strides,
                activation=activation,
                use_batch_norm=use_batch_norm)
            for candidate in kernel_size.choices
        ]
        return layers.maybe_switch_v2(kernel_size.mask, branches)

    # A single-choice OneOf degenerates to its only candidate; otherwise the
    # kernel size is already a concrete value.
    fixed_kernel_size = kernel_size.choices[0] if is_oneof else kernel_size
    return _conv_with_fixed_kernel(
        params=params,
        input_filters_or_mask=input_filters_or_mask,
        output_filters_or_mask=output_filters_or_mask,
        kernel_size=fixed_kernel_size,
        strides=strides,
        activation=activation,
        use_batch_norm=use_batch_norm)
def _depthwise_conv(
    params, filters_or_mask, kernel_size, strides=(1, 1), activation=None):
    """Depthwise conv + BN + activation, optionally searching over kernel size.

    Args:
        params: Model-construction parameters forwarded to
            `_depthwise_conv_with_fixed_kernel`.
        filters_or_mask: Filter count (or mask) for the depthwise conv.
        kernel_size: A fixed kernel size, or a `schema.OneOf` over candidate
            kernel sizes to search across.
        strides: Conv strides; defaults to (1, 1).
        activation: Optional activation applied after batch norm.

    Returns:
        A depthwise conv layer for a fixed kernel size, or a switch over one
        layer per candidate kernel size when `kernel_size` is a multi-choice
        OneOf.
    """
    is_oneof = isinstance(kernel_size, schema.OneOf)
    if is_oneof and len(kernel_size.choices) > 1:
        # One depthwise-conv branch per candidate, selected by the mask.
        branches = [
            _depthwise_conv_with_fixed_kernel(
                params=params,
                filters_or_mask=filters_or_mask,
                kernel_size=candidate,
                strides=strides,
                activation=activation)
            for candidate in kernel_size.choices
        ]
        return layers.maybe_switch_v2(kernel_size.mask, branches)

    # A single-choice OneOf degenerates to its only candidate; otherwise the
    # kernel size is already a concrete value.
    fixed_kernel_size = kernel_size.choices[0] if is_oneof else kernel_size
    return _depthwise_conv_with_fixed_kernel(
        params=params,
        filters_or_mask=filters_or_mask,
        kernel_size=fixed_kernel_size,
        strides=strides,
        activation=activation)
def _build_oneof(params, layer_spec, input_filters, output_filters,
                 filters_base):
    """Select one of N possible choices.

    Args:
        params: Model-construction parameters; `force_stateless_batch_norm`
            must be true when `layer_spec` has more than one choice.
        layer_spec: A `schema.OneOf` whose choices are layer specifications.
        input_filters: Filter count for the layer input.
        output_filters: Filter count for the layer output.
        filters_base: Base used for filter-size computations in
            `_build_layer`.

    Returns:
        A switch layer over one built layer per choice in `layer_spec`.

    Raises:
        ValueError: If `layer_spec` has multiple choices but
            `force_stateless_batch_norm` is not enabled.
    """
    has_multiple_choices = len(layer_spec.choices) > 1
    if has_multiple_choices and not params['force_stateless_batch_norm']:
        raise ValueError(
            'force_stateless_batch_norm must be true for models containing '
            'Switch layers (e.g., when performing architecture searches).')

    branches = []
    for choice in layer_spec.choices:
        branches.append(
            _build_layer(params, choice, input_filters, output_filters,
                         filters_base))
    return layers.maybe_switch_v2(layer_spec.mask, branches)