def build_model_base(images, model_name, training, override_params=None):
  """A helper function to create a base model and return global_pool.

  Args:
    images: input images tensor.
    model_name: string, the predefined model name.
    training: boolean, whether the model is constructed for training.
    override_params: A dictionary of params for overriding. Fields must exist
      in efficientnet_model.GlobalParams.

  Returns:
    features: global pool features.
    endpoints: the endpoints for each layer.

  Raises:
    When model_name specified an undefined model, raises NotImplementedError.
    When override_params has invalid fields, raises ValueError.
  """
  assert isinstance(images, tf.Tensor)
  # For backward compatibility: older callers pass `drop_connect_rate`;
  # translate it into the equivalent `survival_prob` expected by the model.
  if override_params and override_params.get('drop_connect_rate', None):
    override_params['survival_prob'] = 1 - override_params['drop_connect_rate']
  blocks_args, global_params = get_model_params(model_name, override_params)

  # AUTO_REUSE lets this helper be called multiple times under one graph
  # without variable-duplication errors.
  with tf.variable_scope(model_name, reuse=tf.AUTO_REUSE):
    model = efficientnet_model.Model(blocks_args, global_params)
    features = model(images, training=training, features_only=True)

  features = tf.identity(features, 'features')
  return features, model.endpoints
def build_model_base(images, model_name, training, override_params=None):
  """A helper function to create a base model and return global_pool.

  Args:
    images: input images tensor.
    model_name: string, the predefined model name.
    training: boolean, whether the model is constructed for training.
    override_params: A dictionary of params for overriding. Fields must exist
      in efficientnet_model.GlobalParams.

  Returns:
    features: global pool features.
    endpoints: the endpoints for each layer.

  Raises:
    When model_name specified an undefined model, raises NotImplementedError.
    When override_params has invalid fields, raises ValueError.
  """
  assert isinstance(images, tf.Tensor)
  blocks_args, global_params = get_model_params(model_name, override_params)

  with tf.variable_scope(model_name):
    model = efficientnet_model.Model(blocks_args, global_params)
    features = model(images, training=training, features_only=True)

  features = tf.identity(features, 'global_pool')
  return features, model.endpoints
def build_model(images, model_name, training, override_params=None,
                model_dir=None, fine_tuning=False):
  """A helper function to create a model and return predicted logits.

  Args:
    images: input images tensor.
    model_name: string, the predefined model name.
    training: boolean, whether the model is constructed for training.
    override_params: A dictionary of params for overriding. Fields must exist
      in efficientnet_model.GlobalParams.
    model_dir: string, optional model dir for saving configs.
    fine_tuning: boolean, whether the model is used for finetuning.

  Returns:
    logits: the logits tensor of classes.
    endpoints: the endpoints for each layer.

  Raises:
    When model_name specified an undefined model, raises NotImplementedError.
    When override_params has invalid fields, raises ValueError.
  """
  assert isinstance(images, tf.Tensor)
  # Eval and finetuning use a batch-norm variant from utils.
  if not training or fine_tuning:
    if not override_params:
      override_params = {}
    override_params['batch_norm'] = utils.BatchNormalization
  blocks_args, global_params = get_model_params(model_name, override_params)
  if not training or fine_tuning:
    # NOTE(review): batch_norm was already set via override_params above, so
    # this _replace appears redundant; kept to preserve behavior in case
    # get_model_params does not honor the override — confirm before removing.
    global_params = global_params._replace(
        batch_norm=utils.BatchNormalization)

  if model_dir:
    # Dump the resolved model configuration once, for reproducibility.
    param_file = os.path.join(model_dir, 'model_params.txt')
    if not tf.gfile.Exists(param_file):
      if not tf.gfile.Exists(model_dir):
        tf.gfile.MakeDirs(model_dir)
      with tf.gfile.GFile(param_file, 'w') as f:
        # Lazy %-args instead of eager string interpolation.
        tf.logging.info('writing to %s', param_file)
        f.write('model_name= %s\n\n' % model_name)
        f.write('global_params= %s\n\n' % str(global_params))
        f.write('blocks_args= %s\n\n' % str(blocks_args))

  with tf.variable_scope(model_name):
    model = efficientnet_model.Model(blocks_args, global_params)
    logits = model(images, training=training)

  logits = tf.identity(logits, 'logits')
  return logits, model.endpoints
def build_model(images,
                model_name,
                training,
                override_params=None,
                model_dir=None,
                fine_tuning=False,
                features_only=False,
                pooled_features_only=False):
  """A helper function to create a model and return predicted logits.

  Args:
    images: input images tensor.
    model_name: string, the predefined model name.
    training: boolean, whether the model is constructed for training.
    override_params: A dictionary of params for overriding. Fields must exist
      in efficientnet_model.GlobalParams.
    model_dir: string, optional model dir for saving configs.
    fine_tuning: boolean, whether the model is used for finetuning.
    features_only: build the base feature network only (excluding final 1x1
      conv layer, global pooling, dropout and fc head).
    pooled_features_only: build the base network for features extraction
      (after 1x1 conv layer and global pooling, but before dropout and fc
      head).

  Returns:
    logits: the logits tensor of classes.
    endpoints: the endpoints for each layer.

  Raises:
    When model_name specified an undefined model, raises NotImplementedError.
    When override_params has invalid fields, raises ValueError.
  """
  assert isinstance(images, tf.Tensor)
  # The two reduced-output modes are mutually exclusive.
  assert not (features_only and pooled_features_only)

  # For backward compatibility: older callers pass `drop_connect_rate`;
  # translate it into the equivalent `survival_prob`.
  if override_params and override_params.get('drop_connect_rate', None):
    override_params['survival_prob'] = 1 - override_params['drop_connect_rate']

  if not training or fine_tuning:
    if not override_params:
      override_params = {}
    # Eval and finetuning use a batch-norm variant from utils.
    override_params['batch_norm'] = utils.BatchNormalization
    if fine_tuning:
      # Use the non-native swish implementation when finetuning —
      # presumably for gradient/checkpoint compatibility; TODO confirm.
      override_params['relu_fn'] = functools.partial(swish, use_native=False)
  blocks_args, global_params = get_model_params(model_name, override_params)

  if model_dir:
    # Dump the resolved model configuration once, for reproducibility.
    param_file = os.path.join(model_dir, 'model_params.txt')
    if not tf.gfile.Exists(param_file):
      if not tf.gfile.Exists(model_dir):
        tf.gfile.MakeDirs(model_dir)
      with tf.gfile.GFile(param_file, 'w') as f:
        logging.info('writing to %s', param_file)
        f.write('model_name= %s\n\n' % model_name)
        f.write('global_params= %s\n\n' % str(global_params))
        f.write('blocks_args= %s\n\n' % str(blocks_args))

  with tf.variable_scope(model_name):
    model = efficientnet_model.Model(blocks_args, global_params)
    outputs = model(
        images,
        training=training,
        features_only=features_only,
        pooled_features_only=pooled_features_only)
  # Name the output tensor according to the selected output mode so it can
  # be fetched by name from the graph.
  if features_only:
    outputs = tf.identity(outputs, 'features')
  elif pooled_features_only:
    outputs = tf.identity(outputs, 'pooled_features')
  else:
    outputs = tf.identity(outputs, 'logits')
  return outputs, model.endpoints