def __init__(self, n_classes, optimizer=None, n_filters=96, keep_prob=0.5,
             learning_rate=None, model_dir=None, config=None,
             warm_start_from=None, multi_gpu=False, n_examples=1.0,
             prior_path=None):
    """Build the MeshNetBWN estimator.

    Args:
        n_classes: Number of output classes, forwarded via ``params``.
        optimizer: Optimizer name/class/instance or None. When not None it
            is resolved with ``get_optimizer_instance(optimizer,
            learning_rate)``; an already-instantiated optimizer passes
            through unchanged.
        n_filters: Number of filters, forwarded via ``params``.
        keep_prob: Accepted for interface compatibility but NOT forwarded
            in ``params`` -- NOTE(review): confirm whether the model
            function should receive it.
        learning_rate: Learning rate used when resolving ``optimizer``.
        model_dir: Directory passed to the base estimator.
        config: Run config passed to the base estimator.
        warm_start_from: Warm-start settings passed to the base estimator.
        multi_gpu: If True, wrap the optimizer in ``TowerOptimizer`` and
            replicate the model function across devices.
        n_examples: Forwarded via ``params``.
        prior_path: Forwarded via ``params``.
    """
    params = {
        'n_classes': n_classes,
        # If an instance of an optimizer is passed in, this will just
        # return it.
        'optimizer': (
            None if optimizer is None
            else get_optimizer_instance(optimizer, learning_rate)),
        'n_filters': n_filters,
        'n_examples': n_examples,
        'prior_path': prior_path,
    }

    _model_fn = model_fn

    if multi_gpu:
        # Bug fix: the original wrapped unconditionally, so multi_gpu=True
        # combined with the default optimizer=None handed None to
        # TowerOptimizer, which is not a valid optimizer.
        if params['optimizer'] is not None:
            params['optimizer'] = TowerOptimizer(params['optimizer'])
        _model_fn = replicate_model_fn(_model_fn)

    super(MeshNetBWN, self).__init__(
        model_fn=_model_fn,
        model_dir=model_dir,
        params=params,
        config=config,
        warm_start_from=warm_start_from)
def __init__(self, n_classes, optimizer=None, learning_rate=None,
             model_dir=None, config=None, warm_start_from=None,
             multi_gpu=False):
    """Build the HighRes3DNet estimator.

    Args:
        n_classes: Number of output classes, forwarded via ``params``.
        optimizer: Optimizer name/class/instance or None. When not None it
            is resolved with ``get_optimizer_instance(optimizer,
            learning_rate)``; an already-instantiated optimizer passes
            through unchanged.
        learning_rate: Learning rate used when resolving ``optimizer``.
        model_dir: Directory passed to the base estimator.
        config: Run config passed to the base estimator.
        warm_start_from: Warm-start settings passed to the base estimator.
        multi_gpu: If True, wrap the optimizer in ``TowerOptimizer`` and
            replicate the model function across devices.
    """
    params = {
        'n_classes': n_classes,
        # If an instance of an optimizer is passed in, this will just
        # return it.
        'optimizer': (
            None if optimizer is None
            else get_optimizer_instance(optimizer, learning_rate)),
    }

    _model_fn = model_fn

    if multi_gpu:
        # Bug fix: the original wrapped unconditionally, so multi_gpu=True
        # combined with the default optimizer=None handed None to
        # TowerOptimizer, which is not a valid optimizer.
        if params['optimizer'] is not None:
            params['optimizer'] = TowerOptimizer(params['optimizer'])
        _model_fn = replicate_model_fn(_model_fn)

    super(HighRes3DNet, self).__init__(
        model_fn=_model_fn,
        model_dir=model_dir,
        params=params,
        config=config,
        warm_start_from=warm_start_from,
    )
def __init__(self, n_classes, optimizer, n_filters=64, n_examples=1.0,
             n_prior_samples=1.0, learning_rate=None, model_dir=None,
             config=None, warm_start_from=None, prior_path=None,
             multi_gpu=False, only_kld=False, is_mc='True'):
    """Build the MeshNetCWN estimator.

    Resolves the optimizer, assembles the ``params`` dict consumed by the
    model function, optionally replicates the model function across
    devices, and delegates to the base estimator constructor.

    NOTE(review): ``is_mc`` defaults to the *string* ``'True'``, not the
    boolean ``True`` -- confirm downstream code expects a string here.
    """
    print('Learning Rate: ' + str(learning_rate))

    # An already-instantiated optimizer passes through
    # get_optimizer_instance unchanged.
    resolved_optimizer = get_optimizer_instance(optimizer, learning_rate)

    params = dict(
        n_classes=n_classes,
        optimizer=resolved_optimizer,
        n_filters=n_filters,
        n_examples=n_examples,
        prior_path=prior_path,
        n_prior_samples=n_prior_samples,
        only_kld=only_kld,
        is_mc=is_mc,
    )

    estimator_model_fn = model_fn
    if multi_gpu:
        params['optimizer'] = TowerOptimizer(params['optimizer'])
        estimator_model_fn = replicate_model_fn(estimator_model_fn)

    super(MeshNetCWN, self).__init__(
        model_fn=estimator_model_fn,
        model_dir=model_dir,
        params=params,
        config=config,
        warm_start_from=warm_start_from)
def _create_train_op(loss, learning_rate):
    """Create a training op minimizing ``loss`` with tower-wrapped RMSProp.

    Args:
        loss: Loss tensor to minimize.
        learning_rate: Learning rate for ``RMSPropOptimizer`` (decay 0.9).

    Returns:
        The train op produced by ``slim.learning.create_train_op``.
    """
    rmsprop = tf.train.RMSPropOptimizer(learning_rate=learning_rate,
                                        decay=0.9)
    tower_optimizer = TowerOptimizer(rmsprop)
    global_step = tf.train.get_or_create_global_step()
    return slim.learning.create_train_op(
        loss, tower_optimizer, global_step=global_step)