def train_and_validate(init_learning_rate_log, weight_decay_log):
    """Train the ConvNet with the given log-scale hyperparameters and
    return the best validation score (the hyperparameter-search objective).

    :param init_learning_rate_log: float, log10 of the initial learning rate.
    :param weight_decay_log: float, log10 of the weight decay coefficient.
    :return: float, the maximum validation score observed during training.
    """
    # Start from a clean graph on every call, so repeated hyperparameter
    # evaluations do not accumulate ops in the default graph.
    tf.reset_default_graph()
    graph = tf.get_default_graph()
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True

    # NOTE(review): mutates the module-level hp_d dict as a side effect;
    # the search operates in log10 space, so convert back to linear scale.
    hp_d['init_learning_rate'] = 10 ** init_learning_rate_log
    hp_d['weight_decay'] = 10 ** weight_decay_log

    model = ConvNet([227, 227, 3], 2, **hp_d)
    evaluator = Evaluator()
    optimizer = Optimizer(model, train_set, evaluator, val_set=val_set, **hp_d)

    # Fix: manage the session with a context manager so it is always closed.
    # The original leaked one session (and its GPU memory) per call, which
    # matters because this function is invoked once per search trial.
    with tf.Session(graph=graph, config=config) as sess:
        train_results = optimizer.train(sess, details=True, verbose=True, **hp_d)

    # Return the maximum validation score as the optimization target.
    best_val_score = np.max(train_results['eval_scores'])
    return best_val_score
""" 2. Set test hyperparameters """
hp_d = dict()
# Mean image subtracted during preprocessing; must match the one used at
# training time.  NOTE(review): path is hard-coded to /tmp — confirm.
image_mean = np.load('/tmp/asirra_mean.npy')    # load mean image
hp_d['image_mean'] = image_mean

# FIXME: Test hyperparameters
hp_d['batch_size'] = 256
hp_d['augment_pred'] = True


""" 3. Build graph, load weights, initialize a session and start test """
# Initialize
graph = tf.get_default_graph()
config = tf.ConfigProto()
# config.gpu_options.allow_growth = True
# Input shape (227, 227, 3) and 2 output classes must match the trained
# checkpoint being restored below.
model = ConvNet([227, 227, 3], 2, **hp_d)
evaluator = Evaluator()
saver = tf.train.Saver()

# Session is intentionally left open: the test/prediction code later in the
# file uses it.
sess = tf.Session(graph=graph, config=config)
saver.restore(sess, '/tmp/model.ckpt')    # restore learned weights

# Commented-out sample-image download code kept as a block string (the
# closing quotes are later in the file).
'''
img_url = [
    'http://assets.shockpedia.com/app/uploads/2017/10/29091359/puppy-1.jpg',
    'https://vetstreet.brightspotcdn.com/dims4/default/5b3ffe7/2147483647/thumbnail/180x180/quality/90/?url=https%3A%2F%2Fvetstreet-brightspot.s3.amazonaws.com%2F8e%2F4e3910c36111e0bfca0050568d6ceb%2Ffile%2Fhub-dogs-puppy.jpg',
    'https://www.petfinder.com/images/breeds/dog/1460.jpg',
    'https://d4n5pyzr6ibrc.cloudfront.net/media/27FB7F0C-9885-42A6-9E0C19C35242B5AC/4785B1C2-8734-405D-96DC23A6A32F256B/thul-90efb785-97af-5e51-94cf-503fc81b6940.jpg?response-content-disposition=inline',
    'http://www.bristol.ac.uk/media-library/sites/vetscience/migrated/images/catstudymonte.jpg',
    'https://ichef.bbci.co.uk/images/ic/480xn/p04gr933.jpg'
]
imgs = []