# (context: assumes `import tensorflow as tf`, `import far_ho as far`, and that
# the model output `out`, the labels `y` and the training loss `tr_loss`
# are defined above)

# outer objective (validation error, not weighted)
val_loss = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(labels=y, logits=out))
accuracy = tf.reduce_mean(
    tf.cast(tf.equal(tf.argmax(y, 1), tf.argmax(out, 1)), tf.float32))

# optimizers
# get a hyperparameter for the learning rate
lr = far.get_hyperparameter('lr', 0.01)
# for training error minimization an optimizer from far_ho is needed
io_optim = far.GradientDescentOptimizer(lr)
# for the outer objective all optimizers from tf are valid
oo_optim = tf.train.AdamOptimizer()

print('hyperparameters to optimize')
for h in far.hyperparameters():
    print(h)

# build the hyperparameter optimizer; the first four hyperparameters are
# used as the initialization of the model variables (inner dynamics)
farho = far.HyperOptimizer()
run = farho.minimize(
    val_loss, oo_optim, tr_loss, io_optim,
    init_dynamics_dict={
        v: h for v, h in zip(tf.model_variables(),
                             far.utils.hyperparameters()[:4])
    })

print('Variables (or tensors) that will store the values of the hypergradients')
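# --- Usage sketch (illustrative addition, not in the original example) ---
# `run` performs one hyper-iteration: T steps of the inner dynamics followed
# by one hypergradient step on the outer objective. The placeholder `x`, the
# data arrays (train_data, train_labels, valid_data, valid_labels) and the
# iteration counts below are assumptions for illustration only.
T = 100                    # inner (training) steps per hyper-iteration
n_hyper_iterations = 50    # hypothetical number of outer steps
with tf.Session().as_default():
    tf.global_variables_initializer().run()
    for _ in range(n_hyper_iterations):
        run(T,
            inner_objective_feed_dicts={x: train_data, y: train_labels},
            outer_objective_feed_dicts={x: valid_data, y: valid_labels})
        print('validation accuracy:',
              accuracy.eval({x: valid_data, y: valid_labels}))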
        # final layers of the network: `self + layer` appends a layer
        # and updates self.out
        self + residual_block(self.out, 256)
        self + tcl.conv2d(self.out, 2048, 1,
                          variables_collections=self.var_coll)
        self + tf.nn.avg_pool(self.out, [1, 6, 6, 1], [1, 6, 6, 1], 'SAME')
        self + tcl.conv2d(self.out, 512, 1,
                          variables_collections=self.var_coll)
        self + tf.reshape(self.out, (-1, 512))

    def for_input(self, new_input):
        # rebuild the same network on a new input tensor; the trailing True
        # presumably sets variable reuse so the weights are shared
        return TCML_ResNet_Omniglot_v2(new_input, self.name,
                                       self.deterministic_initialization,
                                       True)


# builder functions: factories with the (input, name) signature
def hr_res_net_tcml_Omniglot_builder_v2():
    return lambda x, name: TCML_ResNet_Omniglot_v2(x, name=name)


def hr_res_net_tcml_v1_builder():
    return lambda x, name: TCML_ResNet(x, name=name)


def hr_res_net_tcml_Omniglot_builder():
    return lambda x, name: TCML_ResNet_Omniglot(x, name=name)


if __name__ == '__main__':
    # quick smoke test: build the network on an 84x84 RGB input and list
    # the hyperparameters it registers
    inp = tf.placeholder(tf.float32, (None, 84, 84, 3))
    net = TCML_ResNet(inp)
    print(net.out)
    print(far.hyperparameters())
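# --- Usage sketch (illustrative addition, not in the original file) ---
# A builder returns a factory with the (x, name) signature; for_input then
# re-binds an already built network to a new tensor so the two graphs share
# weights. The helper name `_builder_demo` and the 28x28x1 Omniglot-style
# placeholder shape are assumptions for illustration only.
def _builder_demo():
    builder = hr_res_net_tcml_Omniglot_builder_v2()
    x_train = tf.placeholder(tf.float32, (None, 28, 28, 1))
    x_valid = tf.placeholder(tf.float32, (None, 28, 28, 1))
    net_train = builder(x_train, 'tcml_omniglot')
    net_valid = net_train.for_input(x_valid)  # same weights, new input
    print(net_train.out, net_valid.out)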