def f(self, x, y):
    """Default loss hook: softmax cross-entropy between logits and labels.

    Args:
        x: logits tensor.
        y: labels tensor — presumably one-hot / same shape as logits, as
           required by softmax_cross_entropy_with_logits_v2 (TODO confirm).

    Returns:
        Per-example cross-entropy loss tensor.
    """
    # NOTE(review): `log` is presumably a module-level logger defined elsewhere.
    log.warn('Using default loss cross entropy')
    loss = tf.nn.softmax_cross_entropy_with_logits_v2(logits=x, labels=y)
    return loss
def f(logits, y):
    """Default testing-metrics hook: warns and yields a constant zero metric.

    Args:
        logits: model output tensor (unused).
        y: labels tensor (unused).

    Returns:
        A scalar constant-0 tensor standing in for a real metric.
    """
    log.warn('No testing metrics set')
    placeholder_metric = tf.constant(0)
    return placeholder_metric
def optimize(loss):
    """Default optimizer hook: only warns; no training op is built.

    Args:
        loss: loss tensor (ignored).

    Returns:
        None — callers presumably treat a missing train op as a no-op
        (TODO confirm against call site).
    """
    log.warn('No optimizer set')
def f(logits, ys):
    """Default training-metrics hook: warns and passes inputs through unchanged.

    Args:
        logits: model output tensor.
        ys: labels tensor.

    Returns:
        The ``(logits, ys)`` pair, untouched.
    """
    log.warn('No training metrics set')
    return (logits, ys)