Example #1
import tensorflow as tf  # TF1-style graph/session API (tf.compat.v1 under TF2)
import evolution  # library module providing accuracy_error; the import path may differ per install


def eval_cnn(preds, placeholders, sess, graph, inputs, outputs, _):
    with graph.as_default():
        # Inference: softmax over the network output "n1", fed with the test inputs.
        res = sess.run(tf.nn.softmax(preds["n1"]),
                       feed_dict={placeholders["i0"]: inputs["i0"]})
        sess.close()

        # Trailing comma: the fitness must be returned as a one-element tuple.
        return evolution.accuracy_error(res, outputs["o0"]),
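Both evaluation functions return a one-element tuple produced by accuracy_error, which the library defines itself. The sketch below, using the hypothetical name accuracy_error_sketch and assuming one-hot encoded labels, only illustrates what an argmax-based error rate of this kind typically computes; it is not the library's implementation.

import numpy as np

def accuracy_error_sketch(probs, labels):
    # probs:  (n_samples, n_classes) softmax outputs of the model
    # labels: (n_samples, n_classes) one-hot ground truth (assumption)
    predicted = np.argmax(probs, axis=1)
    expected = np.argmax(labels, axis=1)
    # Error rate = 1 - accuracy, so lower values mean a fitter model.
    return float(np.mean(predicted != expected))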
Example #2
import tensorflow as tf  # TF1-style graph/session API (tf.compat.v1 under TF2)
from evolution import accuracy_error  # import path may differ depending on the installation


def eval_sequential(preds, placeholders, sess, graph, inputs, outputs, _):
    """
    Compute the fitness of the model. This function is called internally by the evolutionary algorithm and is always provided with the same parameters.
    :param preds: Dictionary created in the arranging and training function
    :param placeholders: Input placeholders only ("i0", "i1", ..., "im").
    :param sess: tf session to perform inference
    :param graph: tf graph in which inference is performed
    :param inputs: Data inputs for the model
    :param outputs: Data outputs for the metric
    :param _: hyperparameters; because we are evolving the optimizer selection and the learning rate, they are unused when testing
    :return: fitness of the model (as a tuple)
    """
    with graph.as_default():
        res = sess.run(tf.nn.softmax(preds["n1"]), feed_dict={placeholders["i0"]: inputs["i0"]})
        sess.close()

        return accuracy_error(res, outputs["o0"]),
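A function with this signature can also be exercised outside the evolutionary loop as a quick sanity check. The snippet below is only a sketch under TF1-style graph/session semantics (tf.compat.v1 under TF2); the toy graph, dummy data, and shapes are invented for illustration, and it assumes eval_sequential and the library's accuracy_error are importable.

import numpy as np
import tensorflow as tf

tf.compat.v1.disable_eager_execution()  # graph/session mode under TF2

graph = tf.Graph()
with graph.as_default():
    i0 = tf.compat.v1.placeholder(tf.float32, shape=(None, 4), name="i0")
    w = tf.compat.v1.get_variable("w", shape=(4, 3))  # stand-in for an evolved network
    logits = tf.matmul(i0, w)                         # plays the role of preds["n1"]
    init = tf.compat.v1.global_variables_initializer()

sess = tf.compat.v1.Session(graph=graph)
sess.run(init)

inputs = {"i0": np.random.rand(8, 4).astype(np.float32)}      # dummy test inputs
outputs = {"o0": np.eye(3)[np.random.randint(0, 3, size=8)]}  # dummy one-hot labels

fitness = eval_sequential({"n1": logits}, {"i0": i0}, sess, graph, inputs, outputs, None)
print(fitness)  # a one-element tuple, e.g. (0.625,)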