False,
    dropout_rate=0.0,
    batch_normalization_statistic=False,
    bn_params=bn_params[1])
net_outs2, variables2, _ = tft.volume_bndo_flbias_l6_40(
    volumes_reshape[2],
    False,
    dropout_rate=0.0,
    batch_normalization_statistic=False,
    bn_params=bn_params[2])
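
# Combine the three branch outputs according to FUSION_MODE: 'late' trains a
# small fusion classifier on the branches' features, while 'committe'
# (committee fusion) presumably averages the three branches' softmax outputs
# (the internals of tft.committe_fusion are not shown here).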
if FUSION_MODE == 'late':
    features = [
        net_outs0['flattened_out'], net_outs1['flattened_out'],
        net_outs2['fc1_out']
    ]
    _, softmax_out, variables_fusion = tft.late_fusion(features, True)
elif FUSION_MODE == 'committe':
    predictions = [
        net_outs0['sm_out'], net_outs1['sm_out'], net_outs2['sm_out']
    ]
    softmax_out = tft.committe_fusion(predictions)
correct_prediction = tf.equal(tf.argmax(softmax_out, 1),
                              tf.argmax(real_label, 1))
batch_accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
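
# batch_accuracy: fraction of samples whose predicted class (argmax of
# softmax_out) matches the one-hot ground-truth label.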

if not AUGMENTATION:
    correct_output = open(
        vision_path + "/correct_predictions_" + FUSION_MODE + "_subset9.log",
        "w")
    incorrect_output = open(
        vision_path + "/incorrect_predictions_" + FUSION_MODE + "_subset9.log",
        "w")
    if FUSION_MODE == 'vote':
        predictions = [
            outputs0['sm_out'], outputs1['sm_out'], outputs2['sm_out']
        ]
        combined_prediction = tft.vote_fusion(predictions)
        combined_prediction = tf.reshape(combined_prediction, [-1, 1])
    elif FUSION_MODE == 'committe':
        predictions = [
            outputs0['sm_out'], outputs1['sm_out'], outputs2['sm_out']
        ]
        combined_prediction = tft.committe_fusion(predictions)
    elif FUSION_MODE == 'late':
        features = [
            outputs0['flattened_out'], outputs1['flattened_out'],
            outputs2['flattened_out']
        ]
        _, combined_prediction, variables_fusion = tft.late_fusion(
            features, False)
    else:
        print("unknown fusion mode")
        exit()

    saver0 = tf.train.Saver(variables0)
    saver1 = tf.train.Saver(variables1)
    saver2 = tf.train.Saver(variables2)
    if FUSION_MODE == 'late':
        saver_fusion = tf.train.Saver(variables_fusion)
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    sess = tf.Session(config=config)
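    # Restore each branch's pretrained weights from its own checkpoint
    # (net_files is assumed to hold one checkpoint path per branch).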
    saver0.restore(sess, net_files[0])
    saver1.restore(sess, net_files[1])
    saver2.restore(sess, net_files[2])
Example #3
    positive_confidence,
    dropout_rate=0.0,
    batch_normalization_statistic=False,
    bn_params=bn_params[1])
net_outs2, variables2, _ = tft.volume_bndo_flbias_l6_40(
    volumes_reshape[2],
    False,
    positive_confidence,
    dropout_rate=0.0,
    batch_normalization_statistic=False,
    bn_params=bn_params[2])
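
# Training graph for the fusion head: the three branches' features (note that
# branch 2 contributes its fc1_out rather than flattened_out) are passed to
# tft.late_fusion; the boolean flag presumably enables training behaviour such
# as dropout inside the fusion layers.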
features = [
    net_outs0['flattened_out'], net_outs1['flattened_out'],
    net_outs2['fc1_out']
]
net_out, sm_out, fusion_variables = tft.late_fusion(features, True,
                                                    positive_confidence)
loss = tf.nn.softmax_cross_entropy_with_logits(logits=net_out,
                                               labels=real_label)
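# softmax_cross_entropy_with_logits returns one loss value per example, so the
# per-sample weights built below can be applied elementwise before any reduction.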
if type(LOSS_BALANCING) == float:
    balancing_factor = LOSS_BALANCING
else:
    balancing_factor = positive_confidence
balancing = real_label[:, 0] + tf.pow(tf.constant(-1, dtype=tf.float32),
                                      real_label[:, 0]) * tf.constant(
                                          balancing_factor, tf.float32)
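# With one-hot labels, real_label[:, 0] is 1 for class-0 samples and 0 for
# class-1 samples, so balancing evaluates to (1 - balancing_factor) for class 0
# and balancing_factor for class 1.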
modulation = tf.pow(
    real_label[:, 0] +
    tf.pow(tf.constant(-1, dtype=tf.float32), real_label[:, 0]) * sm_out[:, 0],
    tf.constant(2, dtype=tf.float32))
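# Assuming a two-class softmax, the same construction yields
# modulation = (1 - p_t)^2, a focal-loss style modulating factor with gamma = 2,
# where p_t is the predicted probability of the true class and sm_out[:, 0] is
# the class-0 probability.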
if LOSS_BALANCING:
    loss = balancing * loss