# Plot the training/validation loss and accuracy curves plus per-class AUC
# scores to <results_dir>/<EXPERIMENT_NAME>.png. The score subplot's y-axis is
# clamped to (0, 0.08); AUC curves are labeled "brain" plus classes 1-4 —
# presumably whole-brain vs. the four tumor sub-classes, TODO confirm against
# printLosses' definition.
printLosses(all_training_losses, all_training_accuracies, all_validation_losses, all_validation_accuracies, os.path.join(results_dir, "%s.png" % EXPERIMENT_NAME), n_feedbacks_per_epoch, auc_scores=auc_scores, auc_labels=["brain", "1", "2", "3", "4"], ylim_score=(0,0.08))
        # loss, acc = train_fn(data, convert_seg_map_for_crossentropy(seg, range(4)).astype(np.float32))
        # Flatten the segmentation map into a 1-D vector of per-voxel int32
        # class ids (the shape the compiled train_fn expects as targets).
        seg_flat = seg.flatten().astype(np.int32)
        # Per-voxel loss weight, looked up by class id. Assumes
        # class_frequencies2 is indexable by class label — presumably
        # inverse-frequency class weights; TODO confirm where it is built.
        w = class_frequencies2[seg_flat]
        # train_fn returns an unreduced per-voxel loss vector and an accuracy
        # scalar. The commented alternative used class_weights instead of
        # class_frequencies2.
        loss_vec, acc = train_fn(data, seg_flat, w) #class_weights[seg_flat]
        # Scalar loss for logging = mean over all voxels in the minibatch.
        loss = loss_vec.mean()
        # Mean loss per sample (row per batch element); fed to update_losses,
        # presumably to track hard examples by their indices `idx`.
        loss_per_sample = loss_vec.reshape(BATCH_SIZE, -1).mean(axis=1)
        losses = update_losses(losses, idx, loss_per_sample)
        # Accumulate epoch-level and feedback-interval-level running stats.
        train_loss += loss
        train_loss_tmp += loss
        train_acc_tmp += acc
        batch_ctr += 1
        # NOTE(review): '>' is tested *after* the increment, so
        # n_batches_per_epoch + 1 batches are processed per epoch while the
        # epoch average below divides by n_batches_per_epoch — confirm this
        # off-by-one is intentional.
        if batch_ctr > n_batches_per_epoch:
            break

    # Shut down the training data generator (presumably terminates its
    # background worker processes — _finish is a private API, verify against
    # the generator implementation).
    data_gen_train._finish()

    # Average the accumulated loss over the nominal batch count for reporting.
    train_loss /= n_batches_per_epoch
    print "training loss average on epoch: ", train_loss
    # In the first epoch the per-sample loss tracker has not seen every sample
    # yet, so reset all entries to a large sentinel value (100.) to avoid
    # acting on incomplete statistics — TODO confirm how `losses` is consumed.
    if epoch <= 1:
        losses[:] = 100.

    # Reset accumulators for the validation pass that follows.
    y_true = []
    y_pred = []
    test_loss = 0
    accuracies = []
    valid_batch_ctr = 0
    # Validation pass: same per-batch preprocessing as the training loop above
    # (flatten segmentation to per-voxel class ids, look up per-voxel weights).
    for data, seg, labels in data_gen_validation:
        # loss, acc = val_fn(data, convert_seg_map_for_crossentropy(seg, range(4)).astype(np.float32))
        seg_flat = seg.flatten().astype(np.int32)
        w = class_frequencies2[seg_flat]