Example #1
def ComputeScores(list_rgb,
                  dic_gt,
                  dic_prob,
                  p1,
                  p2,
                  keep_memory=False,
                  path_save='./tmp'):
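    """Score post-processed predictions against their ground-truth masks.

    For every image, AJI, F1 and Data Science Bowl metrics are computed.
    With keep_memory=True the per-image results are returned and debug
    images are written under path_save; otherwise the mean AJI, F1 and
    DSB scores are returned.
    """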
    res_AJI = []
    res_F1 = []
    res_DSB = []
    res_ps = []
    res_TP = []
    res_FN = []
    res_FP = []
    for path in list_rgb:
        GT = imread(dic_gt[path])
        GT = PostProcess(GT, 1, 0)
        DImg = dic_prob[path]
        DImg[DImg < 0] = 0
        DImg = DImg.astype("uint8")
        S = PostProcess(DImg, p1, p2)
        res_AJI.append(AJI_fast(GT, S))
        res_F1.append(ComputeF1(GT, S))
        scores, p_s, TP, FN, FP = DataScienceBowlMetrics(GT, S)
        res_DSB.append(scores)
        res_ps.append(p_s)
        res_TP.append(TP)
        res_FN.append(FN)
        res_FP.append(FP)
        if keep_memory:
            img_mean = np.mean(imread(path)[:, :, 0:3])
            color_cont = img_mean >= 125  # contour colour flag based on mean image brightness
            OUT = join(path_save, basename(path).replace('.png', ''))
            CheckOrCreate(OUT)
            os.symlink(abspath(path), join(OUT, "rgb.png"))
            os.symlink(abspath(dic_gt[path]), join(OUT, "bin.png"))
            imsave(join(OUT, "colored_bin.png"), color_bin(label(GT)))
            imsave(join(OUT, "colored_pred.png"), color_bin(S))
            imsave(join(OUT, "output_DNN.png"), DImg)
            imsave(join(OUT, "contours_gt.png"),
                   Overlay_with_pred(path, GT, color_cont).astype('uint8'))
            imsave(join(OUT, "contours_pred.png"),
                   Overlay_with_pred(path, S, color_cont).astype('uint8'))


    if keep_memory:
        return res_AJI, res_F1, res_DSB, res_ps, res_TP, res_FN, res_FP
    else:
        return np.mean(res_AJI), np.mean(res_F1), np.mean(res_DSB)
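
The CheckOrCreate helper used throughout these examples is never shown; a minimal sketch, assuming it only needs to make sure an output directory exists:

import os

def CheckOrCreate(path):
    # create the directory (and any missing parents) if it does not exist yet
    if not os.path.isdir(path):
        os.makedirs(path)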
Example #2
def ComputeScores(list_rgb,
                  dic_gt,
                  dic_prob,
                  p2,
                  keep_memory=False,
                  path_save='./tmp'):
    res_AJI = []
    res_F1 = []
    res_DSB = []
    res_ps = []
    res_TP = []
    res_FN = []
    res_FP = []
    for path in list_rgb:
        GT = imread(dic_gt[path])
        GT = Label3(GT)
        S = MultiClassToBinMap(dic_prob[path], p2)
        res_AJI.append(AJI_fast(GT, S))
        res_F1.append(ComputeF1(GT, S))
        scores, p_s, TP, FN, FP = DataScienceBowlMetrics(GT, S)
        res_DSB.append(scores)
        res_ps.append(p_s)
        res_TP.append(TP)
        res_FN.append(FN)
        res_FP.append(FP)
        if keep_memory:
            img_mean = np.mean(imread(path)[:, :, 0:3])
            color_cont = img_mean >= 125  # contour colour flag based on mean image brightness
            OUT = join(path_save, basename(path).replace('.png', ''))
            CheckOrCreate(OUT)
            os.symlink(abspath(path), join(OUT, "rgb.png"))
            os.symlink(abspath(dic_gt[path]), join(OUT, "bin.png"))
            imsave(join(OUT, "colored_bin.png"), color_bin(label(GT)))
            imsave(join(OUT, "colored_pred.png"), color_bin(S.astype(int)))
            imsave(join(OUT, "contours_gt.png"),
                   Overlay(path, dic_gt[path], color_cont).astype('uint8'))
            imsave(join(OUT, "output_class0.png"),
                   img_as_ubyte(dic_prob[path][:, :, 0]))
            imsave(join(OUT, "output_class1.png"),
                   img_as_ubyte(dic_prob[path][:, :, 1]))
            imsave(join(OUT, "output_class2.png"),
                   img_as_ubyte(dic_prob[path][:, :, 2]))
            # imsave(join(OUT, "contours_pred.png"), Overlay_with_pred(path, S, color_cont).astype('uint8'))


    if keep_memory:
        return res_AJI, res_F1, res_DSB, res_ps, res_TP, res_FN, res_FP
    else:
        return np.mean(res_AJI), np.mean(res_F1), np.mean(res_DSB)
Example #3
        pbar = ProgressBar()
        for img_test_path in pbar(test_images):
            dic_pred[img_test_path] = model.pred(img_test_path)[0]
        tf.reset_default_graph()  # so that the next model can be restored properly

    HP_dic = {}
    for p1 in P1_List:
        for p2 in P2_list:
            HP_dic[(p1, p2)] = ComputeScores(test_img_all, dic_test_gt_all,
                                             dic_pred, p1, p2)
    tab = pd.DataFrame.from_dict(HP_dic, orient='index')
    tab.columns = ['AJI', 'F1', 'DSB']
    tab.to_csv('Hyper_parameter_selection.csv')
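    # keep the (p1, p2) pair that maximises the mean DSB score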
    P1, P2 = tab["DSB"].idxmax()
    CheckOrCreate(options.output)
    aji__, f1__, DSB__, ps__, tp__, fn__, fp__ = ComputeScores(
        test_img_all, dic_test_gt_all, dic_pred, P1, P2, True, options.output)
    ps__, tp__, fn__, fp__ = [np.array(el) for el in [ps__, tp__, fn__, fp__]]
    pathsss = [
        join(options.output,
             basename(path).replace('.png', '')) for path in test_img_all
    ]
    df_dic = {'path': pathsss, 'F1': f1__, 'AJI': aji__, 'DSB': DSB__}
    for k, t in enumerate(np.arange(0.5, 1, 0.05)):
        df_dic['precision_t_{}'.format(t)] = ps__[:, k]
        df_dic['tp_t_{}'.format(t)] = tp__[:, k]
        df_dic['fn_t_{}'.format(t)] = fn__[:, k]
        df_dic['fp_t_{}'.format(t)] = fp__[:, k]
    tab_values = pd.DataFrame.from_dict(df_dic)
Example #4
                           IMAGE_SIZE=(212, 212),
                           NUM_LABELS=2,
                           NUM_CHANNELS=4,
                           LOG=LOG,
                           N_FEATURES=N_FEATURES,
                           N_THREADS=50,
                           MEAN_FILE=MEAN_FILE)
        for __ in FILES:
            prediction = model.pred(__)
            dic[__].append(prediction)
        tf.reset_default_graph()  # so that the next model can be restored properly

    dic_final_pred = {}
    dic_prob = {}
    CheckOrCreate(options.output_sample)
    for key in dic.keys():
        OUT_ID = join(options.output_sample, basename(key).replace('.png', ''))
        CheckOrCreate(OUT_ID)
        dic_prob[key] = np.mean(np.concatenate(dic[key]), axis=0)[:, :, 1]
        dic_final_pred[key] = PostProcess(dic_prob[key], P1, P2)
        # dic_final_pred[key] = (dic_prob[key] > P2).astype('uint8')
        dic_final_pred[key] = label(dic_final_pred[key])
        dic_final_pred[key] = remove_small_objects(dic_final_pred[key], 32)
        img_mean = np.mean(imread(key)[:, :, 0:3])
        color_cont = img_mean >= 125  # contour colour flag based on mean image brightness
        #put rgb image
        os.symlink(abspath(key), join(OUT_ID, "rgb.png"))
Example #5
            colorout[:,:,i] = normalize_single_channel(temp, maxval=255, minval=0, inv=False)
        imout = colorout.astype(np.uint8)

    return imout

if __name__ == '__main__':
    parser = OptionParser()
    parser.add_option('--input', dest="input", type="str")
    parser.add_option('--output', dest="output", type="str")
    (options, args) = parser.parse_args()

    input_folder = options.input 
    if not os.path.isdir(input_folder):
        raise ValueError("folder %s does not exist" % input_folder)
    output_folder = options.output
    CheckOrCreate(output_folder)

    for root, dirs, files in os.walk(input_folder):        
        local_output_folder = root.replace(input_folder, output_folder)
        CheckOrCreate(local_output_folder)
        for filename in files:
            if filename[0] == ".":
                continue
            if filename.endswith('_mask.png'):
                shutil.copy(os.path.join(root, filename), local_output_folder)
            else:
                imin = skimage.io.imread(os.path.join(root, filename))
                imout = normalize_multi_channel(imin)
                skimage.io.imsave(os.path.join(local_output_folder, filename), imout)

print('DONE')
Example #6
from optparse import OptionParser
import pdb


def GRN():
    return str(uuid.uuid4()).replace("-", "")


parser = OptionParser()
parser.add_option('--input', dest="input", type="str")
(options, args) = parser.parse_args()

TNBC = options.input
OUT = "."

PNG = glob(TNBC + "/Slide_*/*.png")

for png in PNG:
    mask = png.replace('/Slide_', '/GT_')
    name = GRN()
    CheckOrCreate(join(OUT, name))
    CheckOrCreate(join(OUT, name, 'images'))
    CheckOrCreate(join(OUT, name, 'masks'))
    os.symlink(os.path.abspath(png), join(OUT, name, 'images', name + ".png"))
    labels = label(imread(mask))
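    # write each labelled object out as its own binary mask image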
    for i in range(1, labels.max() + 1):
        single = np.zeros_like(labels, dtype='uint8')
        single[labels == i] = 255
        name_sing = GRN()
        imsave(join(OUT, name, 'masks', name_sing + ".png"), single)
Example #7
def save_path(output, rgb, original):
    out = rgb.replace(original, output)
    f = os.path.abspath(os.path.join(out, os.pardir))
    CheckOrCreate(f)
    return out
Example #8
    mask[line > 0] = 0
    mask = distance_transform_cdt(mask)
    imsave(join(dst_mask, basename(mask_name)), mask)


if __name__ == '__main__':
    parser = OptionParser()
    parser.add_option('--input', dest="input", type="str")
    parser.add_option('--output', dest="output", type="str")
    parser.add_option('--splits', dest="splits", type="int")
    (options, args) = parser.parse_args()

    table = pd.read_csv(options.input, index_col=0)
    train = table[table["train"] == 1]
    test = table[table["train"] == 0]
    CheckOrCreate(options.output)
    Create_Key_PerGroup(train, DOMAIN_VARIABLE)

    skf = StratifiedKFold(n_splits=options.splits)
    k = 0
    Fold_k_S = join(options.output, 'Slide_{}')
    Fold_k_M = join(options.output, 'GT_{}')
    for train_index, test_index in skf.split(train, train['group']):
        test_fold = train.iloc[test_index]

        CheckOrCreate(Fold_k_S.format(k))
        CheckOrCreate(Fold_k_M.format(k))
        test_fold.apply(lambda row: Symlink_Mask(row, Fold_k_S.format(k),
                                                 Fold_k_M.format(k)),
                        axis=1)
        k += 1
Example #9
    def train(self, list_img, dic, output_csv):
        data_res = pd.DataFrame()

        epoch = self.STEPS * self.BATCH_SIZE // self.N_EPOCH

        self.LearningRateSchedule(self.LEARNING_RATE, self.K, epoch)

        trainable_var = tf.trainable_variables()

        self.regularize_model()
        self.optimization(trainable_var)
        self.ExponentialMovingAverage(trainable_var, self.DECAY_EMA)

        self.summary_test_writer = tf.summary.FileWriter(self.LOG + '/test',
                                                         graph=self.sess.graph)

        self.summary_writer = tf.summary.FileWriter(self.LOG + '/train',
                                                    graph=self.sess.graph)
        self.merged_summary = tf.summary.merge_all()
        steps = self.STEPS

        init_op = tf.group(tf.global_variables_initializer(),
                           tf.local_variables_initializer())
        self.sess.run(init_op)
        #self.sess.run(self.init_data)
        early_finish = False
        CheckOrCreate("./confusion_matrix_train")
        CheckOrCreate("./confusion_matrix_test")
        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(coord=coord)

        for step in range(steps):
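            # one optimisation step: fetch loss, learning rate, predictions and summaries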
            # print "saving images"
            # self.optimizer is replaced by self.training_op for the exponential moving decay
            _, l, lr, predictions, batch_labels, s = self.sess.run([
                self.training_op, self.loss, self.learning_rate,
                self.train_prediction, self.train_labels_node,
                self.merged_summary
            ])
            if step % self.N_PRINT == 0:
                if step != 0:
                    i = datetime.now()
                    print(i.strftime('%Y/%m/%d %H:%M:%S: \n '))
                    self.summary_writer.add_summary(s, step)
                    error, acc, cm_train = self.error_rate(
                        predictions, batch_labels, step)
                    print('  Step %d of %d' % (step, steps))
                    print('  Learning rate: %.5f \n' % lr)
                    print(
                        '  Mini-batch loss: %.5f \n       Accuracy: %.1f%% \n'
                        % (l, acc))
                    l, acc, cm, wgt_path = self.Validation(list_img, dic, step)
                    data_res.loc[step, "loss"] = l
                    data_res.loc[step, "acc"] = acc
                    data_res.loc[step, "wgt_path"] = abspath(wgt_path)
                    np.save(
                        "./confusion_matrix_train" + "/cm_{}.npy".format(step),
                        cm_train)
                    np.save(
                        "./confusion_matrix_test" + "/cm_{}.npy".format(step),
                        cm)
                    if self.early_stopping(data_res, "acc"):
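                        # symlink the best checkpoint under a later step number so it is treated as the most recent one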
                        best_wgt = np.array(
                            data_res["wgt_path"])[-(self.early_stopping_max +
                                                    1)]
                        make_it_seem_new = self.LOG + '/' + "model.ckpt-{}".format(
                            step + 10)
                        os.symlink(best_wgt + ".data-00000-of-00001",
                                   make_it_seem_new + ".data-00000-of-00001")
                        os.symlink(best_wgt + ".index",
                                   make_it_seem_new + ".index")
                        os.symlink(best_wgt + ".meta",
                                   make_it_seem_new + ".meta")
                        early_finish = True
                        break
        if not early_finish:
            best_wgt = np.array(
                data_res["wgt_path"])[-(self.early_stopping_max + 1)]
            make_it_seem_new = self.LOG + '/' + "model.ckpt-{}".format(step +
                                                                       10)
            os.symlink(best_wgt + ".data-00000-of-00001",
                       make_it_seem_new + ".data-00000-of-00001")
            os.symlink(best_wgt + ".index", make_it_seem_new + ".index")
            os.symlink(best_wgt + ".meta", make_it_seem_new + ".meta")
        coord.request_stop()
        coord.join(threads)
        data_res.to_csv(output_csv)
Example #10

def ComputeScore(list_csv, var_name):
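    """Return the best `var_name` value per CSV, followed by each CSV's row count minus 10."""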
    scores = np.zeros(len(list_csv), dtype="float")
    epoch_number = np.zeros(len(list_csv), dtype="float")
    for i, csv in enumerate(list_csv):
        tables = pd.read_csv(csv, index_col=0)
        scores[i] = tables[[var_name]].max()
        epoch_number[i] = tables.shape[0] - 10
    return np.concatenate([scores, epoch_number])


if __name__ == '__main__':
    hyper_param_test = GetNames()
    dic = {}
    try:
        for key in hyper_param_test.keys():
            dic[key] = ComputeScore(hyper_param_test[key], "F1")
    except KeyError:
        for key in hyper_param_test.keys():
            dic[key] = ComputeScore(hyper_param_test[key], "acc")
    n = len(hyper_param_test[key])
    tab = pd.DataFrame.from_dict(dic, orient='index')
    mean = tab[[i for i in range(n)]].mean(axis=1)
    std = tab[[i for i in range(n)]].std(axis=1)
    tab["mean"] = mean
    tab["std"] = std
    best = mean.idxmax()
    CheckOrCreate(best)
    tab.to_csv('test_tables.csv')
Example #11
    return res.astype('uint8')


def PostProcessGuess(name, img):
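    # distance-map outputs ("Dist" models) are post-processed as-is;
    # probability maps are first rescaled from [0, 255] to [0, 1]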
    if "Dist" in name:
        return PostProcess(img, 0, 0.5)
    else:
        img = img.astype('float')
        img = img / 255.
        return PostProcess(img, 14, 0.5)


if __name__ == '__main__':
    list_models = ["UNetHistogramTW2", "UNetDistHistogramTW2", "Test"]
    OUTPUT = join(PATH, "Sum")
    CheckOrCreate(OUTPUT)

    general_dic = GatherMultipleModels(list_models, "Test")
    out_tag = "output_DNN_mean"
    for name, dic in Model_gen(general_dic,
                               tags=[out_tag, "colored_pred", "rgb"]):
        OUTPUT_img = join(OUTPUT, name)
        CheckOrCreate(OUTPUT_img)
        # for el in dic["output_DNN_mean"].keys():
        #     imsave(join(OUTPUT_img, "output_DNN__" + el + ".png"), dic["output_DNN_mean"][el])
        list_pred = []
        for el in dic[out_tag].keys():
            colored_bin = PostProcessGuess(el, dic[out_tag][el])
            colored_bin[colored_bin > 0] = 255
            list_pred.append(colored_bin)
Example #12
from patch_img import Contours


def GenerateGT(input, output):
    files_GT = os.path.join(input, "GT_*", "*.png")
    for gt_path in glob(files_GT):
        out_path = gt_path.replace(input, output)
        yield gt_path, out_path


def PutContoursTo2(gt, size=3):
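    # build a 3-class target map: 0 = background, 1 = object interior, 2 = contour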
    gt = imread(gt)
    if len(gt.shape) > 2:
        gt = gt[:, :, 0]
    gt[gt > 0] = 1
    contours = Contours(gt, size)
    gt[contours > 0] = 2
    return gt


if __name__ == '__main__':
    parser = OptionParser()
    parser.add_option('--input', dest="input", type="str")
    parser.add_option('--output', dest="output", type="str")
    (options, args) = parser.parse_args()
    #CheckOrCreate(options.output)
    for gt_, out_ in GenerateGT(options.input, options.output):
        CheckOrCreate(os.path.dirname(out_))
        result = PutContoursTo2(gt_)
        imsave(out_, result)
Example #13
    parser.add_option('--train', action='store_true')

    # actions: predict on the training set yes/no
    parser.add_option('--predict_train', action='store_true')

    # actions: predict on the test set yes/no
    parser.add_option('--predict_test', action='store_true')

    (options, args) = parser.parse_args()

    input_folder = options.input
    if not os.path.isdir(input_folder):
        raise ValueError("folder %s does not exist" % input_folder)

    output_folder = options.output
    CheckOrCreate(output_folder)

    model_folder = options.modelfolder
    CheckOrCreate(model_folder)

    show_folder = options.showfolder
    CheckOrCreate(show_folder)

    nn_names = [x.strip() for x in options.nn_names.split(',')]

    ec = EdgeClassifier(input_folder, output_folder, model_folder, show_folder,
                        nn_names)

    if options.feature_extraction:
        # feature extraction
        ts = ec.make_edge_training_set()