# Example 1
# Collected example entry points for (continuing) EM training of the histology
# nets. The project-local modules (data_tf, dataset, ldata, bdata, train,
# train_em, lnet, bnet) and the module-level constants (BASENAME, EMRUNSTAMP,
# SIMPLERUNSTAMP, BATCH_SIZE, batch_size, SPATIALSMOOTHING, DROPOUT_RATIO,
# LEARNING_RATE, TRAIN_DISC_FINDER, PRED_DISC_FINDER, SPLIT_SEED) are assumed
# to be provided by the surrounding repository.
import os

import numpy as np
import tensorflow as tf


def continue_em_training(epochNumber):
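    """Resume EM training of the lymphoma net from a previously saved EM
    checkpoint (initial epoch 103), writing logs and checkpoints into a new
    "_cont" run folder."""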
    initialEpoch = 103

    netRoot = "/home/oole/lymphoma_net/"
    runName = BASENAME + "_em_" + EMRUNSTAMP + "_cont/"
    modelName = "lymph_model"

    if not os.path.exists(netRoot):
        os.makedirs(netRoot)
    else:
        print("Net root folder already extists.")
    if not os.path.exists(netRoot + runName):
        os.makedirs(netRoot + runName)
    else:
        print("Run folder already extists.")

    old_em_savepath = netRoot + BASENAME + "_em_" + EMRUNSTAMP + "/" + BASENAME + "_em"  # checkpoint of the original (non-"_cont") run
    em_train_savepath = netRoot + runName + BASENAME + "_em"
    logfile_path = netRoot + runName + BASENAME + "_net_log_em.csv"
    logreg_savepath = netRoot + runName + BASENAME + "_logreg"

    # load data
    # split into train val
    dataPath = "/home/oole/data_lymphoma/"
    trainSlideData = ldata.collect_data(dataPath)
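    # The validation SlideData below wraps the same slide list as the training
    # data; no separate validation directory is read here.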
    valSlideData = data_tf.SlideData(
        trainSlideData.getSlideList(),
        None,
        np.asarray(trainSlideData.getSlideList()).size,
        trainSlideData.getSlideLabelList(),
        trainSlideData.getLabelFunc(),
        False,
        labelencoder=trainSlideData.getLabelEncoder(),
        parseFunctionAugment=trainSlideData.getparseFunctionNormal(),
        parseFunction=trainSlideData.getparseFunctionNormal())

    train_em.emtrain(trainSlideData,
                     valSlideData,
                     old_em_savepath,
                     em_train_savepath,
                     BATCH_SIZE,
                     initial_epochnum=initialEpoch,
                     model_name=modelName,
                     spatial_smoothing=SPATIALSMOOTHING,
                     do_augment=True,
                     num_epochs=epochNumber,
                     dropout_ratio=DROPOUT_RATIO,
                     learning_rate=LEARNING_RATE,
                     sanity_check=False,
                     logfile_path=logfile_path,
                     logreg_savepath=logreg_savepath,
                     runName=runName,
                     netAcc=None,
                     buildNet=lnet.getLymphNet,
                     valIsTestData=True,
                     discriminativePatchFinderTrain=TRAIN_DISC_FINDER,
                     discriminativePatchFinderPredict=PRED_DISC_FINDER,
                     splitSeed=SPLIT_SEED)

    print("Data collected.")


# Example 2
def continue_em_training():
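    """Resume EM training of the lymphoma net from the checkpoint of run
    lymphoma_em_180815_2 (initial epoch 103), saving into a new "_cont" run
    folder."""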
    initialEpoch = 103

    netRoot = "/home/oole/lymphoma_net/"
    runName = "lymphoma_em_180815_2_cont/"
    modelName = "lymph_model"

    if not os.path.exists(netRoot):
        os.makedirs(netRoot)
    else:
        print("Net root folder already extists.")
    if not os.path.exists(netRoot + runName):
        os.makedirs(netRoot + runName)
    else:
        print("Run folder already extists.")

    old_em_savepath = netRoot + "lymphoma_em_180815_2/" + "lympf_em"
    em_train_savepath = netRoot + runName + "lympf_em"
    logfile_path = netRoot + runName + "lymph_net_log_em.csv"
    logreg_savepath = netRoot + runName + "lymph_logreg"

    # load data
    # split into train val
    dataPath = "/home/oole/data_lymphoma/"
    trainSlideData = ldata.collect_data(dataPath)
    valSlideData = data_tf.SlideData(
        trainSlideData.getSlideList(),
        None,
        np.asarray(trainSlideData.getSlideList()).size,
        trainSlideData.getSlideLabelList(),
        trainSlideData.getLabelFunc(),
        False,
        labelencoder=trainSlideData.getLabelEncoder(),
        parseFunctionAugment=trainSlideData.getparseFunctionNormal(),
        parseFunction=trainSlideData.getparseFunctionNormal())

    train_em.emtrain(trainSlideData,
                     valSlideData,
                     old_em_savepath,
                     em_train_savepath,
                     BATCH_SIZE,
                     initial_epochnum=initialEpoch,
                     model_name=modelName,
                     spatial_smoothing=SPATIALSMOOTHING,
                     do_augment=True,
                     num_epochs=100,
                     dropout_ratio=DROPOUT_RATIO,
                     learning_rate=LEARNING_RATE,
                     sanity_check=False,
                     logfile_path=logfile_path,
                     logreg_savepath=logreg_savepath,
                     runName=runName,
                     netAcc=None,
                     buildNet=lnet.getLymphNet)

    print("Data collected.")


# Example 3
def train_augment():
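    """EM training with augmentation on the patient-patch data, continuing
    from and saving to the tfnet_em_full checkpoint; the initial non-EM
    training step is left commented out below."""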
    # simple_train_savepath = "/home/oole/tfnetsave/tfnet_full"
    simple_train_savepath = "/home/oole/tfnetsave/tfnet_em_full"
    em_train_savepath = "/home/oole/tfnetsave/tfnet_em_full"

    initial_epoch = 0

    train_datapath = "/home/oole/Data/training/patient_patches_jpg"
    # train_datapath = '/home/oole/tf_test_data/validation'
    val_datapath = "/home/oole/Data/validation/patient_patches_jpg"

    logfile_path = "/home/oole/tfnetsave/tfnet_em_full_log.csv"
    logreg_savepath = "/home/oole/tfnetsave/tfnet_em_full_logreg"

    model_name = "model"

    label_encoder = data_tf.labelencoder()

    train_slidelist, train_slide_dimensions, old_disc_patches, _ = data_tf.collect_data(
        train_datapath, batch_size)
    val_slidelist, _, _, _ = data_tf.collect_data(val_datapath, batch_size)

    train_patches = dataset.slidelist_to_patchlist(train_slidelist)
    val_patches = dataset.slidelist_to_patchlist(val_slidelist)
    np.random.shuffle(train_patches)
    np.random.shuffle(val_patches)

    # Initial training
    #train_accuracy, val_accuracy = train.train_net(train_patches, val_patches, num_epochs=2, batch_size=batch_size,
    #                                               savepath=simple_train_savepath, do_augment=True, model_name=model_name)

    #util.write_log_file(logfile_path, train_accuracy=train_accuracy, val_accuracy=val_accuracy)

    # Test continue training
    # train.train_net(train_patches, val_patches, num_epochs=2, batch_size=batch_size, savepath=simple_train_savepath,
    #                 loadpath=simple_train_savepath, do_augment=False, model_name="model")

    train_em.emtrain(train_datapath,
                     val_datapath,
                     simple_train_savepath,
                     em_train_savepath,
                     label_encoder,
                     batch_size,
                     initial_epochnum=initial_epoch,
                     model_name=model_name,
                     spatial_smoothing=False,
                     do_augment=True,
                     num_epochs=2,
                     dropout_ratio=0.5,
                     learning_rate=0.0005,
                     sanity_check=False,
                     logfile_path=logfile_path,
                     logreg_savepath=logreg_savepath)
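

# Example 4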
def em_training(initialEpoch=2, epochNumber=198):
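    """EM training of the lymphoma net, initialized from the simple (non-EM)
    training checkpoint of run SIMPLERUNSTAMP; train and test data are read
    from separate train/ and test/ directories."""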

    netRoot = "/home/oole/lymphoma_net_vgg/"
    modelName = BASENAME + "_model"
    runName = BASENAME + "_em_" + EMRUNSTAMP + "/"

    if not os.path.exists(netRoot):
        os.makedirs(netRoot)
    else:
        print("Net root folder already extists.")
    if not os.path.exists(netRoot + runName):
        os.makedirs(netRoot + runName)
    else:
        print("Run folder already extists.")

    simple_train_loadpath = netRoot + BASENAME + "_simple_" + SIMPLERUNSTAMP + "/" + BASENAME + "_simple"
    em_train_savepath = netRoot + runName + BASENAME + "_em"
    logfile_path = netRoot + runName + BASENAME + "_net_log_em.csv"
    logreg_savepath = netRoot + runName + BASENAME + "_logreg"

    # load data
    # split into train val
    basePath = "/home/oole/data_lymphoma/"
    trainDataPath = basePath + "train/"
    testDataPath = basePath + "test/"
    trainSlideData = ldata.collect_data(trainDataPath)
    testSlideData = ldata.collect_data(testDataPath)

    train_em.emtrain(trainSlideData,
                     testSlideData,
                     simple_train_loadpath,
                     em_train_savepath,
                     BATCH_SIZE,
                     initial_epochnum=initialEpoch,
                     model_name=modelName,
                     spatial_smoothing=SPATIALSMOOTHING,
                     do_augment=True,
                     num_epochs=epochNumber,
                     dropout_ratio=DROPOUT_RATIO,
                     learning_rate=LEARNING_RATE,
                     sanity_check=False,
                     logfile_path=logfile_path,
                     logreg_savepath=logreg_savepath,
                     runName=runName,
                     netAcc=None,
                     buildNet=lnet.getLymphNet,
                     valIsTestData=True,
                     discriminativePatchFinderTrain=TRAIN_DISC_FINDER,
                     discriminativePatchFinderPredict=PRED_DISC_FINDER,
                     splitSeed=SPLIT_SEED)

    print("Data collected.")


# Example 5
def em_training():
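    """EM training of the lymphoma net (run lymphoma_em_180910), initialized
    from the simple training checkpoint of run lymphoma_simple_180910."""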
    initialEpoch = 2
    trainepochs = 200

    netRoot = "/home/oole/lymphoma_net/"
    runName = "lymphoma_em_180910/"
    modelName = "lymph_model"

    if not os.path.exists(netRoot):
        os.makedirs(netRoot)
    else:
        print("Net root folder already extists.")
    if not os.path.exists(netRoot + runName):
        os.makedirs(netRoot + runName)
    else:
        print("Run folder already extists.")

    simple_train_loadpath = netRoot + "lymphoma_simple_180910/" + "lymph_simple"
    em_train_savepath = netRoot + runName + "lympf_em"
    logfile_path = netRoot + runName + "lymph_net_log_em.csv"
    logreg_savepath = netRoot + runName + "lymph_logreg"

    # load data
    # split into train val
    basePath = "/home/oole/data_lymphoma/"
    trainDataPath = basePath + "train/"
    testDataPath = basePath + "test/"
    trainSlideData = ldata.collect_data(trainDataPath)
    testSlideData = ldata.collect_data(testDataPath)

    train_em.emtrain(trainSlideData,
                     testSlideData,
                     simple_train_loadpath,
                     em_train_savepath,
                     BATCH_SIZE,
                     initial_epochnum=initialEpoch,
                     model_name=modelName,
                     spatial_smoothing=SPATIALSMOOTHING,
                     do_augment=True,
                     num_epochs=trainepochs,
                     dropout_ratio=DROPOUT_RATIO,
                     learning_rate=LEARNING_RATE,
                     sanity_check=False,
                     logfile_path=logfile_path,
                     logreg_savepath=logreg_savepath,
                     runName=runName,
                     netAcc=None,
                     buildNet=lnet.getLymphNet,
                     valIsTestData=True)

    print("Data collected.")


# Example 6
def em_training():
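    """EM training of the breast histology net, initialized from the simple
    training checkpoint of run breasthistology_simple_180815."""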
    initialEpoch = 2

    netRoot = "/home/oole/breasthistology_net/"
    runName = "breasthistology_em_180815_2/"
    modelName = "breasthistology_model"

    if not os.path.exists(netRoot):
        os.makedirs(netRoot)
    else:
        print("Net root folder already extists.")
    if not os.path.exists(netRoot + runName):
        os.makedirs(netRoot + runName)
    else:
        print("Run folder already extists.")

    simple_train_loadpath = netRoot + "breasthistology_simple_180815/" + "breasthistology_simple"
    em_train_savepath = netRoot + runName + "breasthistology_em"
    logfile_path = netRoot + runName + "breasthistology_net_log_em.csv"
    logreg_savepath = netRoot + runName + "breasthistology_logreg"

    # load data
    # split into train val
    trainDataPath = "/home/oole/breasthistology/training/"
    testDataPath = "/home/oole/breasthistology/testing/"
    trainSlideData = bdata.collect_data(trainDataPath)
    testSlideData = bdata.collect_data(testDataPath)

    train_em.emtrain(trainSlideData,
                     testSlideData,
                     simple_train_loadpath,
                     em_train_savepath,
                     BATCH_SIZE,
                     initial_epochnum=initialEpoch,
                     model_name=modelName,
                     spatial_smoothing=SPATIALSMOOTHING,
                     do_augment=True,
                     num_epochs=100,
                     dropout_ratio=DROPOUT_RATIO,
                     learning_rate=LEARNING_RATE,
                     sanity_check=False,
                     logfile_path=logfile_path,
                     logreg_savepath=logreg_savepath,
                     runName=runName,
                     netAcc=None,
                     buildNet=bnet.getBreasthistoNet,
                     valIsTestData=True)

    print("Data collected.")
def cross_val_training(foldToTrain, numberOfEpochs=30):
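    """Train a single fold of a 5-fold cross-validation on the lymphoma data:
    a short simple training phase (2 epochs) followed by EM training up to
    initialEpoch + numberOfEpochs epochs on the fold selected by foldToTrain."""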
    initialEpoch = 0

    netRoot = "/home/oole/lymphoma_net_vgg/"
    modelName = BASENAME + "_model"

    if not os.path.exists(netRoot):
        os.makedirs(netRoot)
    else:
        print("Net root folder already extists.")

    # load data
    # split into train val
    basePath = "/home/oole/data_lymphoma_full/"
    trainDataPath = basePath + "train/"
    # testDataPath = basePath + "test/"
    trainSlideData = ldata.collect_data(trainDataPath)
    testSlideData = trainSlideData

    sess = tf.Session()

    netAcc = None

    foldNum = 1

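    # Iterate over the 5 cross-validation folds; only the fold selected via
    # foldToTrain is actually trained.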
    for trainSlideData, testSlideData in data_tf.KFoldSlideList(
            trainSlideData, testSlideData, numberOfSplits=5, splitSeed=1337):
        print("Fold number: " + str(foldNum))
        if foldNum == foldToTrain:
            # train slide data should consist of all available training data
            # is split into test and training
            runName = BASENAME + "_simple_" + SIMPLERUNSTAMP + "_k-" + str(
                foldNum) + "-5" + "/"

            if not os.path.exists(netRoot + runName):
                os.makedirs(netRoot + runName)
            else:
                print("Run folder already extists.")

            train_savepath = netRoot + runName + BASENAME + "_k-" + str(
                foldNum) + "-5"
            logreg_savepath = netRoot + runName + BASENAME + "_logreg" + "_k-" + str(
                foldNum) + "-5"
            logfile_path = netRoot + runName + BASENAME + "_k-" + str(
                foldNum) + "-5" + "_net_log_em.csv"

            if netAcc is not None:
                netAcc.getSummmaryWriter(runName, sess.graph, forceNew=True)

            _, _, netAcc = train.train_net(trainSlideData,
                                           testSlideData,
                                           num_epochs=2,
                                           batch_size=BATCH_SIZE,
                                           savepath=train_savepath,
                                           do_augment=True,
                                           model_name=modelName,
                                           getlabel_train=ldata.getlabel,
                                           runName=runName,
                                           lr=LEARNING_RATE,
                                           buildNet=lnet.getLymphNet,
                                           valIsTestData=True,
                                           splitSeed=SPLIT_SEED,
                                           sess=sess,
                                           netAcc=netAcc,
                                           initialEpoch=initialEpoch,
                                           verbose=1,
                                           do_simple_validation=False)

            print("Finished Simple Training")

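            # Continue with EM training in the same session: no checkpoint path
            # is passed here, the trained net is handed over via netAcc instead.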
            train_em.emtrain(trainSlideData,
                             testSlideData,
                             None,
                             train_savepath,
                             BATCH_SIZE,
                             initial_epochnum=initialEpoch + 2,
                             model_name=modelName,
                             spatial_smoothing=SPATIALSMOOTHING,
                             do_augment=True,
                             num_epochs=initialEpoch + numberOfEpochs,
                             dropout_ratio=DROPOUT_RATIO,
                             learning_rate=LEARNING_RATE,
                             sanity_check=False,
                             logreg_savepath=logreg_savepath,
                             runName=runName,
                             netAcc=netAcc,
                             valIsTestData=True,
                             discriminativePatchFinderTrain=TRAIN_DISC_FINDER,
                             discriminativePatchFinderPredict=PRED_DISC_FINDER,
                             splitSeed=SPLIT_SEED,
                             sess=sess,
                             verbose=1,
                             do_simple_validation=False)

        foldNum = foldNum + 1
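

# Hypothetical usage sketch (not part of the original examples): each function
# above is an independent entry point, so a driver would typically call just
# one of them. The chosen function and its arguments here are illustrative
# assumptions only.
if __name__ == "__main__":
    cross_val_training(foldToTrain=1, numberOfEpochs=30)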