Пример #1
0
def em_training():
    """Run EM training of the lymphoma net, warm-started from a pretrained
    "simple" checkpoint.

    Creates the run output folders, loads train/test slide data via
    ``ldata.collect_data`` and delegates to ``train_em.emtrain``.

    Relies on module-level configuration (``BATCH_SIZE``,
    ``SPATIALSMOOTHING``, ``DROPOUT_RATIO``, ``LEARNING_RATE``) and the
    project modules ``ldata``, ``train_em`` and ``lnet``.

    Returns:
        None. Side effects: creates directories, writes checkpoints and
        CSV logs under ``netRoot + runName``.
    """
    initialEpoch = 2      # epoch count already covered by the simple pretraining
    trainepochs = 200

    netRoot = "/home/oole/lymphoma_net/"
    runName = "lymphoma_em_180910/"
    modelName = "lymph_model"

    # Create output folders; only report when they already exist so reruns
    # are visible in the console log.
    if not os.path.exists(netRoot):
        os.makedirs(netRoot)
    else:
        print("Net root folder already exists.")
    if not os.path.exists(netRoot + runName):
        os.makedirs(netRoot + runName)
    else:
        print("Run folder already exists.")

    # NOTE(review): "lympf_em" looks like a typo of "lymph_em", but the same
    # spelling is used by the continue-training run, so it is kept for
    # checkpoint-path compatibility.
    simple_train_loadpath = netRoot + "lymphoma_simple_180910/" + "lymph_simple"
    em_train_savepath = netRoot + runName + "lympf_em"
    logfile_path = netRoot + runName + "lymph_net_log_em.csv"
    logreg_savepath = netRoot + runName + "lymph_logreg"

    # Load data; train and test sets live in separate directory trees.
    basePath = "/home/oole/data_lymphoma/"
    trainDataPath = basePath + "train/"
    testDataPath = basePath + "test/"
    trainSlideData = ldata.collect_data(trainDataPath)
    testSlideData = ldata.collect_data(testDataPath)

    train_em.emtrain(trainSlideData,
                     testSlideData,
                     simple_train_loadpath,
                     em_train_savepath,
                     BATCH_SIZE,
                     initial_epochnum=initialEpoch,
                     model_name=modelName,
                     spatial_smoothing=SPATIALSMOOTHING,
                     do_augment=True,
                     num_epochs=trainepochs,
                     dropout_ratio=DROPOUT_RATIO,
                     learning_rate=LEARNING_RATE,
                     sanity_check=False,
                     logfile_path=logfile_path,
                     logreg_savepath=logreg_savepath,
                     runName=runName,
                     netAcc=None,
                     buildNet=lnet.getLymphNet,
                     valIsTestData=True)  # use the test split for validation

    # NOTE(review): printed after training finishes, so the message is
    # misleading; kept byte-identical to avoid changing observable output.
    print("Data collected.")
Пример #2
0
def continue_simple_training():
    """Continue "simple" (non-EM) training from an existing checkpoint.

    Creates the run output folders, loads train/test slide data via
    ``ldata.collect_data`` and delegates to ``train.train_net``, resuming
    from ``old_simple_savepath`` at ``initialEpoch``.

    Relies on module-level configuration (``BATCH_SIZE``, ``LEARNING_RATE``)
    and the project modules ``ldata``, ``train`` and ``lnet``.

    Returns:
        None. Side effects: creates directories, writes checkpoints and
        CSV logs under ``netRoot + runName``.
    """
    epochs = 100
    initialEpoch = 100   # resume point: the prior run already trained 100 epochs

    netRoot = "/home/oole/lymphoma_net/"
    runName = "lymphoma_simple_180907_cont_sanity/"
    modelName = "lymph_model"

    # Create output folders; only report when they already exist so reruns
    # are visible in the console log.
    if not os.path.exists(netRoot):
        os.makedirs(netRoot)
    else:
        print("Net root folder already exists.")
    if not os.path.exists(netRoot + runName):
        os.makedirs(netRoot + runName)
    else:
        print("Run folder already exists.")

    old_simple_savepath = netRoot + "lymphoma_simple_180907/" + "lymph_simple"
    simple_cont_savepath = netRoot + runName + modelName
    logfile_path = netRoot + runName + "lymph_net_log.csv"
    logreg_savepath = netRoot + runName + "lymph_logreg"

    # Load data; train and test sets live in separate directory trees.
    basePath = "/home/oole/data_lymphoma/"
    trainDataPath = basePath + "train/"
    testDataPath = basePath + "test/"
    trainSlideData = ldata.collect_data(trainDataPath)
    testSlideData = ldata.collect_data(testDataPath)

    train.train_net(trainSlideData,
                    testSlideData,
                    num_epochs=epochs,
                    batch_size=BATCH_SIZE,
                    savepath=simple_cont_savepath,
                    do_augment=True,
                    model_name=modelName,
                    getlabel_train=ldata.getlabel,
                    log_savepath=logreg_savepath,
                    runName=runName,
                    lr=LEARNING_RATE,
                    buildNet=lnet.getLymphNet,
                    valIsTestData=True,  # use the test split for validation
                    initialEpoch=initialEpoch,
                    loadpath=old_simple_savepath)

    # NOTE(review): printed after training finishes, so the message is
    # misleading; kept byte-identical to avoid changing observable output.
    print("Data collected.")
Пример #3
0
def continue_em_training():
    """Continue EM training from an existing EM checkpoint.

    Creates the run output folders, loads slide data via
    ``ldata.collect_data``, builds a validation ``SlideData`` view over the
    same slides (i.e. validation reuses the training slides here, unlike the
    other runs that validate on a separate test set), and delegates to
    ``train_em.emtrain`` resuming at ``initialEpoch``.

    Relies on module-level configuration (``BATCH_SIZE``,
    ``SPATIALSMOOTHING``, ``DROPOUT_RATIO``, ``LEARNING_RATE``) and the
    project modules ``ldata``, ``data_tf``, ``train_em`` and ``lnet``.

    Returns:
        None. Side effects: creates directories, writes checkpoints and
        CSV logs under ``netRoot + runName``.
    """
    initialEpoch = 103   # resume point of the interrupted EM run

    netRoot = "/home/oole/lymphoma_net/"
    runName = "lymphoma_em_180815_2_cont/"
    modelName = "lymph_model"

    # Create output folders; only report when they already exist so reruns
    # are visible in the console log.
    if not os.path.exists(netRoot):
        os.makedirs(netRoot)
    else:
        print("Net root folder already exists.")
    if not os.path.exists(netRoot + runName):
        os.makedirs(netRoot + runName)
    else:
        print("Run folder already exists.")

    # NOTE(review): "lympf_em" looks like a typo of "lymph_em", but it must
    # match the spelling the original EM run saved under, so it is kept.
    old_em_savepath = netRoot + "lymphoma_em_180815_2/" + "lympf_em"
    em_train_savepath = netRoot + runName + "lympf_em"
    logfile_path = netRoot + runName + "lymph_net_log_em.csv"
    logreg_savepath = netRoot + runName + "lymph_logreg"

    # Load data. Validation wraps the SAME slides as training, with
    # augmentation disabled (both parse functions set to the normal,
    # non-augmenting variant) — presumably an in-sample sanity validation;
    # confirm against data_tf.SlideData's signature.
    dataPath = "/home/oole/data_lymphoma/"
    trainSlideData = ldata.collect_data(dataPath)
    valSlideData = data_tf.SlideData(
        trainSlideData.getSlideList(),
        None,
        np.asarray(trainSlideData.getSlideList()).size,
        trainSlideData.getSlideLabelList(),
        trainSlideData.getLabelFunc(),
        False,
        labelencoder=trainSlideData.getLabelEncoder(),
        parseFunctionAugment=trainSlideData.getparseFunctionNormal(),
        parseFunction=trainSlideData.getparseFunctionNormal())

    train_em.emtrain(trainSlideData,
                     valSlideData,
                     old_em_savepath,
                     em_train_savepath,
                     BATCH_SIZE,
                     initial_epochnum=initialEpoch,
                     model_name=modelName,
                     spatial_smoothing=SPATIALSMOOTHING,
                     do_augment=True,
                     num_epochs=100,
                     dropout_ratio=DROPOUT_RATIO,
                     learning_rate=LEARNING_RATE,
                     sanity_check=False,
                     logfile_path=logfile_path,
                     logreg_savepath=logreg_savepath,
                     runName=runName,
                     netAcc=None,
                     buildNet=lnet.getLymphNet)

    # NOTE(review): printed after training finishes, so the message is
    # misleading; kept byte-identical to avoid changing observable output.
    print("Data collected.")
Пример #4
0
import data_tf
import tensorflow as tf
import dataset
import lymphoma.l_data as ldata

labelEncoder = ldata.labelencoder()
dataPath = "/home/oole/data_lymphoma/"
trainSlideData = ldata.collect_data(dataPath)
trainSlideData.setLabelEncoder(labelEncoder)

with tf.Session() as sess:
    batchSize = 64

    slideDataset = dataset.img_dataset_augment(
        trainSlideData.getSlideList()[0],
        batch_size=batchSize,
        shuffle_buffer_size=None,
        shuffle=False,
        getlabel=ldata.getlabel,
        labelEncoder=labelEncoder,
        parseFunctionAugment=ldata.l_image_augment)
    iterator = tf.data.Iterator.from_structure(slideDataset.output_types,
                                               slideDataset.output_shapes)

    iterator_ops = []
    for slide in trainSlideData.getSlideList():
        slideDataset = dataset.img_dataset_augment(
            slide,
            batch_size=batchSize,
            shuffle_buffer_size=None,
            shuffle=False,