Code example #1
def predict( flag ):

    # Fixed configuration used for inference (overrides the incoming flag values)
    flag.network = "unet"
    flag.data_augmentation = 0
    flag.primitive_augmentation = "rotation_3"
    flag.epochs = 500
    flag.learning_rate = 1e-5
    flag.batch_size = 32
    flag.loss_metric = "categorical_crossentropy"

    img_rows = flag.image_size
    img_cols = flag.image_size

    print('-'*30)
    print('Loading testing data...')
    print('-'*30)
    
    imgs_train, imgs_test, imgs_mask_train_all, imgs_mask_test_all, imgs_id_test, imgs_id_train = loadDataParams( flag )
    images = imgs_test
    images = preprocess(images, img_rows, img_cols )
   
    regionList = flag.region_list
    nClasses = len(regionList)+1

    masks = toMultiLabelMask( imgs_mask_test_all, nClasses, regionList, img_rows, img_cols  )

    metrics = getMetricFunctions()
    model = getModel( flag.network, nClasses, flag.optimizer, flag.activation, flag.loss_metric, metrics, flag.learning_rate, flag.image_size )
    model.load_weights( flag.load_weights_file )

    pred = model.predict( images, batch_size=flag.batch_size, verbose=0, steps=None)
    
    return images, pred, masks
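The array returned by model.predict above has one probability channel per class, in the same channel order that toMultiLabelMask uses (channel 0 is background). As a minimal sketch, and assuming that layout, the prediction can be collapsed into a per-pixel label map with argmax; toLabelMap is a hypothetical helper added here for illustration, not part of the original code.

import numpy as np

def toLabelMap( pred ):
    # Collapse the per-class probability channels produced by predict()
    # into one label image per sample: 0 = background,
    # 1..len(regionList) = region indices, matching toMultiLabelMask()
    return np.argmax( pred, axis=-1 ).astype( np.uint8 )

# Example with random "probabilities" of shape (nImages, rows, cols, nClasses)
dummy_pred = np.random.rand( 2, 4, 4, 3 )
labels = toLabelMap( dummy_pred )
print( labels.shape )   # (2, 4, 4)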
Code example #2
def toMultiLabelMask( masks_all, nClasses, regionList, img_rows, img_cols ):

    s = masks_all[regionList[0]].shape
    nImages = s[0]
    masks = np.zeros( (nImages, img_rows, img_cols, nClasses) )
    masks[:,:,:,0] = np.ones( (nImages, img_rows, img_cols) )
    for regionIndex in range(len(regionList)):
        # put the mask layer of the region index to 1
        masks_region = masks_all[regionList[regionIndex]]
        masks_region = preprocess(masks_region, img_rows, img_cols )
        temp = masks_region[:,:,:,0]
        masks[:,:,:,regionIndex+1] = temp
        # and the background layer to 0, for every region pixel
        masks[:,:,:,0] = (1 - temp) * masks[:,:,:,0]

    return masks
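To make the channel bookkeeping above concrete, here is a minimal self-contained sketch with two synthetic 4x4 masks (no preprocess call, all names hypothetical): channel 0 starts as an all-ones background, each region fills its own channel, and the background is cleared wherever a region is present, so the channels behave as a one-hot encoding as long as the regions do not overlap.

import numpy as np

# Two synthetic 1x4x4 binary masks standing in for preprocessed region masks
region_a = np.zeros( (1, 4, 4) )
region_a[0, :2, :] = 1            # top half
region_b = np.zeros( (1, 4, 4) )
region_b[0, 2:, :2] = 1           # bottom-left quarter

nClasses = 3                      # background + 2 regions
masks = np.zeros( (1, 4, 4, nClasses) )
masks[:, :, :, 0] = 1             # background channel starts as all ones
for i, region in enumerate( [region_a, region_b] ):
    masks[:, :, :, i + 1] = region          # fill the region channel
    masks[:, :, :, 0] *= (1 - region)       # clear background under the region

# With non-overlapping regions every pixel belongs to exactly one channel
print( masks.sum( axis=-1 ).min(), masks.sum( axis=-1 ).max() )   # 1.0 1.0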
Code example #3
def train( flag ):
    
    K.clear_session()
    
    img_rows = flag.image_size
    img_cols = flag.image_size
    n_epochs = flag.epochs

    print('-'*30)
    print('Loading train data...')
    print('-'*30)
    
    imgs_train, imgs_test, imgs_mask_train_all, imgs_mask_test_all, imgs_id_test, imgs_id_train = loadDataParams( flag )
    images = imgs_train
    images = preprocess(images, img_rows, img_cols )
    masks_all = imgs_mask_train_all

    print('-'*30)
    print('Fuse masks to single multi-label image')
    print('-'*30)
    regionList = flag.region_list
    regionIdList = range(1, len(regionList)+1)
    nClasses = len(regionList)+1
    s = masks_all[regionList[0]].shape
    nImages = s[0]
    masks = np.zeros( (nImages, img_rows, img_cols, nClasses) )
    masks_reshape = np.zeros( (nImages, img_rows * img_cols, nClasses) )
    masks[:,:,:,0] = np.ones( (nImages, img_rows, img_cols) )
    for regionIndex in range(len(regionList)):
        # put the mask layer of the region index to 1
        masks_region = masks_all[regionList[regionIndex]]
        masks_region = preprocess(masks_region, img_rows, img_cols )
        temp = masks_region[:,:,:,0]
        masks[:,:,:,regionIndex+1] = temp
        # and the background layer to 0, for every region pixel
        masks[:,:,:,0] = (1 - temp) * masks[:,:,:,0]
        # Flattened per-pixel copy of the region mask; kept for reference,
        # but not used further below
        temp = temp.reshape((nImages, img_rows * img_cols))
        masks_reshape[:,:,regionIndex+1] = temp
    
    if len(flag.primitive_augmentation) > 0:
        print('-'*30)
        print('Primitive data augmentation for training: ' + flag.primitive_augmentation)
        print('-'*30)
        images, masks = dataAugmentationPrimitive( flag, images, masks )
        s = masks.shape
        nImages = s[0]
    else:
        print('-'*30)
        print('No primitive data augmentation used')
        print('-'*30)
        

    print('-'*30)
    print('Save masks as images')
    print('-'*30)
    for imageIndex in range( nImages ):
        fileName = 'mask_sample_%d.tif' % ( imageIndex )
        filePath = os.path.join( flag.output_masks_feed_dir, fileName )
        maskData = np.transpose( masks[imageIndex, ...], (2, 0, 1) )
        maskData = np.expand_dims( maskData, axis = 3 )
        writeData( filePath, maskData )

    print('-'*30)
    print('Save (preprocessed) images as images')
    print('-'*30)
    for imageIndex in range( nImages ):
        fileName = 'image_sample_%d.tif' % ( imageIndex )
        filePath = os.path.join( flag.output_images_feed_dir, fileName )
        imageData = np.transpose( images[imageIndex, ...], (2, 0, 1) )
        imageData = np.expand_dims( imageData, axis = 3 )
        writeData( filePath, imageData )


    print('-'*30)
    print('Load the model')
    print('-'*30)
    metrics = getMetricFunctions()
    
    if ( flag.load_model_weights_file != "" ):
        model = load_model( flag.load_model_weights_file )
    elif ( flag.load_model_file != "" ):
        json_string = open( flag.load_model_file ).read()
        model = model_from_json( json_string )
        # A model rebuilt from JSON comes back uncompiled, so attach the
        # optimizer/loss/metrics again before training
        model.compile( optimizer=flag.optimizer, loss=flag.loss_metric, metrics=metrics )
    else:
        model = getModel( flag.network, nClasses, flag.optimizer, flag.activation, flag.loss_metric, metrics, flag.learning_rate, flag.image_size )

    if ( flag.load_weights_file != "" ):
        model.load_weights( flag.load_weights_file )

    # Checkpoint intermediate weights roughly ten times over the run
    model_checkpoint = ModelCheckpoint( os.path.join( flag.output_run_dir, 'weights.{epoch:03d}.h5'), period=n_epochs//10)

    show_pred_masks = trainCheck(flag)
    if flag.data_augmentation:
        # Fixed number of augmented batches drawn from the generator per epoch
        steps_per_epoch = 20
        images, imagesValidation = splitDataSet( images, 0.8 )
        masks, masksValidation = splitDataSet( masks, 0.8 )
        train_generator = dataAugmentation( flag, images, masks )
        history = model.fit_generator( train_generator,
                                       validation_data=(imagesValidation, masksValidation),
                                       steps_per_epoch=steps_per_epoch,
                                       epochs=n_epochs, verbose=1, shuffle=True,
                                       callbacks=[model_checkpoint, show_pred_masks] )
    
    else:
        history = model.fit( images, masks, batch_size=flag.batch_size, epochs=n_epochs, verbose=1, shuffle=True,
                             validation_split=0.2, callbacks=[model_checkpoint, show_pred_masks] )

    print('-'*30)
    print('Saving model and training data (weights)')
    print('-'*30)

    model.save( os.path.join( flag.output_models_weights_dir, flag.run_id + ".h5" ) )
    weights_path = os.path.join( flag.output_weights_dir, flag.run_id + ".h5" )
    model.save_weights( weights_path )
    json_path = os.path.join( flag.output_models_dir, flag.model_id + ".json" )
    json_string = model.to_json()
    with open( json_path, 'w') as json_file:
        json_file.write( json_string )

    plotVarList = getMetrics( history )
    showTrainingHistory( flag, history, plotVarList )
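train() saves the model three ways: the full model as .h5, the weights alone, and the architecture as JSON. The following is a minimal sketch of that save/reload round trip, assuming the standalone Keras API the rest of the code appears to use; the tiny Sequential model and the file name "demo_weights.h5" are placeholders for getModel(...) and the real output paths. It also shows why the JSON path in train() needs a compile step before training.

from keras.models import Sequential, model_from_json
from keras.layers import Dense

# Tiny stand-in model; the real code builds one with getModel(...)
model = Sequential( [Dense( 4, activation="softmax", input_shape=(8,) )] )
model.compile( optimizer="adam", loss="categorical_crossentropy" )

# Save architecture and weights separately, as train() does
json_string = model.to_json()
model.save_weights( "demo_weights.h5" )

# Reload: a model built from JSON is uncompiled and must be compiled
# again before it can be trained or evaluated
restored = model_from_json( json_string )
restored.load_weights( "demo_weights.h5" )
restored.compile( optimizer="adam", loss="categorical_crossentropy" )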
Code example #4
def showData(flag):

    img_rows = flag.image_size
    img_cols = flag.image_size

    print('-' * 30)
    print('Loading train data...')
    print('-' * 30)

    imgs_train, imgs_test, imgs_mask_train_all, imgs_mask_test_all, imgs_id_test, imgs_id_train = loadDataParams( flag )
    images = imgs_train
    images = preprocess(images, img_rows, img_cols)
    masks_all = imgs_mask_train_all

    print('-' * 30)
    print('Fuse masks to single multi-label image')
    print('-' * 30)
    regionList = flag.region_list
    nClasses = len(regionList) + 1
    s = masks_all[regionList[0]].shape
    nImages = s[0]
    masks = np.zeros((nImages, img_rows, img_cols, nClasses))
    masks[:, :, :, 0] = np.ones((nImages, img_rows, img_cols))
    for regionIndex in range(len(regionList)):
        # put the mask layer of the region index to 1
        masks_region = masks_all[regionList[regionIndex]]
        masks_region = preprocess(masks_region, img_rows, img_cols)
        temp = masks_region[:, :, :, 0]
        masks[:, :, :, regionIndex + 1] = temp
        # and the background layer to 0, for every region pixel
        masks[:, :, :, 0] = (1 - temp) * masks[:, :, :, 0]

    print('-' * 30)
    print('Save masks as images')
    print('-' * 30)
    scaleIntensity = 1
    for imageIndex in range(nImages):
        fileName = 'mask_sample_%d.tif' % (imageIndex)
        filePath = os.path.join(flag.output_masks_feed_dir, fileName)
        maskData = np.transpose(masks[imageIndex, ...], (2, 0, 1))
        maskData = np.expand_dims(maskData, axis=3)
        writeData(filePath, maskData)

        y = masks[imageIndex, ...]
        y = mergeLabelImage(y).astype(np.float32)
        y /= max(y.max(), 1.0)  # avoid division by zero for an all-background mask
        x = images[imageIndex, ...].astype(np.float32)
        x = np.fmin(255.0 * np.ones(x.shape, np.float32), scaleIntensity * x)
        x = x.astype(np.uint8)
        x = np.squeeze(x)
        imgColor = cv2.cvtColor(x, cv2.COLOR_GRAY2BGR)
        imgMaskOri = (y * 255).astype(np.uint8)
        imgMaskOriColor = cv2.applyColorMap(imgMaskOri, cv2.COLORMAP_JET)
        imgOverlay = cv2.addWeighted(imgColor, 0.9, imgMaskOriColor, 0.4, 0.0)
        output_path_mask_ori = os.path.join(
            flag.output_images_dir, 'mask_index-%04d.png' % (imageIndex))
        output_path_overlay = os.path.join(
            flag.output_plots_dir, 'overlay_index-%04d.png' % (imageIndex))
        output_path_image = os.path.join(flag.output_plots_dir,
                                         'image_index-%04d.png' % (imageIndex))
        cv2.imwrite(output_path_image, imgColor)
        cv2.imwrite(output_path_mask_ori, imgMaskOriColor)
        cv2.imwrite(output_path_overlay, imgOverlay)

    print('-' * 30)
    print('Save (preprocessed) images as images')
    print('-' * 30)
    for imageIndex in range(nImages):
        fileName = 'image_sample_%d.tif' % (imageIndex)
        filePath = os.path.join(flag.output_images_feed_dir, fileName)
        imageData = scaleIntensity * np.transpose(images[imageIndex, ...],
                                                  (2, 0, 1))
        imageData = np.expand_dims(imageData, axis=3)
        writeData(filePath, imageData)