示例#1
0
def launch(data_root, output_path, training_iters, epochs, restore, layers, features_root):
    """Train a U-Net on generated samples, then evaluate and save one prediction."""
    generator = Generator(572, data_root)

    x, y = generator(1)
    # Class weighting is currently disabled.
    weights = None

    net = unet.Unet(channels=generator.channels,
                    n_class=generator.n_class,
                    layers=layers,
                    features_root=features_root,
                    add_regularizers=True,
                    class_weights=weights)

    # Resume into output_path, or start a fresh numbered training directory.
    if restore:
        model_dir = output_path
    else:
        model_dir = create_training_path(output_path)

    trainer = unet.Trainer(net, optimizer="adam", opt_kwargs=dict(beta1=0.91))
    model_dir = trainer.train(generator, model_dir,
                              training_iters=training_iters,
                              epochs=epochs,
                              dropout=0.5,
                              display_step=2,
                              restore=restore)

    prediction = net.predict(model_dir, x)

    err = unet.error_rate(prediction, util.crop_to_shape(y, prediction.shape))
    print("Testing error rate: {:.2f}%".format(err))

    combined = util.combine_img_prediction(x, y, prediction)
    util.save_image(combined, "prediction.jpg")
示例#2
0
def load_and_predict(data):
    """
    Return UNet predicted output and SEEK's sum_threshold output (check RFI
    mitigation page in SEEK docs)
    """
    # BUG FIX: the provider must be built *before* the network constructor
    # reads its channel/class counts (it was previously created afterwards).
    data_provider = DataProvider(600, data)

    # BUG FIX: the original bound the instance to a local named "unet",
    # shadowing the tf_unet module and breaking any later unet.* access.
    net = unet.Unet(
        channels=data_provider.channels,
        n_class=data_provider.n_class,
        layers=3,
        features_root=64,
        cost_kwargs=dict(regularizer=0.001),
    )

    # Path to cpkt file of trained Unet
    path = "./unet_trained_bgs_example_data/model.cpkt"

    # Predict using Unet
    x_test, y_test = data_provider(1)
    unet_predict = net.predict(path, data)

    fig, ax = plt.subplots(1, 3, figsize=(12, 4))
    ax[0].imshow(x_test[0, ..., 0], aspect="auto")
    # BUG FIX: was y_test[j, ...] with "j" never defined; show sample 0.
    ax[1].imshow(y_test[0, ..., 1], aspect="auto")
    ax[2].imshow(unet_predict[0, ..., 1], aspect="auto", cmap="gray")
    fig.savefig("pred" + str(1) + ".png")

    # Predict using SEEK's sum_threshold
    with h5py.File("data", "r") as fp:
        # Dataset.value is deprecated (removed in h5py >= 3); use [()].
        # NOTE(review): the function body appears truncated in this excerpt —
        # the promised sum_threshold result is never computed/returned.
        timeord = fp["P/Phase1"][()]
示例#3
0
def runUnet(data, model, chan, classes, layers, features):
    """Segment the image encoded in ``data["data"]`` with a trained U-Net.

    Parameters
    ----------
    data : dict carrying an encoded image under "data"; the boolean mask is
        written back into the same key.
    model : path to the trained checkpoint.
    chan, classes, layers, features : U-Net architecture parameters.

    Returns the mutated ``data`` dict.
    """
    # NOTE(review): the original also called request.get_json(force=True)
    # into an unused local, and printed the raw image/result for debugging;
    # both removed — the payload already arrives via ``data``.
    img = np.array(decode(data["data"]), np.float32)

    net = unet.Unet(channels=chan,
                    n_class=classes,
                    layers=layers,
                    features_root=features)

    ny = img.shape[0]
    nx = img.shape[1]

    # Normalize to [0, 1]; guard the constant-image case, where the max is 0
    # after the shift and the division would produce NaNs.
    img = img.reshape(1, ny, nx, 1)
    img -= np.amin(img)
    peak = np.amax(img)
    if peak > 0:
        img /= peak

    prediction = net.predict(model, img)
    prediction = util.expand_to_shape(prediction, [1, ny, nx, classes])

    # Foreground mask: probability of class 1 above a fixed threshold.
    mask = prediction[0, ..., 1] > 0.1
    data["data"] = encode(mask)

    return data
示例#4
0
def train(args):
    """Build a U-Net from CLI arguments, report its size, and train it."""
    # Data loading
    provider = image_util.ImageDataProvider(args.data_dir,
                                            n_class=args.classes,
                                            class_colors=[0, 255, 127])

    # Network and trainer setup
    net = unet.Unet(layers=args.layers,
                    features_root=args.features_root,
                    channels=args.channels,
                    n_class=args.classes)
    trainer = unet.Trainer(net)

    # Count trainable parameters (product of each variable's dimensions).
    total_parameters = 0
    for variable in tf.trainable_variables():
        count = 1
        for dim in variable.get_shape():
            count *= dim.value
        total_parameters += count
    print("Total number of parameters:{0}".format(total_parameters))

    trainer.train(provider,
                  args.output_path,
                  training_iters=args.training_iters,
                  epochs=args.num_epochs,
                  write_graph=args.write_graph,
                  restore=args.restore)
示例#5
0
def launch(data_root, output_path, training_iters, epochs, restore, layers, features_root):
    """Train a U-Net on files under data_root, evaluate a batch, save a preview."""
    print("Using data from: %s"%data_root)
    provider = DataProvider(600, glob.glob(data_root+"/*"))

    net = unet.Unet(channels=provider.channels,
                    n_class=provider.n_class,
                    layers=layers,
                    features_root=features_root,
                    add_regularizers=True)

    # Resume into output_path, or start a fresh numbered training directory.
    model_dir = output_path if restore else create_training_path(output_path)
    trainer = unet.Trainer(net, optimizer="momentum", opt_kwargs=dict(momentum=0.2))
    model_dir = trainer.train(provider, model_dir,
                              training_iters=training_iters,
                              epochs=epochs,
                              dropout=0.5,
                              display_step=2,
                              restore=restore)

    x_test, y_test = provider(1)
    prediction = net.predict(model_dir, x_test)

    err = unet.error_rate(prediction, util.crop_to_shape(y_test, prediction.shape))
    print("Testing error rate: {:.2f}%".format(err))

    img = util.combine_img_prediction(x_test, y_test, prediction)
    util.save_image(img, "prediction.jpg")
示例#6
0
def test(model_dir, dataset_csv, working_dir, force=False, supervisely=False):
    """Predict the 'test' role of a dataset with a trained model.

    Output folder name encodes both the model and the dataset; existing
    results are kept unless force=True.
    """
    model_tag = 'M' + os.path.basename(model_dir)
    dataset_tag = 'D' + os.path.splitext(os.path.basename(dataset_csv))[0]
    output_dir = os.path.join(working_dir, s.stages[5], model_tag + dataset_tag)

    # Skip unless results are missing or a re-run is forced.
    if os.path.exists(output_dir) and not force:
        print(os.path.basename(output_dir), ' already exists. Skipping.')
        return

    # With force=True, wipe any previous results first (best effort).
    if os.path.exists(output_dir):
        try:
            shutil.rmtree(output_dir)
        except Exception as e:
            print(e)

    os.makedirs(output_dir)

    net = unet.Unet(
        channels=s.network['channels'],
        n_class=s.network['classes'],
        layers=s.network['layers'],
        features_root=s.network['features_root'],
        cost_kwargs=dict(class_weights=s.network['class_weights']))

    # Run prediction over the test split.
    net.predictAll(model_path=os.path.join(model_dir, 'model.cpkt'),
                   dataset_path=dataset_csv,
                   output_dir=output_dir,
                   roles=['test'],
                   supervisely=supervisely)  # supervisely is added
def launch(data_root, output_path, training_iters, epochs, restore, layers,
           features_root):
    """Train a dice-loss U-Net on the ultrasound TIFFs under data_root."""
    print("Using data from: %s" % data_root)
    provider = ultrasound_util.DataProvider(data_root + "/*.tif",
                                            a_min=0,
                                            a_max=210)
    net = unet.Unet(channels=provider.channels,
                    n_class=provider.n_class,
                    layers=layers,
                    features_root=features_root,
                    cost="dice_coefficient")

    # Resume into output_path, or start a fresh numbered training directory.
    model_dir = output_path if restore else create_training_path(output_path)
    trainer = unet.Trainer(net, norm_grads=True, optimizer="adam")
    model_dir = trainer.train(provider, model_dir,
                              training_iters=training_iters,
                              epochs=epochs,
                              dropout=0.5,
                              display_step=2,
                              restore=restore)

    x_test, y_test = provider(1)
    prediction = net.predict(model_dir, x_test)

    err = unet.error_rate(prediction, util.crop_to_shape(y_test, prediction.shape))
    print("Testing error rate: {:.2f}%".format(err))
示例#8
0
def launch(data_root, output_path, training_iters, epochs, restore, layers,
           features_root):
    """Train a regularized U-Net with momentum SGD on files under data_root."""
    print("Using data from: %s" % data_root)
    provider = DataProvider(600, glob.glob(data_root + "/*"))

    net = unet.Unet(channels=provider.channels,
                    n_class=provider.n_class,
                    layers=layers,
                    features_root=features_root,
                    cost_kwargs=dict(regularizer=0.001))

    # Resume into output_path, or start a fresh numbered training directory.
    model_dir = output_path if restore else create_training_path(output_path)
    trainer = unet.Trainer(net,
                           optimizer="momentum",
                           opt_kwargs=dict(momentum=0.2))
    model_dir = trainer.train(provider, model_dir,
                              training_iters=training_iters,
                              epochs=epochs,
                              dropout=0.5,
                              display_step=2,
                              restore=restore)

    x_test, y_test = provider(1)
    prediction = net.predict(model_dir, x_test)

    err = unet.error_rate(prediction, util.crop_to_shape(y_test, prediction.shape))
    print("Testing error rate: {:.2f}%".format(err))
示例#9
0
def launch(data_root, output_path, training_iters, epochs, restore, layers,
           features_root):
    """Train a U-Net with Adam on DataProvider samples and report test error."""
    provider = DataProvider(572, data_root)

    x, y = provider(1)
    # Class weighting is currently disabled.
    weights = None

    net = unet.Unet(channels=provider.channels,
                    n_class=provider.n_class,
                    layers=layers,
                    features_root=features_root,
                    cost_kwargs=dict(regularizer=0.001, class_weights=weights))

    # Resume into output_path, or start a fresh numbered training directory.
    if restore:
        model_dir = output_path
    else:
        model_dir = util.create_training_path(output_path)

    trainer = unet.Trainer(net, optimizer="adam", opt_kwargs=dict(beta1=0.91))
    model_dir = trainer.train(provider, model_dir,
                              training_iters=training_iters,
                              epochs=epochs,
                              dropout=0.5,
                              display_step=2,
                              restore=restore)

    prediction = net.predict(model_dir, x)

    err = unet.error_rate(prediction, util.crop_to_shape(y, prediction.shape))
    print("Testing error rate: {:.2f}%".format(err))
示例#10
0
def main():
    """Train the road-detection U-Net on the bundled training images."""
    # Input training dataset
    train_data = image_util.ImageDataProvider(
        search_path='RoadDetection_Train_Images', n_class=2)

    # Best known settings: layers=5, features_root=64, batch_size=2,
    # epochs=50, training_iters=64.
    net = unet.Unet(layers=4,
                    n_class=train_data.n_class,
                    channels=train_data.channels,
                    features_root=48,
                    cost='dice_coefficient',
                    cost_kwargs={'regularizer': 0.01})

    trainer = unet.Trainer(net,
                           batch_size=2,
                           verification_batch_size=4,
                           optimizer="momentum",
                           opt_kwargs=dict(momentum=0.5))

    trainer.train(data_provider=train_data,
                  output_path="./unet_trained",
                  training_iters=64,
                  epochs=50,
                  dropout=0.75,
                  display_step=2)

    print('Process completed.')
示例#11
0
def train(dataset_csv, working_dir, appendum='', force=False):
    """Train a U-Net on the 'train' role of a dataset CSV.

    Skips training when the derived output folder already exists, unless
    force=True.
    """
    # Folder name encodes the dataset plus the architecture hyper-parameters.
    dataset_tag = os.path.splitext(os.path.basename(dataset_csv))[0]
    arch_tag = ('ly' + str(s.network['layers']) +
                'ftr' + str(s.network['features_root']) + appendum)
    foldername = os.path.join(working_dir, s.stages[4],
                              '__'.join([dataset_tag, arch_tag, '']))

    # Only retrain if necessary.
    if os.path.exists(foldername) and not force:
        print(os.path.basename(foldername), ' already exists. Skipping.')
        return

    generator = image_util.ImageDataProvider(
        dataset_path=dataset_csv,
        roles=['train'],
        shuffle_data=True,
        a_min=None,
        a_max=None,
        n_class=s.network['classes'],
        n_channels=s.network['channels'])

    net = unet.Unet(
        channels=s.network['channels'],
        n_class=s.network['classes'],
        layers=s.network['layers'],
        features_root=s.network['features_root'],
        cost_kwargs=dict(class_weights=s.network['class_weights']))
    trainer = unet.Trainer(net,
                           optimizer=s.train['optimizer'],
                           batch_size=s.train['batch_size'])

    trainer.train(generator, foldername,
                  training_iters=s.train['training_iters'],
                  epochs=s.train['epochs'],
                  display_step=s.train['display_step'],
                  dropout=s.train['dropout'],
                  restore=False, write_graph=True)
示例#12
0
    def __init__(self, model_path="./unet_trained/model.ckpt"):
        """Build the 3-class U-Net and restore trained weights from disk."""
        self.net = unet.Unet(channels=3, n_class=3, layers=3, features_root=16)

        # One TF session per instance: initialize variables, then load weights.
        self.sess = tf.Session()
        self.sess.run(tf.global_variables_initializer())
        self.net.restore(self.sess, model_path)

        # Dummy label batch — presumably fed where the graph expects a label
        # input during prediction; TODO confirm against callers.
        self.y_dummy = np.empty((1, 400, 288, 3))
示例#13
0
File: skeleton.py  Project: mayurdb/U-Net
    def __init__(self, data_dir):
        """Set up the U-Net, its trainer, and bookkeeping state.

        data_dir: dataset root path (e.g. /home/rajat/nnproj/dataset/),
        including the trailing '/'.
        """
        self.path = data_dir
        self.net = unet.Unet(layers=3, features_root=64, channels=1, n_class=2)
        self.trainer = unet.Trainer(self.net)
        # Counter initialized to zero (incremented elsewhere in the class).
        self.count = 0
示例#14
0
 def __init__(self):
     """Load the shared U-Net and per-pattern test configuration."""
     # One binary (2-class) network reused for all four cloud-pattern models.
     self.net = unet.Unet(channels=1, n_class=2, layers=5, features_root=16)
     # Test images live in "testimages" two levels above this file.
     base = dirname(dirname(__file__))
     self.imagepath = join(base, "testimages")
     self.models = ["Fish", "Flower", "Gravel", "Sugar"]
     self.testimages = os.listdir(self.imagepath)
     # Working image dimensions.
     self.nx = 350
     self.ny = 525
     # Per-model decision thresholds — presumably tuned on validation data;
     # TODO confirm their provenance.
     self.mt = {"Fish": .5, "Gravel": .38263, "Flower": .4999, "Sugar": .39}
     # Crop sizes — NOTE(review): usage not visible in this excerpt; confirm.
     self.cropx = 196
     self.cropy = 192
示例#15
0
def predict(img):
    """Predict a segmentation for ``img`` with the pre-trained U-Net.

    Accepts a 2-D (H, W), 3-D (H, W, C), or already-batched 4-D array and
    normalizes it to NHWC before prediction. Returns the network output.
    """
    # BUG FIX: the original compared the shape *tuple* to an int
    # (``img.shape == 2``), which is always False, so 2-D/3-D inputs were
    # never expanded. Compare the number of dimensions instead.
    if img.ndim == 2:
        img = img[np.newaxis, :, :, np.newaxis]
    elif img.ndim == 3:
        img = img[np.newaxis, :, :, :]

    net = unet.Unet(channels=1, n_class=2, layers=4, features_root=64)
    prediction = net.predict('./pre_trained/model.ckpt', img)

    return prediction
示例#16
0
def infer_test():
    """Run inference over the stage 1/2 test set and collect RLE codes.

    Returns a dict mapping image_id -> run-length-encoded prediction.
    """
    LAYERS = 3
    pkl_fname = "data/preprocess/stage1_2_test_set.pkl"
    with open(pkl_fname, "rb") as f:
        ds = pickle.load(f)

    net = unet.Unet(
        channels=1,
        n_class=2,
        cost='cross_entropy',
        layers=LAYERS,
        features_root=64,
        cost_kwargs=dict(regularizer=0.001),
    )
    net.load_weight("log/20180416/model.cpkt")

    images_code = {}
    for image_id, value in ds.items():
        # Predict over the whole image, then encode the probability map.
        whole_image, whole_prob = infer_image(net, image_id, value)
        code = prob_to_rles(whole_prob)
        logging.info("code: %s", code)
        images_code[image_id] = code

    return images_code
示例#17
0
def main():
    """Run a saved retina U-Net checkpoint on test samples, save a preview
    image, and print the error rate."""
    dp = DataProvider(batchSize=BATCH_SIZE, validationSize=VALIDATION_SIZE)
    dp.readData()
    print("DONE READING DATA")

    # Iterations per epoch (kept for parity with the training configuration).
    iters = dp.getTrainSize() // BATCH_SIZE

    net = unet.Unet(channels=1, n_class=2, layers=3,
                    features_root=16, cost="cross_entropy", cost_kwargs={})

    # Use the previously trained checkpoint instead of retraining.
    path = os.getcwd() + "/retinaModel/model.cpkt"

    x_test, y_test = dp.getTestData(3, crop=False)
    prediction = net.predict(path, x_test)

    # Make sure the output directory exists before writing the sample image.
    if not os.path.lexists(OUTPUT_PATH):
        os.makedirs(OUTPUT_PATH)

    sampleSize = 3
    img = util.combine_img_prediction(x_test[0:sampleSize, ...],
                                      y_test[0:sampleSize, ...],
                                      prediction[0:sampleSize, ...])
    util.save_image(
        img, "%s/%s.jpg" % (os.getcwd() + "/" + "testResults", "testSample"))

    print("Testing error rate: {:.2f}%".format(
        unet.error_rate(prediction, util.crop_to_shape(y_test,
                                                       prediction.shape))))
示例#18
0
def train(gen):
    """Train a small U-Net on the generator, checkpointing per label class."""
    net = unet.Unet(channels=gen.channels, n_class=gen.n_class,
                    layers=5, features_root=16)
    trainer = unet.Trainer(net, optimizer="momentum",
                           opt_kwargs=dict(momentum=0.2))
    # Checkpoints land under a per-labelclass subdirectory.
    trainer.train(gen,
                  "./unet_trained/%s" % (gen.labelclass),
                  training_iters=32,
                  epochs=100,
                  display_step=2)
示例#19
0
def launch(data_root,
           roidictfile,
           output_path,
           training_iters,
           epochs,
           restore,
           layers,
           features_root,
           val=None):
    """Train a U-Net on ROI-annotated images and report the test error.

    data_root: training image set; roidictfile: YAML mapping of ROI labels;
    val: optional validation image set. Remaining args mirror Trainer.train.
    """
    with open(roidictfile) as fh:
        # Plain config data — safe_load avoids arbitrary object construction
        # and the Loader-less yaml.load deprecation warning.
        roidict = yaml.safe_load(fh)

    val_data_provider = None
    if val:
        val_data_provider = ImageDataProvider(val, roidict)

    data_provider = ImageDataProvider(data_root, roidict)

    data, label = data_provider(1)
    # make sure the labels are not flat
    assert np.any(
        np.asarray([label[-1, ..., nn].var()
                    for nn in range(label.shape[-1])]) > 0)

    # Class weighting is currently disabled.
    weights = None

    net = unet.Unet(
        channels=data_provider.channels,
        n_class=data_provider.n_class,
        layers=layers,
        features_root=features_root,
        cost_kwargs=dict(regularizer=0.001, class_weights=weights),
    )

    path = output_path if restore else create_training_path(output_path)
    trainer = unet.Trainer(net, optimizer="adam", opt_kwargs=dict(beta1=0.91))

    # BUG FIX: val_data_provider was only bound when val was given, yet it
    # was passed unconditionally — launch() without val crashed with
    # NameError. Pass the kwarg only when a validation set exists.
    train_kwargs = dict(training_iters=training_iters,
                        epochs=epochs,
                        dropout=0.5,
                        display_step=2,
                        restore=restore)
    if val_data_provider is not None:
        train_kwargs["val_data_provider"] = val_data_provider
    path = trainer.train(data_provider, path, **train_kwargs)

    prediction = net.predict(path, data)

    print("Testing error rate: {:.2f}%".format(
        unet.error_rate(prediction, util.crop_to_shape(label,
                                                       prediction.shape))))
示例#20
0
def train():
    """Train the 4-layer U-Net on the module-level X_test/y_test arrays,
    resuming from the checkpoint in ./pre_trained."""
    net = unet.Unet(channels=1, n_class=2, layers=4, features_root=64)
    trainer = unet.Trainer(net,
                           batch_size=8,
                           verification_batch_size=4,
                           optimizer='adam')
    # NOTE(review): the same arrays are passed twice (train and validation)
    # — presumably intentional for this demo; confirm against Trainer.train.
    trainer.train(X_test, y_test, X_test, y_test,
                  './pre_trained',
                  training_iters=32,
                  epochs=50,
                  dropout=0.5,
                  display_step=8,
                  restore=True)
示例#21
0
def run(sequence_dir, working_dir, force=False, model_dir=None):
    """Run label-free prediction over a sequence directory with every
    matching trained model (or the single model explicitly supplied)."""
    parts = os.path.basename(sequence_dir).split('_', maxsplit=1)
    camera = parts[0]      # kept for clarity; not used below
    multitime = parts[1]

    if model_dir is None:
        # No model given: pick every cam1 model with the same multitime tag.
        model_dirs = glob.glob(
            os.path.join(working_dir, s.stages[4],
                         'cam1_intra_' + multitime + '*'))
    else:
        model_dirs = [model_dir]

    for model_dir in model_dirs:
        # Output dir name combines model and sequence identifiers.
        output_dir = os.path.join(
            working_dir, s.stages[6],
            os.path.basename(model_dir) + os.path.basename(sequence_dir))

        # Skip unless results are missing or a re-run is forced.
        if os.path.exists(output_dir) and not force:
            print(os.path.basename(output_dir), ' already exists. Skipping.')
            continue

        # With force=True, remove stale results first (best effort).
        if os.path.exists(output_dir):
            try:
                shutil.rmtree(output_dir)
            except Exception as e:
                print(e)

        os.makedirs(output_dir)

        net = unet.Unet(
            channels=s.network['channels'],
            n_class=s.network['classes'],
            layers=s.network['layers'],
            features_root=s.network['features_root'],
            cost_kwargs=dict(class_weights=s.network['class_weights']))

        net.predict_no_label(
            model_path=os.path.join(model_dir, 'model.cpkt'),
            images_dir=sequence_dir,
            output_dir=output_dir,
        )
def train(raw_path, label_path, model_path):
    """Train a small lung-segmentation U-Net from paired DICOM volumes."""
    data = read_dicom(raw_path)
    label = read_dicom(label_path, True)
    # Build the training set from the paired volumes.
    provider = SimpleDataProvider(data, label, n_class=2, channels=1)

    # Build the network and its trainer.
    net = unet.Unet(layers=3, features_root=32, channels=1,
                    n_class=2, summaries=False)
    trainer = unet.Trainer(net, batch_size=2,
                           opt_kwargs={'learning_rate': 0.02})

    path = trainer.train(provider, model_path,
                         training_iters=64, epochs=100)
    print(path)
示例#23
0
def launch(data_root, output_path, training_iters, epochs, restore, layers,
           features_root):
    """Train a U-Net on the Kaggle ultrasound TIFFs and report test error."""
    print("Using data from: %s" % data_root)

    if not os.path.exists(data_root):
        raise IOError("Kaggle Ultrasound Dataset not found")

    provider = DataProvider(search_path=data_root + "/*.tif",
                            mean=100,
                            std=56)

    net = unet.Unet(channels=provider.channels,
                    n_class=provider.n_class,
                    layers=layers,
                    features_root=features_root)

    # Resume into output_path, or start a fresh numbered training directory.
    model_dir = output_path if restore else util.create_training_path(output_path)

    trainer = unet.Trainer(net,
                           batch_size=1,
                           norm_grads=False,
                           optimizer="adam")
    model_dir = trainer.train(provider, model_dir,
                              training_iters=training_iters,
                              epochs=epochs,
                              dropout=0.5,
                              display_step=2,
                              restore=restore)

    x_test, y_test = provider(1)
    prediction = net.predict(model_dir, x_test)

    err = unet.error_rate(prediction, util.crop_to_shape(y_test, prediction.shape))
    print("Testing error rate: {:.2f}%".format(err))
示例#24
0
def main():
    """Train the tiled retina U-Net, plotting losses and metrics per epoch."""
    restore = False

    # Measurement loggers feeding the live plotter.
    logs = [
        logger(label="train loss", color="Red"),
        logger(label="validation loss", color="Blue"),
        logger(label="validation error", color="Black"),
        logger(label="dice score", color="Green"),
    ]
    plot = plotter(logs, EPOCHS, os.getcwd() + "/" + "plots")

    # Data provider
    dp = DataProviderTiled(splits=12,
                           batchSize=BATCH_SIZE,
                           validationSize=VALIDATION_SIZE)
    dp.readData()
    print("DONE READING DATA")

    # Iterations per epoch.
    iters = dp.getTrainSize() // BATCH_SIZE

    # NOTE(review): the original built a class-weight dict
    # ({"class_weights": [0.99, 0.01]}) but never passed it; cost_kwargs
    # stays empty here to preserve behavior — wire it in if weighting was
    # actually intended.
    net = unet.Unet(channels=1, n_class=2, layers=3,
                    features_root=16, cost="cross_entropy", cost_kwargs={})

    options = {"momentum": 0.2, "learning_rate": 0.2, "decay_rate": 0.95}
    trainer = unet.Trainer(net,
                           optimizer="momentum",
                           plotter=plot,
                           opt_kwargs=options)

    path = trainer.train(dp, OUTPUT_PATH,
                         training_iters=iters, epochs=EPOCHS,
                         dropout=DROPOUT_KEEP_PROB,
                         display_step=DISPLAY_STEP, restore=restore)

    # Persist and render the collected training curves.
    plot.saveLoggers()
    plot.plotLoggers()
    print("DONE")
def lung_segment(path, model_path):
    """Segment lungs from a DICOM series.

    path: directory containing the DICOM series.
    model_path: trained U-Net checkpoint.
    Returns a SimpleITK connected-component mask grown from a seed inside
    the densest predicted region.
    Raises ValueError if no sufficiently dense seed region is found.
    """
    name = ImageSeriesReader_GetGDCMSeriesFileNames(path)
    raw = ReadImage(name)
    raw = sitk.Cast(raw, sitk.sitkFloat32)
    # Window HU values from [-1024, 1024] into [0, 1] for the network.
    raw = sitk.IntensityWindowing(raw, -1024, 1024, 0, 1.0)
    arr = GetArrayFromImage(raw)
    prediction = np.zeros_like(arr)
    arr = arr[..., np.newaxis]
    net = unet.Unet(layers=3,
                    features_root=32,
                    channels=1,
                    n_class=2,
                    summaries=False)
    pre = net.predict(model_path, arr, 4)
    pre = np.argmax(pre, -1)
    # Valid convolutions shrink the output; paste it back into the centre
    # crop. NOTE(review): the 20:492 offsets are hard-coded for this input
    # size — confirm they match the trained network geometry.
    prediction[:, 20:492, 20:492] = pre

    # Scan 50^3 patches for a nearly solid foreground cube; its centre
    # becomes the region-growing seed. The 154/105 start offsets skip the
    # borders — presumably tuned for this dataset; confirm.
    stride = 50
    index = None
    for z in range(0, prediction.shape[0], stride):
        for y in range(154, prediction.shape[1], stride):
            for x in range(105, prediction.shape[2], stride):
                patch = prediction[z:z + stride, y:y + stride, x:x + stride]
                if patch.mean() > 0.95:
                    index = [z + stride // 2, y + stride // 2, x + stride // 2]
                    break
            if index:
                break
        if index:
            break

    # BUG FIX: previously a missing seed crashed with AttributeError on
    # None.reverse(); fail loudly with a clear message instead.
    if index is None:
        raise ValueError("lung_segment: no dense seed region found in prediction")

    index.reverse()  # numpy (z, y, x) -> SimpleITK (x, y, z) ordering
    prediction = sitk.GetImageFromArray(prediction)
    prediction.CopyInformation(raw)
    prediction = sitk.Cast(prediction, sitk.sitkUInt8)
    prediction = sitk.ConnectedThreshold(prediction, [index], 1, 1, 1)
    return prediction
示例#26
0
def main():
    """Train the nuclei-segmentation U-Net on the padded RGB stage-1 set."""
    np.random.seed(12345)
    LAYERS = 3
    pkl_fname = "data/preprocess/stage1_train_set_rgb.pkl"
    images, masks = get_dataset(pkl_fname)
    logging.info("read train set: %s, %s", images.shape, masks.shape)
    logging.info("image:[%s, %s], mask:[%s, %s]", np.max(images), np.min(images), np.max(masks), np.min(masks))

    # Pad inputs so the valid-convolution output covers the full image.
    pred_size, offset = unet_size(256, LAYERS)
    logging.info("pred_size: %d, offset: %d", pred_size, offset)
    images = padding_array(images, offset, default_val=0.0)
    masks = padding_array(masks, offset, default_val=False)
    logging.info("shape after padded: %s, %s", images.shape, masks.shape)

    data_provider = image_util.SimpleDataProvider(images, masks, channels=3)
    logging.info("data_provider.channels: %s, data_provider.n_class: %s", data_provider.channels, data_provider.n_class)

    net = unet.Unet(channels=data_provider.channels,
                    n_class=data_provider.n_class,
                    cost='cross_entropy',
                    layers=LAYERS,
                    features_root=64,
                    cost_kwargs=dict(regularizer=0.001),
                    )
    batch_size = 8
    net.verification_batch_size = batch_size * 2
    # BUG FIX: ceil division was written with "/" (true division), producing
    # a float iteration count; use integer arithmetic instead.
    training_iters = (images.shape[0] - 1) // batch_size + 1
    logging.info("batch_size: %s, iters: %s", batch_size, training_iters)

    trainer = unet.Trainer(net, batch_size=batch_size, optimizer="momentum",
                           opt_kwargs=dict(momentum=0.9, learning_rate=0.01))
    path = trainer.train(data_provider, "log/20180416-1",
                         training_iters=training_iters, epochs=20, display_step=2)
示例#27
0
import os
os.environ["CUDA_VISIBLE_DEVICES"] = '2'  # pin to GPU 2 before TF is imported
# NOTE(review): __future__ imports must be the first statement in a module;
# placed after other statements this line raises SyntaxError when run as a
# plain script (this is a notebook export, where cells hid the ordering).
from __future__ import division, print_function
get_ipython().run_line_magic('matplotlib', 'inline')  # notebook-export artifact
import matplotlib.pyplot as plt
import matplotlib
import numpy as np

from tf_unet import image_util
from tf_unet import unet
from tf_unet import util

# In[2]:

# 2-class U-Net over RGB input; unusually wide first layer (300 features).
net = unet.Unet(channels=3, n_class=2, layers=2, features_root=300)
trainer = unet.Trainer(net,
                       optimizer="momentum",
                       opt_kwargs=dict(momentum=0.2))

# In[5]:

# Pairs *_img.png with *_mask.png from the training directory.
data_provider = image_util.ImageDataProvider(
    "/mnt/ccipd_data/CCF/ccfMaskTmp/training/*.png",
    data_suffix='_img.png',
    mask_suffix='_mask.png')

# In[4]:

# NOTE(review): this call is truncated in the source excerpt.
path = trainer.train(data_provider,
                     "./unet_trained",
示例#28
0
File: predict.py  Project: ZhH-17/tf_unet
}
label_cmap_list = np.array(list(label_cmap.values()))

# data_provider = ImageDataSingle("./data/cj_cut.png", "./data/cj_cut_gt.png")
# data_provider = ImageDataSingle("./data/cj_test1.png", "./data/cj_test1_gt.png")
data_provider = ImageDataSingle(
    "./data/cj_0402_1420/cj_right_all_20200402_1420.png",
    "./data/cj_0402_1420/cj_right_all_20200402_1420_gt.png")

output_path = "./log_l3_best"
n_class = 7
class_weights = np.ones(n_class) * 5
class_weights[0] = 0.5
net = unet.Unet(data_provider.channels,
                n_class,
                layers=3,
                features_root=32,
                cost_kwargs={"class_weights": class_weights})


def predict_panoroma(path, data_provider):
    """Stub — presumably meant to predict per-slice and stitch a panorama
    (cf. stack_imgs in this file); not implemented yet."""
    pass


def stack_imgs(imgs, num_row, num_col):
    '''
    concatenate image slices to a panoroma,
    imgs: image slices should be sorted by row first
    '''
    imgs_row = []
    for i in range(num_row):
示例#29
0
File: u-net.py  Project: vafaei-ar/Ngene
# Data provider over the configured file list: windows intensities to
# [0, 200] and reads the 'gt_mask' label as a 2-class target.
dp = rfc.DataProvider(ny=ws,
                      a_min=0,
                      a_max=200,
                      files=files_list,
                      label_name='gt_mask',
                      n_class=2)
# Probe one batch only to discover the spatial dimensions.
_, nx, ny, _ = dp(1)[0].shape

training_iters = 100
epochs = 300
# Model directory name encodes the width and threshold hyper-parameters.
model_dir = './models/unet_' + str(features_root) + '_' + str(threshold)

net = unet.Unet(channels=dp.channels,
                n_class=dp.n_class,
                layers=3,
                features_root=features_root,
                cost_kwargs=dict(regularizer=0.001))

if not args.test:
    trainer = unet.Trainer(net,
                           optimizer="momentum",
                           opt_kwargs=dict(momentum=0.2))
    # display_step is huge to effectively silence intermediate verification.
    path = trainer.train(dp,
                         model_dir,
                         training_iters=training_iters,
                         epochs=epochs,
                         dropout=0.5,
                         display_step=1000000)

else:
示例#30
0
from tf_unet import util


if __name__ == '__main__':
    # Synthetic RGB toy problem: train briefly, then evaluate four samples.
    nx = 572
    ny = 572

    training_iters = 20
    epochs = 100
    dropout = 0.75 # Dropout, probability to keep units
    display_step = 2
    restore = False

    # Generator producing random RGB images with known ground truth.
    generator = image_gen.get_image_gen_rgb(nx, ny, cnt=20)

    net = unet.Unet(channels=generator.channels, n_class=generator.n_class, layers=3, features_root=16)

    trainer = unet.Trainer(net, optimizer="momentum", opt_kwargs=dict(momentum=0.2))
    path = trainer.train(generator, "./unet_trained", 
                         training_iters=training_iters, 
                         epochs=epochs, 
                         dropout=dropout, 
                         display_step=display_step, 
                         restore=restore)

    x_test, y_test = generator(4)
    prediction = net.predict(path, x_test)

    print("Testing error rate: {:.2f}%".format(unet.error_rate(prediction, util.crop_to_shape(y_test, prediction.shape))))

    # NOTE(review): the excerpt is truncated here; numpy is imported for
    # code that follows outside this view.
    import numpy as np