Example #1
def test_resnet3d_50(resnet3d_test):
    """Test 50."""
    K.set_image_data_format('channels_last')
    model = Resnet3DBuilder.build_resnet_50((224, 224, 224, 1), 1, 1e-2)
    resnet3d_test(model)
    K.set_image_data_format('channels_first')
    model = Resnet3DBuilder.build_resnet_50((1, 512, 512, 256), 1, 1e-2)
    resnet3d_test(model)
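Example #1 relies on a `resnet3d_test` pytest fixture defined outside the snippet. A minimal sketch of what such a fixture could look like, assuming it only needs to compile the freshly built model the way Example #2 does (hypothetical, not the project's actual conftest):

import pytest
from keras import backend as K


@pytest.fixture
def resnet3d_test():
    def _check(model):
        # Building and compiling without raising is the whole test,
        # mirroring the settings used in Example #2.
        model.compile(loss="categorical_crossentropy", optimizer="sgd")
        assert True, "Failed to build with " + K.backend()
    return _check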
Example #2
def test_resnet3d_50():
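    # With channels_last, the channel axis comes last in the input shape:
    # (dim1, dim2, dim3, channels), e.g. (224, 224, 224, 1) below.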
    K.set_image_data_format('channels_last')
    model = Resnet3DBuilder.build_resnet_50((224, 224, 224, 1), 2)
    model.compile(loss="categorical_crossentropy", optimizer="sgd")
    assert True, "Failed to build with tensorflow"

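    # With channels_first, the channel axis comes first:
    # (channels, dim1, dim2, dim3), e.g. (1, 512, 512, 256) below.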
    K.set_image_data_format('channels_first')
    model = Resnet3DBuilder.build_resnet_50((1, 512, 512, 256), 2)
    model.compile(loss="categorical_crossentropy", optimizer="sgd")
    assert True, "Failed to build with theano"
Example #3
copy(__file__, output_dir)

# Download the data from: /hpf/largeprojects/ccm/devin/plastics-data/???
datapath = "data/data.h5"
test_size = 0.20
X_train, X_test, Y_train, Y_test = create_train_test(datapath, test_size)


print("number of training examples = " + str(X_train.shape[0]))
print("number of test examples = " + str(X_test.shape[0]))
print("X_train shape: " + str(X_train.shape))
print("Y_train shape: " + str(Y_train.shape))
print("X_test shape: " + str(X_test.shape))
print("Y_test shape: " + str(Y_test.shape))

model = Resnet3DBuilder.build_resnet_50((128, 128, 128, 1), 3)
# model = Resnet3DBuilder.build((128, 128, 128, 1), 3, basic_block, [1, 1, 1, 1], reg_factor=1e-4)

# Uncomment the following block to do transfer learning!
model.layers[-1].name = "dense_resnet_1" # Rename final dense layer so correct number of output classes is used (3 instead of MNIST 10)
# MNIST build_resnet_18
# model.load_weights('/home/carsonmclean/dev/csc2541/csc2541/output/04-03-03:37/model.h5',
#                    by_name = True)
# MNIST [1,1,1,1]
# model.load_weights('/home/carsonmclean/dev/csc2541/csc2541/output/04-09-00:25/model.h5',
#                    by_name = True)

# ModelNet40/PointNet build_resnet_18 dim = 64
# model.load_weights('/home/carsonmclean/dev/csc2541/csc2541/output/04-09-17:40/model.h5',
#                    by_name = True)
# ModelNet40/PointNet build_resnet_18 dim = 128
Example #4
        y_filename = os.path.join(y_path, filename)

        X = np.load(X_filename)
        y = np.load(y_filename)

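        # Each pass yields X.shape[0] // batch_size batches; the indices are
        # drawn at random with replacement, so individual rows may repeat.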
        for i in range(X.shape[0] // batch_size):
            indexes = np.random.randint(X.shape[0], size=batch_size)
            yield X[indexes], y[indexes]

        del X
        del y

if __name__ == '__main__':
    # model, training_model = get_unet_model()
    # model, training_model = get_model()
    model = Resnet3DBuilder.build_resnet_50((64, 64, 16, 1), 2)
    training_model = keras.utils.multi_gpu_model(model)
    training_model.compile(optimizer=Adam(amsgrad=True), loss='binary_crossentropy', metrics=['accuracy'])
    model.summary()

    callbacks = [
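        # RedirectModel (presumably the keras-retinanet helper) re-points the
        # checkpoint callback at the single-GPU `model`, so the saved weights
        # come from it rather than from the multi-GPU wrapper.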
        RedirectModel(keras.callbacks.ModelCheckpoint(
            os.path.join(
                './model_checkpoints',
                '{epoch:02d}.h5'
            ),
            verbose=1,
        ), model),
        keras.callbacks.TensorBoard(
            log_dir='./logs/' + datetime.datetime.now().strftime('%Y%m%d%H%M')
        ),
Example #5
    **init_args['volume_image_data_generator']['val']['init'])

train_vol_loader = NPYDataLoader(
    **init_args['volume_image_data_loader']['train'])
val_vol_loader = NPYDataLoader(**init_args['volume_image_data_loader']['val'])

train_iter_args = init_args['volume_image_data_generator']['train'][
    'flow_from_loader']
train_iter_args['volume_image_data_loader'] = train_vol_loader
val_iter_args = init_args['volume_image_data_generator']['val'][
    'flow_from_loader']
val_iter_args['volume_image_data_loader'] = val_vol_loader

image_shape = train_datagen.image_shape
regularization_factor = 1e-2
model = Resnet3DBuilder.build_resnet_50(image_shape, nb_classes,
                                        regularization_factor)
compile_args = init_args['model']['compile']
compile_args['optimizer'] = Adam(lr=1e-3)
model.compile(**compile_args)

model_fit_args = init_args['model']['fit_generator']
model_fit_args['generator'] = train_datagen.flow_from_loader(**train_iter_args)
model_fit_args['validation_data'] = val_datagen.flow_from_loader(
    **val_iter_args)
model_fit_args['callbacks'] = [
    checkpointer, lr_reducer, early_stopper, csv_logger
]

model.fit_generator(**model_fit_args)
model.save('output/resnet50_{}.h5'.format(title))
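Example #5 is driven entirely by an `init_args` dictionary that is built before the snippet starts, most likely from a config file. A rough sketch of the layout the code above implies, with placeholder values only (the real keys and values live in that config):

# Hypothetical layout of init_args, inferred from the keys used above.
init_args = {
    'volume_image_data_generator': {
        'train': {'init': {}, 'flow_from_loader': {}},
        'val': {'init': {}, 'flow_from_loader': {}},
    },
    'volume_image_data_loader': {
        'train': {},  # kwargs for NPYDataLoader
        'val': {},
    },
    'model': {
        'compile': {'loss': 'categorical_crossentropy'},  # optimizer is set in code
        'fit_generator': {'epochs': 50, 'steps_per_epoch': 100},  # placeholders
    },
}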
Example #6
import numpy as np
from resnet3d import Resnet3DBuilder

# pseudo volumetric data
X_train = np.random.rand(10, 64, 64, 32, 1)
labels = np.random.randint(0, 2, size=[10])
y_train = np.eye(2)[labels]

# train
model = Resnet3DBuilder.build_resnet_50((64, 64, 32, 1), 2, multilabel=True)
model.compile(loss="categorical_crossentropy", optimizer="sgd")
model.fit(X_train, y_train, batch_size=10)
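A short follow-up to Example #6, only to show the prediction side; the random input is a stand-in for a real volume of the same shape:

# predict on a single pseudo volume; the output has shape (1, 2)
X_new = np.random.rand(1, 64, 64, 32, 1)
probs = model.predict(X_new)
print(probs.shape)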
Example #7
copy(__file__, output_dir)

datapath = "data/modelnet/modelnet40_ply_hdf5_2048/"
test_size = 0.25
X_train, X_test, Y_train, Y_test = create_train_test(datapath, test_size)


print("number of training examples = " + str(X_train.shape[0]))
print("number of test examples = " + str(X_test.shape[0]))
print("X_train shape: " + str(X_train.shape))
print("Y_train shape: " + str(Y_train.shape))
print("X_test shape: " + str(X_test.shape))
print("Y_test shape: " + str(Y_test.shape))

dim = 64
model = Resnet3DBuilder.build_resnet_50((dim, dim, dim, 1), 40)
# model = Resnet3DBuilder.build((128, 128, 128, 1), 3, basic_block, [1, 1, 1, 1], reg_factor=1e-4)

# Uncomment the following block to do transfer learning!
# model.layers[-1].name = "dense_resnet_1" # Rename final dense layer so correct number of output classes is used (3 instead of MNIST 10)
# model.load_weights('/home/carsonmclean/dev/csc2541/csc2541/output/04-03-03:37/model.h5',
#                    by_name = True) # build_resnet_18
# model.load_weights('/home/carsonmclean/dev/csc2541/csc2541/output/04-09-00:25/model.h5',
#                    by_name = True) # [1,1,1,1]
# End of transfer learning

adam = Adam(lr=0.00001)
model.compile(optimizer=adam,
              loss='categorical_crossentropy',
              metrics=['accuracy'])
earlystop = EarlyStopping(monitor = 'val_acc',