def test_vol_loader():
    """Construct the NPY volume loader used for evaluation.

    NOTE(review): despite the ``test_`` name this reads the ``'val'``
    section of the loader config — confirm that is intentional.
    """
    loader_args = init_args['volume_image_data_loader']['val']
    return NPYDataLoader(**loader_args)
def train_vol_loader():
    """Construct the NPY volume loader for the training split."""
    loader_args = init_args['volume_image_data_loader']['train']
    return NPYDataLoader(**loader_args)
# NOTE(review): collapsed/whitespace-mangled chunk; the leading
# `factor=np.sqrt(0.1), ... min_lr=1e-6)` completes a callback constructor
# (presumably an LR scheduler such as ReduceLROnPlateau) whose opening is
# outside this view — do not reformat without seeing the missing prefix.
# What the visible code does: builds EarlyStopping (val_loss, min_delta
# 0.001, patience 100) and a timestamped CSVLogger; instantiates train/val
# VolumeImageDataGenerator and NPYDataLoader from `init_args`; injects each
# loader into its split's `flow_from_loader` kwargs; then builds a
# ResNet-18 3D model from the train generator's image_shape (hard-coded
# regularization_factor = 1), loads `weights`, and grabs the compile args.
# `nb_classes`, `weights`, `title`, and `init_args` are defined elsewhere
# in the file — TODO confirm against the full source.
factor=np.sqrt(0.1), cooldown=0, patience=10, min_lr=1e-6) early_stopper = EarlyStopping(monitor='val_loss', min_delta=0.001, patience=100) csv_logger = CSVLogger('output/{}_{}_ctd.csv'.format( datetime.datetime.now().isoformat(), title)) train_datagen = VolumeImageDataGenerator( **init_args['volume_image_data_generator']['train']['init']) val_datagen = VolumeImageDataGenerator( **init_args['volume_image_data_generator']['val']['init']) train_vol_loader = NPYDataLoader( **init_args['volume_image_data_loader']['train']) val_vol_loader = NPYDataLoader(**init_args['volume_image_data_loader']['val']) train_iter_args = init_args['volume_image_data_generator']['train'][ 'flow_from_loader'] train_iter_args['volume_image_data_loader'] = train_vol_loader val_iter_args = init_args['volume_image_data_generator']['val'][ 'flow_from_loader'] val_iter_args['volume_image_data_loader'] = val_vol_loader image_shape = train_datagen.image_shape regularization_factor = 1 model = Resnet3DBuilder.build_resnet_18(image_shape, nb_classes, regularization_factor) model.load_weights(weights) compile_args = init_args['model']['compile']
# NOTE(review): collapsed/whitespace-mangled chunk; `init_args =
# yaml.load(stream) except yaml.YAMLError ...` sits inside a `try:` (and
# presumably a `with open(...) as stream:`) whose header is outside this
# view — do not reformat without the missing prefix. yaml.load without an
# explicit Loader is unsafe on untrusted input; prefer yaml.safe_load.
# What the visible code does: parses a required `weights` CLI argument,
# derives `title` by stripping an `output/resnet50_<word>_` prefix from the
# weights path (note: str.strip('.h5') strips those CHARACTERS from both
# ends, not the suffix — presumably removesuffix was intended; verify),
# builds the test VolumeImageDataGenerator/NPYDataLoader from `init_args`,
# loads the full model with load_model, runs predict_generator over the
# test iterator, prints the flattened predictions, and creates an empty
# submission DataFrame with 'id'/'cancer' columns.
init_args = yaml.load(stream) except yaml.YAMLError as exc: print(exc) parser = argparse.ArgumentParser(description='Predict given weights') parser.add_argument('weights', help='Trained weights') args = parser.parse_args() if args.weights: weights = args.weights title = re.sub('^output/resnet50_[a-z]+_', '', weights.strip('.h5')) print("Prediction by {}.".format(title)) test_datagen = VolumeImageDataGenerator( **init_args['volume_image_data_generator']['test']['init']) test_vol_loader = NPYDataLoader( **init_args['volume_image_data_loader']['test']) test_iter_args = init_args['volume_image_data_generator']['test']['flow_from_loader'] test_iter_args['volume_image_data_loader'] = test_vol_loader model = load_model(weights) model_pred_args = init_args['model']['predict_generator'] model_pred_args['generator'] = test_datagen.flow_from_loader( **test_iter_args) predictions = model.predict_generator(**model_pred_args) print(predictions.flatten()) print(len(predictions)) df_subm = pd.DataFrame(columns=['id', 'cancer'])
"""Load a trained 3D model and predict cancer probability per test volume."""
import numpy as np   # used below for np.newaxis; missing from this chunk
import pandas as pd  # used below for the submission DataFrame
import yaml

from keras.models import load_model

from preprocessing.volume_image import VolumeImageDataGenerator
from preprocessing.image_loader import NPYDataLoader

with open("config.yml", 'r') as stream:
    try:
        # NOTE(review): yaml.load without an explicit Loader executes
        # arbitrary tags on untrusted input — config.yml is local here,
        # but yaml.safe_load would be the safer default.
        config_args = yaml.load(stream)
    except yaml.YAMLError as exc:
        # Best-effort: report the parse error and continue (config_args
        # will be undefined and the script fails fast on first use).
        print(exc)

# Test-time generator (standardization only) and the NPY file loader.
test_datagen = VolumeImageDataGenerator(
    **config_args['volume_image_data_generator']['test'])
test_vol_loader = NPYDataLoader(
    **config_args['volume_image_data_loader']['test'])

# Copy the shared iterator kwargs so the config dict is not mutated.
iterator_args = config_args['volume_image_data_generator']['flow_from_loader']
test_iter_args = iterator_args.copy()
test_iter_args['volume_image_data_loader'] = test_vol_loader

model = load_model('output/resnet34_stage1.h5')

df_subm = pd.DataFrame(columns=['id', 'cancer'])
for idx, fn in enumerate(test_vol_loader.filenames):
    x = test_vol_loader.load(fn)
    x = test_datagen.standardize(x)
    # Prepend a batch axis: the model expects a batch of volumes.
    x = x[np.newaxis, ...]
    print("Predicting {} (batch shape: {})".format(fn, x.shape))
    # BUG FIX: Keras models expose no `.test` method (the original
    # `model.test(x, batch_size=1)` raises AttributeError); `predict`
    # returns the output probability for this single-sample batch.
    proba = model.predict(x, batch_size=1)
    print(proba)