def train(model): """Train the model.""" # Training dataset. dataset_train = CustomDataset() dataset_train.load_custom(args.dataset, "train") dataset_train.prepare() # Validation dataset dataset_val = CustomDataset() dataset_val.load_custom(args.dataset, "val") dataset_val.prepare() # add class weights CLASS_WEIGHTS = { 0:189, 1:22, 2:1, 3:40, 4:28, 5:85, 6:40, 7:63, 8:42, 9:5 } model_inference = modellib.MaskRCNN(mode="inference", config=CustomConfig(), model_dir=args.logs) # Custom callback to calculate mAP for each epich during training mean_average_precision_callback = modellib.MeanAveragePrecisionCallback(model, model_inference, dataset_val, calculate_map_at_every_X_epoch=10, log=args.logs, verbose=1) # add online augmentation augmentation = iaa.SomeOf((0, 3), [ iaa.Fliplr(0.5), iaa.Flipud(0.5), iaa.OneOf([iaa.Affine(rotate=90), iaa.Affine(rotate=180), iaa.Affine(rotate=270)]), iaa.Multiply((0.8, 1.5)), #iaa.GaussianBlur(sigma=(0.0, 5.0)) ]) def compute_weights(CLASS_WEIGHTS): mean = np.array(list(CLASS_WEIGHTS.values())).mean() # sum_class_occurence / nb_classes max_weight = np.array(list(CLASS_WEIGHTS.values())).max() CLASS_WEIGHTS.update((x, float(max_weight/(y))) for x, y in CLASS_WEIGHTS.items()) CLASS_WEIGHTS=dict(sorted(CLASS_WEIGHTS.items())) return CLASS_WEIGHTS class_weights = compute_weights(CLASS_WEIGHTS) # *** This training schedule is an example. Update to your needs *** # Since we're using a very small dataset, and starting from # COCO trained weights, we don't need to train too long. Also, # no need to train all layers, just the heads should do it. print("Training network heads") model.train(dataset_train, dataset_val, learning_rate=config.LEARNING_RATE, epochs=100, layers='all' #augmentation=augmentation, #class_weight=class_weights, #custom_callbacks=[mean_average_precision_callback] ) '''
def train(model): """Train the model.""" epoch_count = 0 # training cross-validation with 5 fold for i in range(5): # Training dataset. print("Training fold", i) dataset_train = dataset.CustomDataset() dataset_train.load_custom_K_fold(dataset_path, "train", i) dataset_train.prepare() # Validation dataset dataset_val = dataset.CustomDataset() dataset_val.load_custom_K_fold(dataset_path, "val", i) dataset_val.prepare() augmentation = imgaug.augmenters.Sometimes(0.5, [ imgaug.augmenters.Fliplr(0.5), imgaug.augmenters.Flipud(0.5)]) model_inference = modellib.MaskRCNN(mode="inference", config=config,model_dir=logs) mAP_callback = modellib.MeanAveragePrecisionCallback(model, model_inference, dataset_val, calculate_at_every_X_epoch=25, dataset_limit=500, verbose=1) # Training - Stage 1 epoch_count += 20 print("Training network heads") model.train(dataset_train, dataset_val, learning_rate=config.LEARNING_RATE *2, epochs= epoch_count, layers='heads', custom_callbacks=[mAP_callback]) #augmentation=augmentation) epoch_count += 10 print("Fine tune Resnet stage 4 and up") model.train(dataset_train, dataset_val, learning_rate=config.LEARNING_RATE, epochs= epoch_count, layers='4+', custom_callbacks=[mAP_callback], augmentation=augmentation)
def train(model):
    # Training set.
    dataset_train = dataset.CustomDataset()
    dataset_train.load_custom(dataset_path, "train")
    dataset_train.prepare()
    print("Images: {}\nClasses: {}".format(len(dataset_train.image_ids),
                                           dataset_train.class_names))

    # Validation set
    dataset_val = dataset.CustomDataset()
    dataset_val.load_custom(dataset_path, "val")
    dataset_val.prepare()
    print("Images: {}\nClasses: {}".format(len(dataset_val.image_ids),
                                           dataset_val.class_names))

    augmentation = imgaug.augmenters.Sometimes(
        0.5, [imgaug.augmenters.Fliplr(0.5),
              imgaug.augmenters.Flipud(0.5)])

    model_inference = modellib.MaskRCNN(mode="inference", config=config,
                                        model_dir=logs)

    # Calculate COCO-style mAP every 5 epochs, limited to the first 1000 images
    mAP_callback = modellib.MeanAveragePrecisionCallback(
        model, model_inference, dataset_val,
        calculate_at_every_X_epoch=5, dataset_limit=1000, verbose=1)

    # Training - Stage 1
    print("Training network heads")
    model.train(dataset_train, dataset_val,
                learning_rate=config.LEARNING_RATE,
                epochs=20,
                layers='heads',
                custom_callbacks=[mAP_callback],
                augmentation=augmentation)
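# MeanAveragePrecisionCallback is not part of stock matterport Mask R-CNN; it
# is a patch added to mrcnn/model.py, and its signature varies between forks
# (calculate_map_at_every_X_epoch vs. calculate_at_every_X_epoch, optional
# log/label/dataset_limit arguments). A minimal sketch of such a callback,
# assuming the standard mrcnn utilities (load_image_gt, compute_ap) and that
# copying the training model's weights into the inference model is enough.
# The class name SimpleMAPCallback is illustrative, not the patched API.
import numpy as np
import keras
from mrcnn import utils
import mrcnn.model as modellib


class SimpleMAPCallback(keras.callbacks.Callback):
    def __init__(self, train_model, inference_model, dataset,
                 calculate_at_every_X_epoch=5, dataset_limit=None, verbose=1):
        super().__init__()
        self.train_model = train_model
        self.inference_model = inference_model
        self.dataset = dataset
        self.every = calculate_at_every_X_epoch
        self.limit = dataset_limit
        self.verbose = verbose

    def on_epoch_end(self, epoch, logs=None):
        if (epoch + 1) % self.every != 0:
            return
        # Copy the current training weights into the inference-mode model.
        self.inference_model.keras_model.set_weights(
            self.train_model.keras_model.get_weights())
        APs = []
        for image_id in self.dataset.image_ids[:self.limit]:
            # Ground truth for this image.
            image, image_meta, gt_class_id, gt_bbox, gt_mask = \
                modellib.load_image_gt(self.dataset,
                                       self.inference_model.config, image_id)
            # Detection + per-image AP at IoU 0.5.
            r = self.inference_model.detect([image], verbose=0)[0]
            AP, _, _, _ = utils.compute_ap(gt_bbox, gt_class_id, gt_mask,
                                           r["rois"], r["class_ids"],
                                           r["scores"], r["masks"])
            APs.append(AP)
        mAP = np.mean(APs)
        if logs is not None:
            logs["val_mean_average_precision"] = mAP
        if self.verbose:
            print("Epoch {}: mAP@0.5 = {:.3f}".format(epoch + 1, mAP))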
def train(model, dataset_dir, subset='D2S_validation_train'):
    """Train the model."""
    # Training dataset. Use the training set and 35K from the
    # validation set, as in the Mask R-CNN paper.
    dataset_train = CocoDataset()
    dataset_train.load_coco(dataset_dir, subset=subset)
    dataset_train.prepare()

    # Validation dataset
    dataset_val = CocoDataset()
    dataset_val.load_coco(dataset_dir, subset='D2S_validation_val')
    dataset_val.prepare()

    # Prepare the mAP callback
    model_inference = modellib.MaskRCNN(mode="inference", config=InferenceConfig(),
                                        model_dir=DEFAULT_LOGS_DIR)
    mean_average_precision_callback = modellib.MeanAveragePrecisionCallback(
        model, model_inference, dataset_val, 4, verbose=1)

    # Image augmentation
    # Right/left flip 50% of the time
    augmentation = imgaug.augmenters.Fliplr(0.5)

    # *** This training schedule is an example. Update to your needs ***
    # Training - Stage 1
    print("Training network heads")
    model.train(dataset_train, dataset_val,
                learning_rate=config.LEARNING_RATE,
                epochs=10,
                layers='heads',
                augmentation=augmentation,
                custom_callbacks=[mean_average_precision_callback])
def train(model): """Train the model.""" # Training dataset. dataset_train = BubbleDataset() dataset_train.load_bubble(args.dataset, "train") dataset_train.prepare() # Validation dataset dataset_val = BubbleDataset() dataset_val.load_bubble(args.dataset, "val") dataset_val.prepare() model_inference = modellib.MaskRCNN(mode="inference", config=_InfConfig(), model_dir=args.logs) mean_average_precision_callback = modellib.MeanAveragePrecisionCallback( model, model_inference, dataset_val, 1, 32, verbose=1) # Image augmentation # http://imgaug.readthedocs.io/en/latest/source/augmenters.html augmentation = iaa.SomeOf( (0, 10), [ iaa.Fliplr(0.5), iaa.Flipud(0.5), iaa.Add((-40, 40)), iaa.AdditiveGaussianNoise(scale=(0, 0.2 * 255)), iaa.Multiply((0.25, 1)), iaa.MedianBlur(k=(3, 15)), iaa.SigmoidContrast(gain=(5, 10), cutoff=(0.1, 0.6)), iaa.Sharpen(alpha=(0.0, 1.0), lightness=(0.75, 1.1)), iaa.Affine(scale={ "x": (0.5, 2), "y": (0.1, 1.5) }), #(0.2,0.6) iaa.Affine(shear=(-40, 40)), iaa.PiecewiseAffine(scale=(0.01, 0.06)), iaa.OneOf([ iaa.Affine(rotate=90), iaa.Affine(rotate=180), iaa.Affine(rotate=270) ]) ]) # *** This training schedule is an example. Update to your needs *** # Since we're using a very small dataset, and starting from # COCO trained weights, we don't need to train too long. Also, # no need to train all layers, just the heads should do it. model.train(dataset_train, dataset_val, learning_rate=config.LEARNING_RATE / 10, epochs=10, augmentation=augmentation, layers='5+', custom_callbacks=[mean_average_precision_callback]) model.train(dataset_train, dataset_val, learning_rate=config.LEARNING_RATE / 100, epochs=20, augmentation=augmentation, layers='5+', custom_callbacks=[mean_average_precision_callback]) model.train(dataset_train, dataset_val, learning_rate=config.LEARNING_RATE / 1000, epochs=30, augmentation=augmentation, layers='5+', custom_callbacks=[mean_average_precision_callback])
    '''
    model.train(dataset_train, dataset_val,
                learning_rate=config.LEARNING_RATE,
                epochs=10,
                layers='heads')
    '''
    model_inference = modellib.MaskRCNN(mode="inference", config=config,
                                        model_dir=MODEL_DIR)
    mean_average_precision_callback = modellib.MeanAveragePrecisionCallback(
        model, model_inference, dataset_val,
        calculate_map_at_every_X_epoch=1, verbose=1)

    model.train(dataset_train, dataset_val,
                learning_rate=config.LEARNING_RATE,
                epochs=300,
                layers='all',
                custom_callbacks=[mean_average_precision_callback])
    '''
    # Fine tune all layers
    # Passing layers="all" trains all layers. You can also
    # pass a regular expression to select which layers to
    # train by name pattern.
    model.train(dataset_train, dataset_val,
def train(model): """Train the model.""" # Training dataset. print("Loading training dataset") dataset_train = CropDiseaseDataset() dataset_train.load_crop_disease(args.dataset, "train") dataset_train.prepare() print("Image Count: {}".format(len(dataset_train.image_ids))) print("Class Count: {}".format(dataset_train.num_classes)) for i, info in enumerate(dataset_train.class_info): print("{:3}. {:50}".format(i, info['name'])) # image_ids = np.random.choice(dataset_train.image_ids, 4) # for image_id in image_ids: # image = dataset_train.load_image(image_id) # mask, class_ids = dataset_train.load_mask(image_id) # # visualize.display_top_masks(image, mask, class_ids, dataset_train.class_names) # visualize.display_instances(image, # np.array([[ 0, 1, 41, 55]]), # mask, class_ids, # ["BG", "mask"], # scores=[1.0], # show_bbox=False) # Validation dataset print("Loading validation set") dataset_val = CropDiseaseDataset() dataset_val.load_crop_disease(args.dataset, "val") dataset_val.prepare() print("Image Count: {}".format(len(dataset_val.image_ids))) print("Class Count: {}".format(dataset_val.num_classes)) for i, info in enumerate(dataset_val.class_info): print("{:3}. {:50}".format(i, info['name'])) # Test dataset print("Loading test set") dataset_test = CropDiseaseDataset() dataset_test.load_crop_disease(args.dataset, "test") dataset_test.prepare() print("Image Count: {}".format(len(dataset_test.image_ids))) print("Class Count: {}".format(dataset_test.num_classes)) for i, info in enumerate(dataset_test.class_info): print("{:3}. {:50}".format(i, info['name'])) model_inference = modellib.MaskRCNN(mode="inference", config=InferenceConfig(), model_dir=model.model_dir) test_map_callback = modellib.MeanAveragePrecisionCallback( model, model_inference, dataset_test, calculate_at_every_X_epoch=1, label="test_mean_average_precision") val_map_callback = modellib.MeanAveragePrecisionCallback( model, model_inference, dataset_val, calculate_at_every_X_epoch=1) train_map_callback = modellib.MeanAveragePrecisionCallback( model, model_inference, dataset_train, calculate_at_every_X_epoch=1, label="train_mean_average_precision") print("Start training") model.train(dataset_train, dataset_val, learning_rate=config.LEARNING_RATE, epochs=100, layers='all', custom_callbacks=[ val_map_callback, test_map_callback, train_map_callback ])