rep = 3
            elif val_groups[i] in ['c', 'g', 'k', 'l']:
                rep = 2
            # Duplicate validation index i `rep` times; presumably this
            # oversamples under-represented image groups in validation --
            # TODO confirm against where val_groups is built.
            val_idx.extend([val_idx0[i]] * rep)
        val_idx = np.asarray(val_idx)

        # One validation step per (oversampled) validation image; five
        # passes over the training indices per epoch.
        validation_steps = len(val_idx)
        steps_per_epoch = 5 * int(len(train_idx) / batch_size)

        print('Training fold', it)
        print('steps_per_epoch', steps_per_epoch, 'validation_steps',
              validation_steps)

        # Training batch generator with two mirrored augmentation ranges
        # (one of the two transformers is picked per sample by the iterator,
        # presumably -- behavior defined in BaseMaskDatasetIterator).
        data_gen = BaseMaskDatasetIterator(train_idx,
                                           random_transformers=[
                                               aug_mega_hardcore((-0.25, 0.6)),
                                               aug_mega_hardcore((-0.6, 0.25))
                                           ],
                                           batch_size=batch_size,
                                           shuffle=True,
                                           seed=1)

        # Seed every RNG source with a fold-dependent value so each fold's
        # run is reproducible. tf.set_random_seed is the TF1-era API.
        np.random.seed(it + 111)
        random.seed(it + 111)
        tf.set_random_seed(it + 111)

        #        tbCallback = TensorBoard(log_dir="tb_logs/densenet_softmax_{0}".format(it), histogram_freq=0, write_graph=True, write_images=False)

        # Stepwise learning-rate schedule expressed as (lr, epoch) pairs;
        # exact boundary semantics depend on schedule_steps (defined elsewhere).
        lrSchedule = LearningRateScheduler(lambda epoch: schedule_steps(
            epoch, [(1e-5, 2), (3e-4, 4), (1e-4, 6)]))
            rep = 1
            if val_groups[i] in ['b', 'd', 'e', 'n']:
                rep = 4
            elif val_groups[i] in ['c']:
                rep = 3
            # Duplicate validation index i `rep` times; presumably this
            # oversamples under-represented image groups in validation --
            # TODO confirm against where val_groups is built.
            val_idx.extend([val_idx0[i]] * rep)
        val_idx = np.asarray(val_idx) 
        
        # One validation step per (oversampled) validation image; five
        # passes over the training indices per epoch.
        validation_steps = len(val_idx)
        steps_per_epoch = 5 * int(len(train_idx) / batch_size)

        print('Training fold', it)
        print('steps_per_epoch', steps_per_epoch, 'validation_steps', validation_steps)

        # Training batch generator with two mirrored augmentation ranges
        # (selection between the two transformers is handled inside
        # BaseMaskDatasetIterator, defined elsewhere).
        data_gen = BaseMaskDatasetIterator(train_idx,
                     random_transformers=[aug_mega_hardcore((-0.25, 0.6)), aug_mega_hardcore((-0.6, 0.25))],
                     batch_size=batch_size,
                     shuffle=True,
                     seed=1
                     )
        
        # Seed every RNG source with a fold-dependent value so each fold's
        # run is reproducible. tf.set_random_seed is the TF1-era API.
        np.random.seed(it+111)
        random.seed(it+111)
        tf.set_random_seed(it+111)
        
#        tbCallback = TensorBoard(log_dir="tb_logs/inception_softmax_{0}".format(it), histogram_freq=0, write_graph=True, write_images=False)
        
        
        # Build the Inception-ResNet-v2 U-Net (fully-convolutional: input
        # H/W left as None) and warm-start it from the fold-specific
        # checkpoint saved by an earlier training stage.
        model = get_inception_resnet_v2_unet_softmax((None, None), weights=None)
        model.load_weights(path.join(models_folder, 'inception_resnet_v2_weights_{0}.h5'.format(it)))
        # Fine-tuning schedule: learning rates are 1-2 orders of magnitude
        # lower than the DenseNet-stage schedule above, consistent with
        # resuming from pretrained weights. Boundary semantics depend on
        # schedule_steps (defined elsewhere).
        lrSchedule = LearningRateScheduler(lambda epoch: schedule_steps(epoch, [(1e-6, 2), (1e-5, 10), (5e-6, 20), (3e-6, 25)]))