Example #1
# Assumed imports for this snippet; return_paths, return_model,
# return_generators, return_model_and_things, is_df_within_another,
# CosineLoss and LearningRateFinder are project-local helpers, and
# RectifiedAdam is assumed to come from tensorflow_addons.
import os
import types
import pandas as pd
import tensorflow as tf
from tensorflow.keras.callbacks import TensorBoard
from tensorflow_addons.optimizers import RectifiedAdam
def find_best_lr(batch_size=24, model_key=0):
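    """LR-range test driver: register each iteration/optimizer combination in
    ModelParameters.xlsx (skipping combinations already recorded), build the
    model and generators, then sweep learning rates from min_lr to max_lr with
    LearningRateFinder, logging to TensorBoard. Returns False after launching
    one run (the caller repeats) or True when every combination is done."""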
    base_path, morfeus_drive = return_paths()
    excel_path = os.path.join(morfeus_drive, 'ModelParameters.xlsx')
    min_lr = 1e-5
    max_lr = 10
    model_base = return_model(model_key=model_key)
    loss = CosineLoss()
    features_list = ('Model_Type', 'Optimizer', 'step_factor')
    for iteration in [0, 1]:
        for optimizer in ['SGD']:
            out_path = os.path.join(morfeus_drive, 'Learning_Rates',
                                    'Model_Key_{}'.format(model_key))
            if not isinstance(model_base, types.FunctionType):
                model = model_base
                base_df = pd.read_excel(excel_path)
                current_run = {
                    'Model_Type': [model_key],
                    'run?': [0],
                    'step_factor': [10],
                    'Loss': ['CosineLoss'],
                    'Optimizer': ['SGD']
                }
                current_run_df = pd.DataFrame(current_run)
                contained = is_df_within_another(data_frame=base_df,
                                                 current_run_df=current_run_df,
                                                 features_list=features_list)
                if not contained:
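                    # Assign the smallest Model_Index not already used in the
                    # sheet.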
                    model_index = 0
                    while model_index in base_df['Model_Index'].values:
                        model_index += 1
                    current_run_df.insert(0,
                                          column='Model_Index',
                                          value=model_index)
                    # Keep Model_Index as a regular column: the sheet is
                    # written without an index and later lookups read the
                    # Model_Index column (the original set_index call here was
                    # an unassigned no-op). DataFrame.append was removed in
                    # pandas 2.x, so concatenate instead.
                    base_df = pd.concat([base_df, current_run_df],
                                        ignore_index=True)
                    base_df.to_excel(excel_path, index=False)
                else:
                    for key in features_list:
                        base_df = base_df.loc[base_df[key] ==
                                              current_run_df[key].values[0]]
                    model_index = base_df.Model_Index.values[0]
                out_path = os.path.join(out_path,
                                        'Model_Index_{}'.format(model_index),
                                        '{}_Iteration'.format(iteration))
                if os.path.exists(out_path):
                    continue
            else:
                model, out_path = return_model_and_things(
                    model_base=model_base,
                    out_path=out_path,
                    iteration=iteration,
                    excel_path=excel_path)
                if model is None:
                    continue
            os.makedirs(out_path)
            _, _, train_generator, validation_generator = return_generators(
                batch_size=batch_size,
                model_key=model_key,
                cross_validation_id=-1,
                cache=True)
            print(out_path)
            k = TensorBoard(log_dir=out_path,
                            profile_batch=0,
                            write_graph=True)
            k.set_model(model)
            k.on_train_begin()
            if optimizer == 'SGD':
                lr_opt = tf.keras.optimizers.SGD
            elif optimizer == 'Adam':
                lr_opt = tf.keras.optimizers.Adam
            elif optimizer == 'RAdam':
                lr_opt = RectifiedAdam
            else:
                raise ValueError('Unknown optimizer: {}'.format(optimizer))
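            # Sweep learning rates from min_lr to max_lr over 10 epochs,
            # logging loss against LR to TensorBoard (LearningRateFinder is a
            # project-local helper).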
            LearningRateFinder(epochs=10,
                               model=model,
                               metrics=['accuracy'],
                               out_path=out_path,
                               optimizer=lr_opt,
                               loss=loss,
                               steps_per_epoch=1000,
                               train_generator=train_generator.data_set,
                               lower_lr=min_lr,
                               high_lr=max_lr)
            tf.keras.backend.clear_session()
            return False  # repeat!
    return True
Example #2
# Same assumed imports as Example #1, plus numpy; my_UNet, MeanDSC,
# get_layers_dict_dense, return_base_dict_dense, return_dictionary_dense,
# determine_if_in_excel, return_hparams, Path_Return_Class and run_model are
# project-local helpers.
import numpy as np
def train_model(epochs=None,
                model_name='3D_Fully_Atrous',
                run_best=False,
                debug=False,
                combined_lobes=False,
                gaussian=False):
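    """Train my_UNet models over a shuffled grid of dense-block
    hyperparameters. In debug mode each model is only built, compiled and
    logged to TensorBoard. Combinations already recorded in
    parameters_list_by_trial_id.xlsx, or with a non-empty TensorBoard
    directory, are skipped; one model is trained per call (the caller
    repeats)."""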
    num_classes = 9
    if combined_lobes:
        num_classes = 6
    save_a_model = False
    if run_best:
        save_a_model = True
    batch_size = 1
    step_size_factor = 8
    for iteration in range(3):
        base_path, morfeus_drive = return_paths()
        base_dict = return_base_dict_dense(step_size_factor=step_size_factor)
        overall_dictionary = return_dictionary_dense(base_dict,
                                                     run_best=run_best,
                                                     added_gaussian=gaussian)
        overall_dictionary = np.asarray(overall_dictionary)
        perm = np.arange(len(overall_dictionary))
        np.random.shuffle(perm)
        overall_dictionary = overall_dictionary[perm]
        if debug:
            i = 0
            _, _, train_generator, validation_generator = return_generators()
        for run_data in overall_dictionary:
            run_data['num_classes'] = num_classes
            run_data['pool_z'] = 4
            run_data['save_a_model'] = save_a_model
            if debug:
                layers_dict = get_layers_dict_dense(**run_data, pool=(2, 4, 4))
                model = my_UNet(layers_dict=layers_dict,
                                image_size=(None, None, None, 1),
                                mask_output=True,
                                concat_not_add=True,
                                out_classes=num_classes,
                                explictly_defined=True)
                Model_val = model.created_model
                Model_val.compile(tf.keras.optimizers.Adam(),
                                  loss=tf.keras.losses.CategoricalCrossentropy(
                                      from_logits=False),
                                  metrics=[
                                      tf.keras.metrics.CategoricalAccuracy(),
                                      MeanDSC(num_classes=num_classes)
                                  ])
                i += 1
                k = TensorBoard(
                    log_dir=r'H:\Liver_Lobe_tensorboard\test\{}'.format(i),
                    profile_batch=0,
                    histogram_freq=5,
                    write_graph=True)
                k.set_model(Model_val)
                k.on_train_begin()
                tf.keras.backend.clear_session()
                continue
            tf.random.set_seed(iteration)
            run_data['batch_size'] = batch_size
            excel_path = os.path.join(morfeus_drive,
                                      'parameters_list_by_trial_id.xlsx')
            print(base_path)
            run_data['Iteration'] = iteration
            run_data['Trial_ID'] = 0
            features_list = list(run_data.keys())
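            # Skip hyperparameter combinations already recorded in the sheet;
            # the check is keyed on every field of run_data.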
            if determine_if_in_excel(excel_path,
                                     run_data,
                                     features_list=features_list):
                continue
            trial_id = run_data['Trial_ID']
            hparams = return_hparams(run_data,
                                     features_list=features_list,
                                     excluded_keys=[])

            layers_dict = get_layers_dict_dense(**run_data, pool=(4, 4, 4))
            paths_class = Path_Return_Class(base_path=base_path,
                                            morfeus_path=morfeus_drive,
                                            save_model=save_a_model,
                                            is_keras_model=False)
            paths_class.define_model_things(model_name,
                                            'Trial_ID_{}'.format(trial_id))
            tensorboard_output = paths_class.tensorboard_path_out
            print(tensorboard_output)
            if os.listdir(tensorboard_output):
                print('already done')
                continue
            records_add = ''
            if run_data['gaussian']:
                records_add = '_Gaussian'
            _, _, train_generator, validation_generator = return_generators(
                combined_lobes=combined_lobes,
                records_add=records_add,
                change_background=run_data['change_background'])
            step_size = len(train_generator)
            run_model(trial_id=str(trial_id),
                      layers_dict=layers_dict,
                      train_generator=train_generator,
                      step_size=step_size,
                      validation_generator=validation_generator,
                      paths_class=paths_class,
                      morfeus_drive=morfeus_drive,
                      hparams=hparams,
                      base_path=base_path,
                      epochs=epochs,
                      **run_data)
            return None  # break out!
Example #3
# Assumed context: a TF1-era snippet. AdamOptimizer is taken to be
# tf.train.AdamOptimizer, TensorBoard is the Keras callback (write_grads
# existed in that API), CosineSimilarity is the Keras cosine loss, and
# model plus num_batch are defined earlier in the source.
import time
optimizer = AdamOptimizer(0.001)
tbcb = TensorBoard(log_dir='/logs',
                   histogram_freq=1,
                   write_graph=True,
                   write_grads=True,
                   write_images=True,
                   embeddings_freq=1)

loss_history = []
cos_loss_history = []

T = 0.0
t0 = time.time()

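# Drive the TensorBoard callback by hand: in a custom training loop,
# set_model() and the on_train_begin/on_epoch_begin/on_train_batch_begin
# hooks stand in for the bookkeeping Model.fit() would normally do.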
tbcb.set_model(model)
tbcb.on_train_begin()

f = open('log.txt', 'w')

try:
    for epoch in range(200):
        tbcb.on_epoch_begin(epoch)

        cos_loss = CosineSimilarity()

        for step in range(num_batch):
            tbcb.on_train_batch_begin(step)
            print('========== step: {:03d} / {:03d} ============\r'.format(
                step, num_batch),
                  end='')
Example #4
# Assumed context: earlystop is an EarlyStopping callback created just above
# this excerpt; model, get_batch, batch_size, x_train and y_train are also
# defined earlier. ModelCheckpoint, ReduceLROnPlateau and TensorBoard come
# from tensorflow.keras.callbacks.
earlystop.on_train_begin()

modelcheckpoint = ModelCheckpoint(filepath="weights/",
                                  monitor="val_loss",
                                  verbose=1,
                                  save_best_only=True)
modelcheckpoint.set_model(model)
modelcheckpoint.on_train_begin()

reduce_lr = ReduceLROnPlateau(monitor="val_loss", patience=10, verbose=1)
reduce_lr.set_model(model)
reduce_lr.on_train_begin()

tensorboard = TensorBoard(log_dir="logs/")
tensorboard.set_model(model)
tensorboard.on_train_begin()

epochs = 3
train_logs_dict = {}
test_logs_dict = {}
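# Forward each epoch to every callback by hand, mirroring what Model.fit()
# would do with a callbacks list.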
for epoch in range(epochs):
    training_acc, testing_acc, training_loss, testing_loss = [], [], [], []
    print("\nStart of epoch %d" % (epoch + 1, ))
    # Iterate over the batches of the dataset.
    modelcheckpoint.on_epoch_begin(epoch)
    earlystop.on_epoch_begin(epoch)
    reduce_lr.on_epoch_begin(epoch)
    tensorboard.on_epoch_begin(epoch)
    for x_batch_train, y_batch_train in get_batch(batch_size, x_train,
                                                  y_train):
        ...  # the per-batch training step is truncated in the source
Example #5
# Same assumed imports and project-local helpers as the examples above.
def find_best_lr(path_desc='', combined_lobes=False, records_add=''):
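    """Run an LR-range test for every weighted_loss/atrous/layer combination
    in the grid below. Each combination gets an output directory named from
    its hyperparameters so finished sweeps are skipped, and the function
    returns after launching one sweep (the caller repeats)."""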
    min_lr = 1e-7
    max_lr = 1e-1
    num_classes = 9
    if combined_lobes:
        num_classes = 6
    max_conv_blocks = 3
    iteration = 0
    change_background = True
    for weighted_loss in [True, False]:
        for atrous in [True, False]:
            for layer in [2, 3]:
                for growth_rate in [4]:
                    for filters in [8]:
                        for num_conv_blocks in [
                                2
                        ]:  # Always start with 1 before downsampling
                            for conv_lambda in [1]:
                                base_path, morfeus_drive = return_paths()
                                run_data = {
                                    'layers': layer,
                                    'max_conv_blocks': max_conv_blocks,
                                    'filters': filters,
                                    'num_conv_blocks': num_conv_blocks,
                                    'conv_lambda': conv_lambda,
                                    'growth_rate': growth_rate,
                                    'atrous': atrous
                                }
                                layers_dict = get_layers_dict_dense(
                                    **run_data,
                                    pool=(2, 4, 4),
                                    num_classes=num_classes)
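                                # Encode every hyperparameter into the output
                                # path so a finished sweep can be detected via
                                # os.path.exists below.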
                                things = [
                                    'change_background{}'.format(
                                        change_background),
                                    'weighted_loss_{}'.format(weighted_loss),
                                    'atrous_{}'.format(atrous),
                                    'layers{}'.format(layer),
                                    'max_conv_blocks_{}'.format(
                                        max_conv_blocks),
                                    'filters_{}'.format(filters),
                                    'num_conv_blocks_{}'.format(
                                        num_conv_blocks),
                                    'conv_lambda_{}'.format(conv_lambda),
                                    'growth_rate_{}'.format(growth_rate),
                                    '{}_Iteration'.format(iteration)
                                ]
                                out_path = os.path.join(
                                    morfeus_drive, path_desc, 'DenseNet')
                                if records_add != '':
                                    things = ['Gaussian'] + things
                                for thing in things:
                                    out_path = os.path.join(out_path, thing)
                                if os.path.exists(out_path):
                                    print('already done')
                                    continue
                                os.makedirs(out_path)
                                print(out_path)
                                model = my_UNet(
                                    layers_dict=layers_dict,
                                    image_size=(None, None, None, 1),
                                    mask_output=True,
                                    out_classes=num_classes,
                                    explictly_defined=True).created_model
                                k = TensorBoard(log_dir=out_path,
                                                profile_batch=0,
                                                write_graph=True)
                                k.set_model(model)
                                k.on_train_begin()
                                lr_opt = tf.keras.optimizers.Adam
                                _, _, train_generator, validation_generator = return_generators(
                                    combined_lobes=combined_lobes,
                                    records_add=records_add,
                                    change_background=change_background)
                                loss = tf.keras.losses.CategoricalCrossentropy(
                                    from_logits=False)
                                LearningRateFinder(
                                    epochs=10,
                                    model=model,
                                    metrics=[
                                        'categorical_accuracy',
                                        MeanDSC(num_classes=num_classes)
                                    ],
                                    out_path=out_path,
                                    optimizer=lr_opt,
                                    loss=loss,
                                    steps_per_epoch=len(train_generator),
                                    train_generator=train_generator.data_set,
                                    lower_lr=min_lr,
                                    high_lr=max_lr)
                                tf.keras.backend.clear_session()
                                return None  # repeat!
Example #6
# Assumes "from tensorflow.keras import metrics" in addition to the imports
# above; return_model_parameters and create_excel_values are project-local.
def find_best_lr(batch_size=24):
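    """Excel-driven LR-range test: return_model_parameters picks the next
    pending row, the row's loss column selects the LR search window, and
    LearningRateFinder logs the sweep to TensorBoard. Returns False after
    launching one run (the caller repeats) or True when nothing is pending.
    """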
    tf.random.set_seed(3141)
    base_path, morfeus_drive, excel_path = return_paths()

    # if base_path.startswith('H'):  # Only run this locally
    #     create_excel_values(excel_path=excel_path)
    for iteration in [0]:
        out_path = os.path.join(morfeus_drive, 'Learning_Rates')
        model_parameters, out_path = return_model_parameters(
            out_path=out_path, excel_path=excel_path, iteration=iteration)
        if model_parameters is None:
            continue
        model_key = model_parameters['Model_Type']
        optimizer = model_parameters['Optimizer']
        model_base = return_model(model_key=model_key)
        model = model_base(**model_parameters)
        if model_parameters['loss'] == 'CosineLoss':
            loss = CosineLoss()
            min_lr = 1e-6
            max_lr = 1e-1
        elif model_parameters['loss'] == 'CategoricalCrossEntropy':
            loss = tf.keras.losses.CategoricalCrossentropy()
            min_lr = 1e-10
            max_lr = 1e-3
        _, _, train_generator, validation_generator = return_generators(
            batch_size=batch_size,
            model_key=model_key,
            all_training=True,
            cache=True,
            cache_add='LR_Finder_{}'.format(model_key))
        print(out_path)
        k = TensorBoard(log_dir=out_path, profile_batch=0, write_graph=True)
        k.set_model(model)
        k.on_train_begin()
        lr_opt = tf.keras.optimizers.Adam
        if optimizer == 'SGD':
            lr_opt = tf.keras.optimizers.SGD
        elif optimizer == 'Adam':
            lr_opt = tf.keras.optimizers.Adam
        elif optimizer == 'RAdam':
            lr_opt = RectifiedAdam
        METRICS = [
            metrics.TruePositives(name='TruePositive'),
            metrics.FalsePositives(name='FalsePositive'),
            metrics.TrueNegatives(name='TrueNegative'),
            metrics.FalseNegatives(name='FalseNegative'),
            metrics.CategoricalAccuracy(name='Accuracy'),
            metrics.Precision(name='Precision'),
            metrics.Recall(name='Recall'),
            metrics.AUC(name='AUC'),
        ]
        LearningRateFinder(epochs=10,
                           model=model,
                           metrics=METRICS,
                           out_path=out_path,
                           optimizer=lr_opt,
                           loss=loss,
                           steps_per_epoch=1000,
                           train_generator=train_generator.data_set,
                           lower_lr=min_lr,
                           high_lr=max_lr)
        tf.keras.backend.clear_session()
        return False  # repeat!
    return True