Example 1
# Assumed imports for this snippet (module paths per the DeepMoji repo):
from deepmoji.model_def import deepmoji_transfer
from deepmoji.finetuning import freeze_layers


def test_freeze_layers():
    """ Only layers whose name contains the unfrozen keyword stay trainable.
    """
    model = deepmoji_transfer(5, 30)
    keyword = 'softmax'

    model = freeze_layers(model, unfrozen_keyword=keyword)

    for layer in model.layers:
        # Only layers that actually have trainable weights are relevant
        if layer is not None and layer.trainable_weights:
            if keyword in layer.name:
                assert layer.trainable
            else:
                assert not layer.trainable
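
For context, the sketch below shows roughly the behavior this test exercises. It is an assumption reconstructed from the assertions above, not the actual deepmoji.finetuning.freeze_layers implementation, and the name freeze_layers_sketch is hypothetical:

def freeze_layers_sketch(model, unfrozen_keyword=None):
    # Illustrative re-implementation: mark every layer non-trainable
    # unless its name contains the unfrozen keyword.
    for layer in model.layers:
        if layer is not None:
            layer.trainable = (unfrozen_keyword is not None and
                               unfrozen_keyword in layer.name)
    return model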
Example 2
# Assumed imports for this snippet; module paths follow the DeepMoji repo
# layout (global_variables.py is named in the docstring below). The helpers
# class_avg_chainthaw and class_avg_tune_trainable are defined alongside
# this function in the codebase.
import uuid

from keras.optimizers import Adam

from deepmoji.global_variables import FINETUNING_METHODS, WEIGHTS_DIR
from deepmoji.finetuning import freeze_layers


def class_avg_finetune(model,
                       texts,
                       labels,
                       nb_classes,
                       batch_size,
                       method,
                       epoch_size=5000,
                       nb_epochs=1000,
                       error_checking=True,
                       verbose=True):
    """ Compiles and finetunes the given model.

    # Arguments:
        model: Model to be finetuned
        texts: List of three lists, containing tokenized inputs for training,
            validation and testing (in that order).
        labels: List of three lists, containing labels for training,
            validation and testing (in that order).
        nb_classes: Number of classes in the dataset.
        batch_size: Batch size.
        method: Finetuning method to be used. For available methods, see
            FINETUNING_METHODS in global_variables.py. Note that the model
            should be defined accordingly (see the docstring for
            deepmoji_transfer()).
        epoch_size: Number of samples in an epoch.
        nb_epochs: Number of epochs. Doesn't matter much as early stopping is used.
        error_checking: If set to True, warnings will be printed when the label
            list has the wrong dimensions.
        verbose: Verbosity flag.

    # Returns:
        Model after finetuning, and the score after finetuning
        measured with the class-average F1 metric.
    """

    if method not in FINETUNING_METHODS:
        raise ValueError('ERROR (class_avg_tune_trainable): '
                         'Invalid method parameter. '
                         'Available options: {}'.format(FINETUNING_METHODS))

    (X_train, y_train) = (texts[0], labels[0])
    (X_val, y_val) = (texts[1], labels[1])
    (X_test, y_test) = (texts[2], labels[2])

    checkpoint_path = '{}/deepmoji-checkpoint-{}.hdf5' \
                      .format(WEIGHTS_DIR, str(uuid.uuid4()))

    f1_init_path = '{}/deepmoji-f1-init-{}.hdf5' \
                   .format(WEIGHTS_DIR, str(uuid.uuid4()))

    # Check dimension of labels
    if error_checking:
        # Binary classification has two classes but one value
        expected_shape = 1 if nb_classes == 2 else nb_classes

        for ls in [y_train, y_val, y_test]:
            if len(ls.shape) <= 1 or ls.shape[1] != expected_shape:
                # Report the shape safely even when the labels are 1-D
                actual = ls.shape[1] if len(ls.shape) > 1 else ls.shape
                print('WARNING (class_avg_tune_trainable): '
                      'The dimensions of the provided '
                      'labels do not match the expected value. '
                      'Expected: {}, actual: {}'.format(
                          expected_shape, actual))
                break

    # Higher learning rate when only the output layers are trained;
    # lower when the pretrained weights are also updated
    if method in ['last', 'new']:
        lr = 0.001
    elif method in ['full', 'chain-thaw']:
        lr = 0.0001

    loss = 'binary_crossentropy'

    # Freeze layers if using last
    if method == 'last':
        model = freeze_layers(model, unfrozen_keyword='softmax')

    # Compile model, for chain-thaw we compile it later (after freezing)
    if method != 'chain-thaw':
        adam = Adam(clipnorm=1, lr=lr)
        model.compile(loss=loss, optimizer=adam, metrics=['accuracy'])

    # Training
    if verbose:
        print('Method:  {}'.format(method))
        print('Classes: {}'.format(nb_classes))

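    # For 'chain-thaw', compilation happens inside class_avg_chainthaw,
    # which sequentially unfreezes and trains the layers; the other
    # methods train the already-compiled model in a single pass.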
    if method == 'chain-thaw':
        result = class_avg_chainthaw(model,
                                     nb_classes=nb_classes,
                                     train=(X_train, y_train),
                                     val=(X_val, y_val),
                                     test=(X_test, y_test),
                                     batch_size=batch_size,
                                     loss=loss,
                                     epoch_size=epoch_size,
                                     nb_epochs=nb_epochs,
                                     checkpoint_weight_path=checkpoint_path,
                                     f1_init_weight_path=f1_init_path,
                                     verbose=verbose)
    else:
        result = class_avg_tune_trainable(
            model,
            nb_classes=nb_classes,
            train=(X_train, y_train),
            val=(X_val, y_val),
            test=(X_test, y_test),
            epoch_size=epoch_size,
            nb_epochs=nb_epochs,
            batch_size=batch_size,
            init_weight_path=f1_init_path,
            checkpoint_weight_path=checkpoint_path,
            verbose=verbose)
    return model, result
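
A minimal usage sketch for class_avg_finetune, assuming the imports above. The deepmoji_transfer call mirrors the test in Example 1, and the zero-filled arrays are placeholders for real tokenized inputs and labels, not meaningful training data:

import numpy as np

from deepmoji.model_def import deepmoji_transfer  # assumed module path

nb_classes = 2
model = deepmoji_transfer(nb_classes, 30)

# Three (train, val, test) splits; binary classification expects
# labels of shape (n, 1).
texts = [np.zeros((100, 30), dtype='int32') for _ in range(3)]
labels = [np.zeros((100, 1), dtype='int32') for _ in range(3)]

model, f1 = class_avg_finetune(model, texts, labels,
                               nb_classes=nb_classes,
                               batch_size=32, method='last')
print('Class-average F1: {}'.format(f1))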