Example #1
    def fit(self,
            x,
            y,
            epochs=1000000,
            batch_size=None,
            verbose=1,
            callbacks=None,
            validation_split=0.,
            validation_data=None,
            shuffle=True,
            class_weight=None,
            sample_weight=None,
            initial_epoch=0,
            steps_per_epoch=None,
            validation_steps=None,
            **kwargs):

        # The E2EFSCallback monitors the feature-selection layer and is
        # expected to stop training well before the huge default epoch count.
        callbacks = callbacks or []
        callbacks.append(custom_callbacks.E2EFSCallback(verbose=verbose))
        self.model.fit(x,
                       y,
                       epochs=epochs,
                       batch_size=batch_size,
                       verbose=verbose,
                       callbacks=callbacks,
                       validation_split=validation_split,
                       validation_data=validation_data,
                       shuffle=shuffle,
                       class_weight=class_weight,
                       sample_weight=sample_weight,
                       initial_epoch=initial_epoch,
                       steps_per_epoch=steps_per_epoch,
                       validation_steps=validation_steps,
                       **kwargs)
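A minimal usage sketch for this wrapper (the `fs_model` object and the toy
data below are illustrative assumptions; any model object exposing the fit()
above would do):

import numpy as np

x = np.random.rand(100, 20).astype('float32')   # toy input, 20 features
y = np.eye(2)[np.random.randint(0, 2, 100)]     # one-hot labels, 2 classes

# The internally appended E2EFSCallback is expected to end training early,
# so the large default epoch ceiling is never reached in practice.
fs_model.fit(x, y, batch_size=16, verbose=0)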
Example #2
def train_Keras(train_X,
                train_y,
                test_X,
                test_y,
                kwargs,
                e2efs_class=None,
                n_features=None,
                epochs=150):
    normalization = normalization_func()
    num_classes = train_y.shape[-1]

    norm_train_X = normalization.fit_transform(train_X)
    norm_test_X = normalization.transform(test_X)

    batch_size = max(2, len(train_X) // 50)
    # Inverse-frequency class weights, rescaled to sum to num_classes.
    class_weight = train_y.shape[0] / np.sum(train_y, axis=0)
    class_weight = num_classes * class_weight / class_weight.sum()
    sample_weight = None
    # `reps`, like `verbose`, `normalization_func`, `scheduler`,
    # `optimizer_class`, `initial_lr` and `loss_function` below, is a
    # module-level global in the source script.
    print('mu :', kwargs['mu'], ', batch_size :', batch_size)
    print('reps : ', reps, ', weights : ', class_weight)
    if num_classes == 2:
        # Binary case: express the class balance as per-sample weights
        # (column 1 of the one-hot labels marks the positive class).
        sample_weight = np.zeros((len(norm_train_X), ))
        sample_weight[train_y[:, 1] == 1] = class_weight[1]
        sample_weight[train_y[:, 1] == 0] = class_weight[0]
        class_weight = None

    # LinearSVC here is the package's Keras-based linear SVM wrapper,
    # not sklearn.svm.LinearSVC.
    svc_model = LinearSVC(nfeatures=norm_train_X.shape[1:], **kwargs)
    svc_model.create_keras_model(nclasses=num_classes)

    model_clbks = [
        callbacks.LearningRateScheduler(scheduler()),
    ]

    fs_callbacks = []

    if e2efs_class is not None:
        classifier = svc_model.model
        e2efs_layer = e2efs_class(n_features,
                                  input_shape=norm_train_X.shape[1:])
        model = e2efs_layer.add_to_model(classifier,
                                         input_shape=norm_train_X.shape[1:])
        fs_callbacks.append(
            clbks.E2EFSCallback(factor_func=None,
                                units_func=None,
                                verbose=verbose))
    else:
        model = svc_model.model
        e2efs_layer = None

    optimizer = optimizer_class(e2efs_layer, lr=initial_lr)

    model.compile(loss=LinearSVC.loss_function(loss_function, class_weight),
                  optimizer=optimizer,
                  metrics=[LinearSVC.accuracy])

    if e2efs_class is not None:
        model.fs_layer = e2efs_layer
        model.heatmap = e2efs_layer.moving_heatmap

        # Feature-selection phase: the E2EFSCallback is expected to stop
        # this fit early, so the 200000-epoch setting is only a ceiling.
        start_time = time.process_time()
        model.fit(norm_train_X,
                  train_y,
                  batch_size=batch_size,
                  epochs=200000,
                  callbacks=fs_callbacks,
                  validation_data=(norm_test_X, test_y),
                  class_weight=class_weight,
                  sample_weight=sample_weight,
                  verbose=verbose)
        model.fs_time = time.process_time() - start_time

    # Fine-tune the (possibly pruned) classifier with the regular schedule.
    model.fit(norm_train_X,
              train_y,
              batch_size=batch_size,
              epochs=epochs,
              callbacks=model_clbks,
              validation_data=(norm_test_X, test_y),
              class_weight=class_weight,
              sample_weight=sample_weight,
              verbose=verbose)

    model.normalization = normalization

    return model
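A hedged sketch of how this trainer might be called (the 'mu' key is taken
from the print above; the E2EFS import path and class name are assumptions,
not a verified API):

import numpy as np
from e2efs import e2efs_layers  # hypothetical import path

train_X = np.random.rand(200, 50).astype('float32')
train_y = np.eye(2)[np.random.randint(0, 2, 200)]
test_X, test_y = train_X[:40], train_y[:40]

model = train_Keras(train_X, train_y, test_X, test_y,
                    kwargs={'mu': 100.},
                    e2efs_class=e2efs_layers.E2EFSSoft,  # assumed class name
                    n_features=10)
print('feature-selection time (s):', model.fs_time)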
Example #3
def train_Keras(train_X,
                train_y,
                test_X,
                test_y,
                kwargs,
                e2efs_class=None,
                n_features=None,
                e2efs_kwargs=None,
                T=300,
                extra=300):
    normalization = normalization_func()
    num_classes = train_y.shape[-1]

    norm_train_X = normalization.fit_transform(train_X)
    norm_test_X = normalization.transform(test_X)

    batch_size = max(2, len(train_X) // 50)
    class_weight = train_y.shape[0] / np.sum(train_y, axis=0)
    class_weight = num_classes * class_weight / class_weight.sum()
    sample_weight = None
    print('reps : ', reps, ', weights : ', class_weight)
    if num_classes == 2:
        sample_weight = np.zeros((len(norm_train_X), ))
        sample_weight[train_y[:, 1] == 1] = class_weight[1]
        sample_weight[train_y[:, 1] == 0] = class_weight[0]
        class_weight = None

    classifier = three_layer_nn(nfeatures=norm_train_X.shape[1:], **kwargs)

    model_clbks = [
        callbacks.LearningRateScheduler(
            scheduler(extra=0 if e2efs_class is None else extra)),
    ]

    if e2efs_class is not None:
        e2efs_layer = e2efs_class(n_features,
                                  input_shape=norm_train_X.shape[1:],
                                  **e2efs_kwargs)
        model = e2efs_layer.add_to_model(classifier,
                                         input_shape=norm_train_X.shape[1:])
        model_clbks.append(
            clbks.E2EFSCallback(factor_func=e2efs_factor(T),
                                units_func=None,
                                verbose=verbose))
    else:
        model = classifier
        e2efs_layer = None

    optimizer = optimizer_class(e2efs_layer, lr=1e-3)
    # `epochs` and `extra_epochs` here are module-level globals in the
    # source script; they are distinct from this function's T/extra args.
    model_epochs = epochs
    if e2efs_class is not None:
        model_epochs += extra_epochs

    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['acc'])

    if e2efs_class is not None:
        model.fs_layer = e2efs_layer
        model.heatmap = e2efs_layer.moving_heatmap

    model.fit(norm_train_X,
              train_y,
              batch_size=batch_size,
              epochs=model_epochs,
              callbacks=model_clbks,
              validation_data=(norm_test_X, test_y),
              class_weight=class_weight,
              sample_weight=sample_weight,
              verbose=verbose)

    model.normalization = normalization
    return model
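All the examples lean on a module-level scheduler() factory. A plausible
minimal stand-in (an assumption about its shape, not the repository's actual
schedule) could look like this:

def scheduler(extra=0):
    # Hypothetical step decay: hold the base rate, then drop it twice;
    # `extra` shifts the boundaries when an E2E-FS phase runs first.
    def sched(epoch, lr=1e-3):
        if epoch < 50 + extra:
            return 1e-3
        elif epoch < 100 + extra:
            return 1e-4
        return 1e-5
    return sched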
Example #4
def train_Keras(train_X,
                train_y,
                test_X,
                test_y,
                kwargs,
                e2efs_class=None,
                n_features=None,
                epochs=150,
                fine_tuning=True):
    normalization = normalization_func()
    num_classes = train_y.shape[-1]

    norm_train_X = normalization.fit_transform(train_X)
    norm_test_X = normalization.transform(test_X)

    batch_size = max(2, len(train_X) // 50)
    class_weight = train_y.shape[0] / np.sum(train_y, axis=0)
    class_weight = num_classes * class_weight / class_weight.sum()
    sample_weight = None
    print('r :', kwargs['regularization'], ', batch_size :', batch_size)
    print('reps : ', reps, ', weights : ', class_weight)
    if num_classes == 2:
        sample_weight = np.zeros((len(norm_train_X), ))
        sample_weight[train_y[:, 1] == 1] = class_weight[1]
        sample_weight[train_y[:, 1] == 0] = class_weight[0]
        class_weight = None

    classifier = three_layer_nn(nfeatures=norm_train_X.shape[1:], **kwargs)

    model_clbks = [
        callbacks.LearningRateScheduler(scheduler()),
    ]

    fs_callbacks = []

    if e2efs_class is not None:
        e2efs_layer = e2efs_class(n_features,
                                  input_shape=norm_train_X.shape[1:])
        model = e2efs_layer.add_to_model(classifier,
                                         input_shape=norm_train_X.shape[1:])
        fs_callbacks.append(clbks.E2EFSCallback(units=5, verbose=verbose))
    else:
        # Reuse the classifier built above instead of constructing it again.
        model = classifier
        e2efs_layer = None

    optimizer = optimizer_class(e2efs_layer, lr=initial_lr)

    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['acc'])

    if e2efs_class is not None:
        model.fs_layer = e2efs_layer
        model.heatmap = e2efs_layer.moving_heatmap

        start_time = time.process_time()
        model.fit(norm_train_X,
                  train_y,
                  batch_size=batch_size,
                  epochs=200000,
                  callbacks=fs_callbacks,
                  validation_data=(norm_test_X, test_y),
                  class_weight=class_weight,
                  sample_weight=sample_weight,
                  verbose=verbose)
        model.fs_time = time.process_time() - start_time

    if fine_tuning:
        model.fit(norm_train_X,
                  train_y,
                  batch_size=batch_size,
                  epochs=epochs,
                  callbacks=model_clbks,
                  validation_data=(norm_test_X, test_y),
                  class_weight=class_weight,
                  sample_weight=sample_weight,
                  verbose=verbose)

    model.normalization = normalization

    return model
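Once trained with an E2E-FS layer, the feature ranking can presumably be
read off the stored heatmap. A hedged sketch (the attribute names come from
the snippets above; converting the heatmap with np.asarray and ranking it
with np.argsort are our assumptions):

import numpy as np

# model.heatmap was attached above (e2efs_layer.moving_heatmap); higher
# scores are assumed to mark more relevant input features.
scores = np.asarray(model.heatmap).ravel()
top10 = np.argsort(scores)[::-1][:10]
print('top-ranked features:', top10)
print('feature-selection time (s):', model.fs_time)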