Code example #1
File: models.py  Project: polceanum/fnn
    def fit(self, X, y=None, subsample=None):
        """Fit the model with a time series X

        Parameters
        ----------
        X : array-like, shape (n_timepoints, n_features)
            Training data, where n_timepoints is the number of timepoints
            and n_features is the number of features.

        y : None
            Ignored variable.

        subsample : int or None
            If set to an integer, that many timepoints are selected at
            random from the series.

        Returns
        -------
        None
            The model is fit in place.
        """
        # Make hankel matrix from dataset
        Xs = standardize_ts(X)
        X_train = hankel_matrix(Xs, self.time_window)

        if subsample:
            self.train_indices, X_train = resample_dataset(
                X_train, subsample, random_state=self.random_state)
        if self.time_lag > 0:
            self.model.fit([np.reshape(X_train, (X_train.shape[0], -1))])
        else:
            self.model.fit(np.reshape(X_train, (X_train.shape[0], -1)))
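The helper hankel_matrix is not shown in this snippet. A minimal sketch of the kind of time-delay (Hankel) embedding it appears to perform, assuming an input of shape (n_timepoints, n_features) and a sliding window of length time_window (the helper name and exact output shape below are assumptions, not taken from the project):

import numpy as np

def hankel_matrix_sketch(X, window):
    # Stack sliding windows of length `window` along the time axis.
    # Input:  (n_timepoints, n_features)
    # Output: (n_timepoints - window + 1, window, n_features)
    X = np.asarray(X)
    n_windows = X.shape[0] - window + 1
    return np.stack([X[i:i + window] for i in range(n_windows)], axis=0)

With windows of that shape, the np.reshape(X_train, (X_train.shape[0], -1)) calls above flatten each window into a single feature vector before fitting the underlying model.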
Code example #2
    path = './datasets/pezzetto/*'
    print("Loading data from {}".format(path))

    dataset, Nu, error_function, optimization_problem, TR_indexes, VL_indexes, TS_indexes = load_EQ(
        path, F1_score)
    error_function = ACC_eq  # override the error function returned by load_EQ

    X_train = np.array([dataset.inputs[i] for i in TR_indexes] +
                       [dataset.inputs[i] for i in VL_indexes])
    y_train = np.array([dataset.targets[i] for i in TR_indexes] +
                       [dataset.targets[i] for i in VL_indexes])
    X_test = np.array([dataset.inputs[i] for i in TS_indexes])
    y_test = np.array([dataset.targets[i] for i in TS_indexes])

    X_train, y_train = resample_dataset(X_train, y_train,
                                        int(X_train.shape[-1] / 2))
    X_test, y_test = resample_dataset(X_test, y_test,
                                      int(X_test.shape[-1] / 2))
    original_len = X_train.shape[-1]
    print("DATASET TR_LEN={}, TS_LEN={}".format(len(X_train), len(X_test)))

    # load configuration for Earthquake task
    configs = config_EQ(list(range(X_test.shape[0])), Nu)

    plt = plot_timeseries_clf(X_train[:3], y_train[:3], save=False)
    plt.show()

    params = {
        "design_deep": [False],
        "Nl": [4],
        "rhos": [0.5, 0.7],
Code example #3
File: models.py  Project: polceanum/fnn
    def fit(self,
            X,
            y=None,
            subsample=None,
            tau=0,
            learning_rate=1e-3,
            batch_size=100,
            train_steps=200,
            loss='mse',
            verbose=0,
            optimizer="adam",
            early_stopping=False):
        """Fit the model with a time series X

        Parameters
        ----------
        X : array-like, shape (n_timepoints, n_features)
            Training data, where n_timepoints is the number of timepoints
            and n_features is the number of features.

        y : None
            Ignored variable.

        subsample : int or None
            If set to an integer, that many timepoints are selected at
            random from the series.
            
        tau : int
            The prediction time, or the number of timesteps to skip between 
            the input and output time series


        Returns
        -------
        None
            The model is fit in place; the Keras History object is stored
            in self.train_history.
        """
        # Make hankel matrix from dataset
        Xs = standardize_ts(X)

        # X_train = hankel_matrix(Xs, self.time_window)
        # Split the hankel matrix for a prediction task
        X0 = hankel_matrix(Xs, self.time_window + tau)
        X_train = X0[:, :self.time_window]
        Y_train = X0[:, -self.time_window:]

        if subsample:
            self.train_indices, _ = resample_dataset(
                X_train, subsample, random_state=self.random_state)
            X_train = X_train[self.train_indices]
            Y_train = Y_train[self.train_indices]

        optimizers = {
            "adam": tf.keras.optimizers.Adam(lr=learning_rate),
            "nadam": tf.keras.optimizers.Nadam(lr=learning_rate)
            # "radam": tfa.optimizers.RectifiedAdam(lr=learning_rate),
        }

        tf.random.set_seed(self.random_state)
        np.random.seed(self.random_state)
        self.model.compile(
            optimizer=optimizers[optimizer],
            loss=loss,
            #experimental_run_tf_function=False
        )

        if early_stopping:
            callbacks = [
                tf.keras.callbacks.EarlyStopping(monitor='loss',
                                                 mode='min',
                                                 patience=3)
            ]
        else:
            callbacks = []

        self.train_history = self.model.fit(x=tf.convert_to_tensor(X_train),
                                            y=tf.convert_to_tensor(Y_train),
                                            epochs=train_steps,
                                            batch_size=batch_size,
                                            callbacks=callbacks,
                                            verbose=verbose)
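The slicing near the top of this method (X0 = hankel_matrix(Xs, self.time_window + tau), then taking the first and last time_window steps of each window) pairs every input window with the same window shifted tau steps ahead. A small self-contained illustration of that split, reusing the hankel_matrix sketch from example #1, with all sizes chosen arbitrarily:

import numpy as np

def hankel_matrix_sketch(X, window):
    # Same sliding-window embedding as sketched under code example #1.
    X = np.asarray(X)
    n_windows = X.shape[0] - window + 1
    return np.stack([X[i:i + window] for i in range(n_windows)], axis=0)

time_window, tau = 10, 5
Xs = np.random.randn(200, 3)               # standardized series, (n_timepoints, n_features)

X0 = hankel_matrix_sketch(Xs, time_window + tau)
X_train = X0[:, :time_window]              # input windows
Y_train = X0[:, -time_window:]             # the same windows, tau steps later

print(X_train.shape, Y_train.shape)        # (186, 10, 3) (186, 10, 3)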