Example #1
def construct_all_classifiers(nb_epochs=None):
    """
    Creates a list of all classification networks ready for testing

    Parameters
    ----------
    nb_epochs: int, if not None, value shall be set for all networks that accept it

    Returns
    -------
    map of strings to sktime_dl BaseDeepRegressor implementations
    """
    if nb_epochs is not None:
        # potentially quicker versions for tests
        return {
            'CNNClassifier_quick': CNNClassifier(nb_epochs=nb_epochs),
            'EncoderClassifier_quick': EncoderClassifier(nb_epochs=nb_epochs),
            'FCNClassifier_quick': FCNClassifier(nb_epochs=nb_epochs),
            'MCDCNNClassifier_quick': MCDCNNClassifier(nb_epochs=nb_epochs),
            'MCNNClassifier_quick': MCNNClassifier(nb_epochs=nb_epochs),
            'MLPClassifier_quick': MLPClassifier(nb_epochs=nb_epochs),
            'ResNetClassifier_quick': ResNetClassifier(nb_epochs=nb_epochs),
            'TLENETClassifier_quick': TLENETClassifier(nb_epochs=nb_epochs),
            'TWIESNClassifier_quick': TWIESNClassifier(),  # does not accept nb_epochs
            'InceptionTimeClassifier_quick': InceptionTimeClassifier(nb_epochs=nb_epochs),
            'MACNNClassifier_quick': MACNNClassifier(nb_epochs=nb_epochs)
        }
    else:
        # the 'literature-conforming' versions
        return {
            'CNNClassifier': CNNClassifier(),
            'EncoderClassifier': EncoderClassifier(),
            'FCNClassifier': FCNClassifier(),
            'MCDCNNClassifier': MCDCNNClassifier(),
            'MCNNClassifier': MCNNClassifier(),
            'MLPClassifier': MLPClassifier(),
            'ResNetClassifier': ResNetClassifier(),
            'TLENETClassifier': TLENETClassifier(),
            'TWIESNClassifier': TWIESNClassifier(),
            'InceptionTimeClassifier': InceptionTimeClassifier(),
            "MACNNClassifier": MACNNClassifier()
        }
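For context, a minimal smoke-test sketch of how the returned dictionary might be consumed; it assumes the sktime_dl classifier imports used above plus load_italy_power_demand from sktime.datasets, and the tiny subset sizes are illustrative only:

from sktime.datasets import load_italy_power_demand

X_train, y_train = load_italy_power_demand(split="train", return_X_y=True)
X_test, y_test = load_italy_power_demand(split="test", return_X_y=True)

# fit each quick variant on a tiny slice and report its test accuracy
for name, clf in construct_all_classifiers(nb_epochs=2).items():
    clf.fit(X_train[:10], y_train[:10])
    print(name, clf.score(X_test[:10], y_test[:10]))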
Example #2
    def __init__(
            self,
            base_model=CNNClassifier(),
            param_grid=dict(
                kernel_size=[3, 7], avg_pool_size=[2, 3],
                nb_conv_layers=[1, 2],
            ),
            search_method="grid",
            cv_folds=5,
            random_state=0,
            verbose=False,
            model_name=None,
            model_save_directory=None,
    ):
        """
        :param base_model: an implementation of BaseDeepLearner, the model
        to tune :param param_grid: dict, parameter names corresponding to
        parameters of the base_model, mapped to values to search over :param
        search_method: string out of ['grid', 'random'], how to search over
        the param_grid :param cv_folds: int, number of cross validation
        folds to use in evaluation of each parameter set :param random_state:
        int, seed to any needed random actions :param verbose: boolean,
        whether to output extra information :param model_name: string,
        the name of this model for printing and file writing purposes. if
        None, will default to 'tuned_' + base_model.model_name :param
        model_save_directory: string, if not None; location to save the
        tuned, trained keras model in hdf5 format
        """

        self.verbose = verbose

        if model_name is None:
            self.model_name = "tuned_" + base_model.model_name
        else:
            self.model_name = model_name

        self.model_save_directory = model_save_directory

        self.random_state = random_state
        self.random_state = np.random.RandomState(self.random_state)
        self._is_fitted = False

        self.base_model = base_model

        # search parameters
        self.param_grid = param_grid
        self.cv_folds = cv_folds
        self.search_method = search_method
        self.n_jobs = 1  # assuming networks themselves are threaded/on gpu,
        # not providing this option for now

        # search results (computed in fit)
        self.grid_history = None
        self.grid = None
        self.model = (
            None  # the best _keras model_, not the sktime classifier object
        )
        self.tuned_params = None
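A minimal construction sketch using the random-search option documented above; the parameter values are illustrative, the grid keys are the CNNClassifier constructor parameters from the default param_grid, and the fit call mirrors test_basic_tuning further down:

from sktime.datasets import load_italy_power_demand

X_train, y_train = load_italy_power_demand(split="train", return_X_y=True)

# illustrative values only; tuned_params is populated during fit
tuner = TunedDeepLearningClassifier(
    base_model=CNNClassifier(nb_epochs=20),
    param_grid=dict(kernel_size=[3, 7], avg_pool_size=[2, 3]),
    search_method="random",
    cv_folds=3,
    random_state=1,
)
tuner.fit(X_train[:20], y_train[:20])
print(tuner.tuned_params)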
Example #3
def forecasting_example():
    name = "C:\\Users\\Tony\\OneDrive - University of East Anglia\\Research\\Alex " \
           "Mcgregor Grant\\randomNoise.csv"


    # note: np.float and read_csv(squeeze=True) were removed from recent
    # numpy/pandas releases, hence np.float64 and .squeeze("columns")
    y = pd.read_csv(name, index_col=0, dtype={1: np.float64}).squeeze("columns")
    forecast_horizon = np.arange(1, 2)
    forecaster = NaiveForecaster(strategy="last")
    forecaster.fit(y)
    y_pred = forecaster.predict(forecast_horizon)
    print("Next predicted value = ",y_pred)
    # https://github.com/alan-turing-institute/sktime/blob/main/examples/01_forecasting.ipynb
    # Reduce to a regression problem through windowing:
    # transform forecasting into regression.

    np_y = y.to_numpy()
    v = sliding_window_view(np_y, 100)
    print("Window shape =", v.shape)
    v_3d = np.expand_dims(v, axis=1)
    print("3D window shape =", v_3d.shape)
    z = v[:, 2]
    print(z.shape)
    regressor = CNNRegressor()
    classifier = CNNClassifier()
    regressor.fit(v_3d, z)
    p = regressor.predict(v_3d)
    # print(p)
    d = np.array([0.0])
    c = np.digitize(z, d)
    classifier = RandomIntervalSpectralForest()
    classifier.fit(v_3d, c)
    cls = classifier.predict(v_3d)
    print(cls)
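To make the windowing step above concrete, a small self-contained numpy sketch (toy data rather than the CSV used in the function) showing the shapes produced by sliding_window_view, expand_dims and digitize:

import numpy as np
from numpy.lib.stride_tricks import sliding_window_view

series = np.arange(6.0)                    # toy series: [0, 1, 2, 3, 4, 5]
windows = sliding_window_view(series, 3)   # shape (4, 3): one row per length-3 window
panel = np.expand_dims(windows, axis=1)    # shape (4, 1, 3): (cases, channels, length)
labels = np.digitize(windows[:, 2], np.array([0.0]))  # 1 where the value >= 0, else 0
print(windows.shape, panel.shape, labels)  # (4, 3) (4, 1, 3) [1 1 1 1]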
Example #4
def allComparisonExperiments():
    data_dir = sys.argv[1]
    res_dir = sys.argv[2]

    classifier_names = [
        "dl4tsc_cnn",
        "dl4tsc_encoder",
        "dl4tsc_fcn",
        "dl4tsc_mcdcnn",
        "dl4tsc_mcnn",
        "dl4tsc_mlp",
        "dl4tsc_resnet",
        "dl4tsc_tlenet",
        "dl4tsc_twiesn",
        "dl4tsc_tunedcnn",
        "inception0",
        "inception1",
        "inception2",
        "inception3",
        "inception4",
        "inceptiontime",
    ]

    classifiers = [
        CNNClassifier(),
        EncoderClassifier(),
        FCNClassifier(),
        MCDCNNClassifier(),
        MCNNClassifier(),
        MLPClassifier(),
        ResNetClassifier(),
        TLENETClassifier(),
        TWIESNClassifier(),
        InceptionTimeClassifier(),
    ]

    num_folds = 30

    for f in range(num_folds):
        for d in ucr112dsets:
            for cname, c in zip(classifier_names, classifiers):
                print(cname, d, f)
                try:
                    dlExperiment(data_dir, res_dir, cname, d, f, classifier=c)
                    gc.collect()
                    keras.backend.clear_session()
                except Exception:
                    print("\n\n FAILED: ", sys.exc_info()[0], "\n\n")
Example #5
def test_is_fitted(network=CNNClassifier()):
    """
    testing that the networks correctly recognise when they are not fitted
    """

    X_train, y_train = load_italy_power_demand(split="train", return_X_y=True)

    if isinstance(network, BaseRegressor):
        # Create some regression values, taken from test_regressor
        y_train = np.zeros(len(y_train))
        for i in range(len(X_train)):
            y_train[i] = X_train.iloc[i].iloc[0].iloc[0]

    # try to predict without fitting: SHOULD fail
    with pytest.raises(NotFittedError):
        network.predict(X_train[:10])
Example #6
def test_basic_tuning(network=TunedDeepLearningClassifier(
    base_model=CNNClassifier(),
    param_grid=dict(nb_epochs=[50, 100], ),
    cv_folds=3,
)):
    """
    just a super basic test of the tuner
    """

    print("Start test_basic_tuning()")

    X_train, y_train = load_italy_power_demand(split="train", return_X_y=True)
    X_test, y_test = load_italy_power_demand(split="test", return_X_y=True)

    network.fit(X_train[:10], y_train[:10])

    print(network.score(X_test[:10], y_test[:10]))
    print("End test_basic_tuning()")
Example #7
def set_classifier(cls, resampleId=None):
    """Construct a classifier.

    Basic way of creating the classifier to build using the default settings. This
    setup is to help with batch jobs for multiple problems, to facilitate easy
    reproducibility. You can set up a bespoke classifier in many other ways.

    Parameters
    ----------
    cls: String indicating which classifier you want
    resampleId: classifier random seed

    Returns
    -------
    A classifier.
    """
    name = cls.lower()
    # Convolutional
    if name == "cnn" or name == "cnnclassifier":
        return CNNClassifier(random_state=resampleId)
    elif name == "encode":
        return EncoderClassifier()
    elif name == "fcn":
        return FCNClassifier()
    elif name == "inceptiontime":
        return InceptionTimeClassifier()
    elif name == "mcdcnn":
        return MCDCNNClassifier()
    elif name == "mcnn":
        return MCNNClassifier()
    elif name == "mlp":
        return MLPClassifier()
    elif name == "resnet":
        return ResNetClassifier()
    elif name == "tlenet":
        return TLENETClassifier()
    elif name == "twiesn":
        return TWIESNClassifier()
    else:
        raise Exception("UNKNOWN CLASSIFIER")
Example #8
def test_basic_inmem(network=DeepLearnerEnsembleClassifier(
    base_model=CNNClassifier(nb_epochs=50),
    nb_iterations=2,
    keep_in_memory=True,
    model_save_directory=None,
    verbose=True,
)):
    """
    just a super basic test with gunpoint,
        load data,
        construct classifier,
        fit,
        score
    """

    print("Start test_basic()")

    X_train, y_train = load_italy_power_demand(split="train", return_X_y=True)
    X_test, y_test = load_italy_power_demand(split="test", return_X_y=True)

    network.fit(X_train[:10], y_train[:10])

    print(network.score(X_test[:10], y_test[:10]))
    print("End test_basic()")
Example #9
def test_basic_saving(network=DeepLearnerEnsembleClassifier(
    base_model=CNNClassifier(nb_epochs=50),
    nb_iterations=2,
    keep_in_memory=False,
    model_save_directory="testResultsDELETE",
    verbose=True,
)):
    """
    just a super basic test with gunpoint,
        load data,
        construct classifier,
        fit,
        score
    """

    print("Start test_basic()")

    path = Path(network.model_save_directory)
    # create the directory if needed; it may already exist if a previous test
    # run failed before cleaning up
    if not path.exists():
        path.mkdir()

    X_train, y_train = load_italy_power_demand(split="train", return_X_y=True)
    X_test, y_test = load_italy_power_demand(split="test", return_X_y=True)

    network.fit(X_train[:10], y_train[:10])

    print(network.score(X_test[:10], y_test[:10]))

    (path /
     (network.base_model.model_name + "_0.hdf5")).unlink()  # delete file
    (path /
     (network.base_model.model_name + "_1.hdf5")).unlink()  # delete file
    path.rmdir()  # directory should now be empty, fails if not

    print("End test_basic()")
Example #10
def test_cnn_accuracy():
    accuracy_test(
        network=CNNClassifier(),
        lower=0.955 - ACCURACY_DEVIATION_THRESHOLD * 0.004,
    )
Example #11
def setNetwork(data_dir, res_dir, cls, dset, fold, classifier=None):
    """
    Basic way of determining the classifier to build. To differentiate settings just and another elif. So, for example, if
    you wanted tuned TSF, you just pass TuneTSF and set up the tuning mechanism in the elif.
    This may well get superceded, it is just how e have always done it
    :param cls: String indicating which classifier you want
    :return: A classifier.

    """

    model_save_dir = res_dir + cls + "/Models/" + dset + "/"
    model_name = cls + "_" + dset + "_" + str(fold)

    try:
        os.makedirs(model_save_dir)
    except OSError:
        pass  # makedirs raises OSError if the path already exists

    fold = int(fold)
    if cls.lower() == "cnn":
        return CNNClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "encoder":
        return EncoderClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "fcn":
        return FCNClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "mcdcnn":
        return MCDCNNClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "mcnn":
        return MCNNClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "mlp":
        return MLPClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "resnet":
        return ResNetClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "tlenet":
        return TLENETClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "twiesn":
        return TWIESNClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "inception0":
        return InceptionTimeClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "inception1":
        return InceptionTimeClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "inception2":
        return InceptionTimeClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "inception3":
        return InceptionTimeClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "inception4":
        return InceptionTimeClassifier(random_state=fold)
    elif cls.lower() == "cntc":
        return CNTCClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "lstmfcn":
        return LSTMFCNClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "tapnet":
        return TapNetClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower()=="macnn":
        return MACNNClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "inceptiontime":
        return EnsembleFromFileClassifier(
            res_dir,
            dset,
            random_state=fold,
            network_name="inception",
            nb_iterations=5,
        )
    else:
        raise Exception("UNKNOWN CLASSIFIER: " + cls)
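A minimal usage sketch for setNetwork; the directory paths and dataset name are hypothetical, and fitting the returned classifier should write the trained keras model under model_save_directory as an hdf5 file, as exercised in the saving test above:

# hypothetical paths and dataset, illustration only
clf = setNetwork("C:/Data/", "C:/Results/", "cnn", "ItalyPowerDemand", 0)
X_train, y_train = load_italy_power_demand(split="train", return_X_y=True)
clf.fit(X_train, y_train)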