Example #1
def test_mcnn_accuracy():
    # Low accuracy is consistent with published results
    # https://github.com/hfawaz/dl-4-tsc/blob/master/README.md
    accuracy_test(
        network=MCNNClassifier(),
        lower=0.5 - ACCURACY_DEVIATION_THRESHOLD * 0.002,
        upper=0.5 + ACCURACY_DEVIATION_THRESHOLD * 0.002,
    )
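
For context, a helper like accuracy_test is assumed here to fit the network on a small benchmark problem and assert that the resulting accuracy lies inside the given band. A minimal sketch under that assumption (the dataset loader below is illustrative and not taken from the snippet above; this is not the library's actual helper):

from sklearn.metrics import accuracy_score
from sktime.datasets import load_italy_power_demand  # assumed small benchmark dataset


def accuracy_test(network, lower=0.0, upper=1.0):
    # Hypothetical helper: fit the network and check its test accuracy band.
    X_train, y_train = load_italy_power_demand(split="train", return_X_y=True)
    X_test, y_test = load_italy_power_demand(split="test", return_X_y=True)
    network.fit(X_train, y_train)
    accuracy = accuracy_score(y_test, network.predict(X_test))
    assert lower <= accuracy <= upper, (
        f"accuracy {accuracy:.3f} outside [{lower:.3f}, {upper:.3f}]"
    )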
Example #2
def construct_all_classifiers(nb_epochs=None):
    """
    Creates a list of all classification networks ready for testing

    :param nb_epochs: int, if not None, value shall be set for all networks
    that accept it
    :return: map of strings to sktime_dl BaseDeepRegressor implementations
    """
    if nb_epochs is not None:
        # potentially quicker versions for tests
        return {
            'CNNClassifier_quick':
            CNNClassifier(nb_epochs=nb_epochs),
            'EncoderClassifier_quick':
            EncoderClassifier(nb_epochs=nb_epochs),
            'FCNClassifier_quick':
            FCNClassifier(nb_epochs=nb_epochs),
            'MCDCNNClassifier_quick':
            MCDCNNClassifier(nb_epochs=nb_epochs),
            'MCNNClassifier_quick':
            MCNNClassifier(nb_epochs=nb_epochs),
            'MLPClassifier_quick':
            MLPClassifier(nb_epochs=nb_epochs),
            'ResNetClassifier_quick':
            ResNetClassifier(nb_epochs=nb_epochs),
            'TLENETClassifier_quick':
            TLENETClassifier(nb_epochs=nb_epochs),
            'TWIESNClassifier_quick':
            TWIESNClassifier(),  # TWIESN does not take an nb_epochs parameter
            'InceptionTimeClassifier_quick':
            InceptionTimeClassifier(nb_epochs=nb_epochs),
        }
    else:
        # the 'literature-conforming' versions
        return {
            'CNNClassifier': CNNClassifier(),
            'EncoderClassifier': EncoderClassifier(),
            'FCNClassifier': FCNClassifier(),
            'MCDCNNClassifier': MCDCNNClassifier(),
            'MCNNClassifier': MCNNClassifier(),
            'MLPClassifier': MLPClassifier(),
            'ResNetClassifier': ResNetClassifier(),
            'TLENETClassifier': TLENETClassifier(),
            'TWIESNClassifier': TWIESNClassifier(),
            'InceptionTimeClassifier': InceptionTimeClassifier(),
        }
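
A quick way to exercise the returned dictionary is to loop over its entries, fit each network on a small problem, and print the test accuracy. A rough sketch, assuming a small sktime benchmark dataset as input (the loader below is illustrative, not part of the snippet above):

from sklearn.metrics import accuracy_score
from sktime.datasets import load_italy_power_demand  # assumed small benchmark dataset

# nb_epochs=1 gives the quick versions; accuracies here are smoke-test values only.
networks = construct_all_classifiers(nb_epochs=1)

X_train, y_train = load_italy_power_demand(split="train", return_X_y=True)
X_test, y_test = load_italy_power_demand(split="test", return_X_y=True)

for name, network in networks.items():
    network.fit(X_train, y_train)
    accuracy = accuracy_score(y_test, network.predict(X_test))
    print(f"{name}: {accuracy:.3f}")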
Example #3
def setNetwork(data_dir, res_dir, cls, dset, fold, classifier=None):
    """
    Basic way of determining the classifier to build. To differentiate settings just and another elif. So, for example, if
    you wanted tuned TSF, you just pass TuneTSF and set up the tuning mechanism in the elif.
    This may well get superceded, it is just how e have always done it
    :param cls: String indicating which classifier you want
    :return: A classifier.

    """

    model_save_dir = res_dir + cls + "/Models/" + dset + "/"
    model_name = cls + "_" + dset + "_" + str(fold)

    try:
        os.makedirs(model_save_dir)
    except OSError:
        pass  # makedirs raises OSError if the path already exists

    fold = int(fold)
    if cls.lower() == "cnn":
        return CNNClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "encoder":
        return EncoderClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "fcn":
        return FCNClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "mcdcnn":
        return MCDCNNClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "mcnn":
        return MCNNClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "mlp":
        return MLPClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "resnet":
        return ResNetClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "tlenet":
        return TLENETClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "twiesn":
        return TWIESNClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "inception0":
        return InceptionTimeClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "inception1":
        return InceptionTimeClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "inception2":
        return InceptionTimeClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "inception3":
        return InceptionTimeClassifier(random_state=fold, model_name=model_name, model_save_directory=model_save_dir)
    elif cls.lower() == "inception4":
        return InceptionTimeClassifier(random_state=fold)
    elif cls.lower() == "inceptiontime":
        return EnsembleFromFileClassifier(
            res_dir,
            dset,
            random_state=fold,
            network_name="inception",
            nb_iterations=5,
        )
    else:
        raise Exception("UNKNOWN CLASSIFIER: " + cls)
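
As a usage sketch, setNetwork could be called like this; the directory paths and dataset name below are placeholders, not values taken from the snippet above:

# Hypothetical call: build a ResNet for fold 0 of a UCR-style dataset.
clf = setNetwork(
    data_dir="/path/to/data/",
    res_dir="/path/to/results/",
    cls="resnet",
    dset="GunPoint",
    fold=0,
)
# clf is a ResNetClassifier configured to save its model as
# "resnet_GunPoint_0" under /path/to/results/resnet/Models/GunPoint/.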
Example #4
def allComparisonExperiments():
    data_dir = sys.argv[1]
    res_dir = sys.argv[2]

    classifier_names = [
        "dl4tsc_cnn",
        "dl4tsc_encoder",
        "dl4tsc_fcn",
        "dl4tsc_mcdcnn",
        "dl4tsc_mcnn",
        "dl4tsc_mlp",
        "dl4tsc_resnet",
        "dl4tsc_tlenet",
        "dl4tsc_twiesn",
        "dl4tsc_tunedcnn",
        "inception0",
        "inception1",
        "inception2",
        "inception3",
        "inception4",
        "inceptiontime",
    ]

    classifiers = [
        CNNClassifier(),
        EncoderClassifier(),
        FCNClassifier(),
        MCDCNNClassifier(),
        MCNNClassifier(),
        MLPClassifier(),
        ResNetClassifier(),
        TLENETClassifier(),
        TWIESNClassifier(),
        InceptionTimeClassifier(),
    ]

    num_folds = 30

    for f in range(num_folds):
        for d in ucr112dsets:
                # NOTE: zip truncates to the shorter list, so only the first
                # len(classifiers) names above are actually paired with a network
                for cname, c in zip(classifier_names, classifiers):
                print(cname, d, f)
                try:
                    dlExperiment(data_dir, res_dir, cname, d, f, classifier=c)
                    gc.collect()
                    keras.backend.clear_session()
                except Exception:
                    print("\n\n FAILED: ", sys.exc_info()[0], "\n\n")

def set_classifier(cls, resampleId=None):
    """Construct a classifier.

    Basic way of creating the classifier to build using the default settings. This
    set up is to help with batch jobs for multiple problems to facilitate easy
    reproducability. You can set up bespoke classifier in many other ways.

    Parameters
    ----------
    cls: String indicating which classifier you want
    resampleId: classifier random seed

    Return
    ------
    A classifier.
    """
    name = cls.lower()
    # Convolutional
    # NOTE: as written, only CNNClassifier receives resampleId as its random seed
    if name == "cnn" or name == "cnnclassifier":
        return CNNClassifier(random_state=resampleId)
    elif name == "encode":
        return EncoderClassifier()
    elif name == "fcn":
        return FCNClassifier()
    elif name == "inceptiontime":
        return InceptionTimeClassifier()
    elif name == "mcdcnn":
        return MCDCNNClassifier()
    elif name == "mcnn":
        return MCNNClassifier()
    elif name == "mlp":
        return MLPClassifier()
    elif name == "resnet":
        return ResNetClassifier()
    elif name == "tlenet":
        return TLENETClassifier()
    elif name == "twiesn":
        return TWIESNClassifier()
    else:
        raise Exception("UNKNOWN CLASSIFIER: " + cls)
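
As a usage sketch, set_classifier resolves names case-insensitively and fails fast on anything it does not recognise:

# Hypothetical calls: the name is lower-cased internally, so "CNN" also works.
clf = set_classifier("CNN", resampleId=1)   # CNNClassifier(random_state=1)
clf_default = set_classifier("mlp")         # MLPClassifier with default settings

# Unknown names raise immediately rather than silently falling back.
try:
    set_classifier("not_a_network")
except Exception as err:
    print(err)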