def setNetwork(data_dir, res_dir, cls, dset, fold, classifier=None):
    """Construct the classifier to build for one experiment fold.

    Basic way of determining the classifier to build. To differentiate
    settings just add another elif. So, for example, if you wanted tuned
    TSF, you just pass TuneTSF and set up the tuning mechanism in the
    elif. This may well get superseded, it is just how we have always
    done it.

    :param data_dir: unused here; kept so the signature matches callers
    :param res_dir: string, root results directory; models are saved
        under <res_dir>/<cls>/Models/<dset>/
    :param cls: string indicating which classifier you want
    :param dset: string, dataset name (used in the model name and path)
    :param fold: fold/resample id, also used as the random seed
    :param classifier: unused; kept for interface compatibility
    :return: a classifier
    :raises Exception: if cls is not a recognised classifier name
    """
    model_save_dir = res_dir + cls + "/Models/" + dset + "/"
    model_name = cls + "_" + dset + "_" + str(fold)

    # exist_ok replaces the old try/except around an already-existing path
    os.makedirs(model_save_dir, exist_ok=True)

    fold = int(fold)
    name = cls.lower()

    # every network is seeded with the fold and saves under the same scheme
    kwargs = dict(
        random_state=fold,
        model_name=model_name,
        model_save_directory=model_save_dir,
    )

    if name == "cnn":
        return CNNClassifier(**kwargs)
    elif name == "encoder":
        return EncoderClassifier(**kwargs)
    elif name == "fcn":
        return FCNClassifier(**kwargs)
    elif name == "mcdcnn":
        return MCDCNNClassifier(**kwargs)
    elif name == "mcnn":
        return MCNNClassifier(**kwargs)
    elif name == "mlp":
        return MLPClassifier(**kwargs)
    elif name == "resnet":
        return ResNetClassifier(**kwargs)
    elif name == "tlenet":
        return TLENETClassifier(**kwargs)
    elif name == "twiesn":
        return TWIESNClassifier(**kwargs)
    elif name in ("inception0", "inception1", "inception2", "inception3",
                  "inception4"):
        # NOTE(review): the original passed only random_state for
        # "inception4", so its model was never saved; that looked like an
        # oversight and all five ensemble members are now treated uniformly.
        return InceptionTimeClassifier(**kwargs)
    elif name == "inceptiontime":
        # ensemble built from the five inceptionN results already on disk
        return EnsembleFromFileClassifier(
            res_dir,
            dset,
            random_state=fold,
            network_name="inception",
            nb_iterations=5,
        )
    else:
        raise Exception("UNKNOWN CLASSIFIER: " + cls)
def construct_all_classifiers(nb_epochs=None):
    """Create a list of all classification networks ready for testing.

    :param nb_epochs: int, if not None, value shall be set for all
        networks that accept it
    :return: map of strings to sktime_dl BaseDeepRegressor implementations
    """
    # (name, constructor, accepts an nb_epochs argument)
    factories = [
        ("CNNClassifier", CNNClassifier, True),
        ("EncoderClassifier", EncoderClassifier, True),
        ("FCNClassifier", FCNClassifier, True),
        ("MCDCNNClassifier", MCDCNNClassifier, True),
        ("MCNNClassifier", MCNNClassifier, True),
        ("MLPClassifier", MLPClassifier, True),
        ("ResNetClassifier", ResNetClassifier, True),
        ("TLENETClassifier", TLENETClassifier, True),
        ("TWIESNClassifier", TWIESNClassifier, False),
        ("InceptionTimeClassifier", InceptionTimeClassifier, True),
    ]

    if nb_epochs is None:
        # the 'literature-conforming' versions
        return {name: make() for name, make, _ in factories}

    # potentially quicker versions for tests; TWIESN takes no nb_epochs
    return {
        name + "_quick": (make(nb_epochs=nb_epochs) if takes_epochs else make())
        for name, make, takes_epochs in factories
    }
def __init__(
    self,
    base_model=None,
    nb_iterations=5,
    keep_in_memory=False,
    random_seed=0,
    verbose=False,
    model_name=None,
    model_save_directory=None,
):
    """
    :param base_model: an implementation of BaseDeepClassifier, the model
        to ensemble over. MUST NOT have had fit called on it. If None, a
        fresh InceptionTimeClassifier is constructed (the historical
        default)
    :param nb_iterations: int, the number of models to ensemble over
    :param keep_in_memory: boolean, if True, all models will be kept in
        memory while fitting/predicting. Otherwise, models will be
        written to/read from file individually while fitting/predicting.
        model_name and model_save_directory must be set in this case
    :param random_seed: int, seed to any needed random actions
    :param verbose: boolean, whether to output extra information
    :param model_name: string, the name of this model for printing and
        file writing purposes. if None, will default to
        base_model.model_name + '_ensemble'
    :param model_save_directory: string, if not None; location to save
        the trained BASE MODELS of the ensemble
    """
    # BUG FIX: the original signature used
    # base_model=InceptionTimeClassifier(), a mutable default evaluated
    # once at definition time — every ensemble constructed without an
    # explicit base_model shared the SAME instance, so after one of them
    # was fitted, subsequent constructions raised the ValueError below.
    if base_model is None:
        base_model = InceptionTimeClassifier()

    self.verbose = verbose

    if model_name is None:
        self.model_name = base_model.model_name + "_ensemble"
    else:
        self.model_name = model_name

    self.model_save_directory = model_save_directory
    self._is_fitted = False

    if base_model.is_fitted:
        raise ValueError(
            "base_model to ensemble over cannot have already been "
            "fit(...) to data")

    self.base_model = base_model
    self.nb_iterations = nb_iterations
    self.keep_in_memory = keep_in_memory

    # calced in fit
    self.classes_ = None
    self.nb_classes = -1
    self.skdl_models = []
    self.keras_models = []

    self.random_seed = random_seed
    self.random_state = random_seed
def _benchmark_network(network, train_x, train_y, test_x, test_y,
                       landmarker_output_dir, db_name, suffix):
    """Fit one network, score it, and save [accuracy, minutes] to a file."""
    start_time = time.time()
    network.fit(train_x, train_y)
    accu = network.score(test_x, test_y)
    elapsed = time.time() - start_time
    print("--- %s seconds ---" % elapsed)
    np.savetxt("%s/%s" % (landmarker_output_dir, db_name) + suffix,
               np.array([accu, elapsed / 60]))


def create_landmarkers_deep(db_name, subsample_output_dir, landmarker_output_dir):
    """Compute deep-learning landmarkers for one subsampled dataset.

    Loads <db_name>.arff, runs a 2-fold stratified split, and for each
    fold fits ResNet and InceptionTime for 200 epochs, writing accuracy
    and runtime (minutes) to text files in landmarker_output_dir.

    :param db_name: string, dataset name (also the arff file stem)
    :param subsample_output_dir: string, directory containing <db_name>/<db_name>.arff
    :param landmarker_output_dir: string, directory to write result files to
    """
    data = arff.loadarff("%s/%s/%s.arff" % (subsample_output_dir, db_name, db_name))
    df = pd.DataFrame(data[0])
    # arff loads nominal attributes as bytes; decode the class column
    df['target'] = df['target'].str.decode("utf-8")
    data = df.values[:, :-1]
    data = np.asarray(data).astype(np.float32)
    classes = df.values[:, -1]

    skf = StratifiedKFold(n_splits=2)
    # (removed dead code: a discarded skf.get_n_splits(...) result and a
    # second StratifiedKFold that was constructed and immediately thrown away)
    for train_index, test_index in skf.split(data, classes):
        print("splitting")
        train_x, test_x = data[train_index, :], data[test_index, :]
        train_y, test_y = classes[train_index], classes[test_index]

        # wrap each instance's series into the nested-DataFrame format
        X = pd.DataFrame()
        X["dim_0"] = [pd.Series(train_x[x, :]) for x in range(len(train_x))]
        train_x = X
        X = pd.DataFrame()
        X["dim_0"] = [pd.Series(test_x[x, :]) for x in range(len(test_x))]
        test_x = X

        _benchmark_network(ResNetClassifier(nb_epochs=200), train_x, train_y,
                           test_x, test_y, landmarker_output_dir, db_name,
                           "_Resnet.txt")
        _benchmark_network(InceptionTimeClassifier(nb_epochs=200), train_x,
                           train_y, test_x, test_y, landmarker_output_dir,
                           db_name, "_Inception.txt")
def allComparisonExperiments():
    """Run every classifier over the UCR datasets for 30 resamples.

    Data and results directories are read from sys.argv[1] / sys.argv[2].
    A failure on one (classifier, dataset, fold) combination is printed
    and skipped so the batch keeps running.
    """
    data_dir = sys.argv[1]
    res_dir = sys.argv[2]

    # NOTE(review): 16 names but only 10 classifier instances — zip below
    # silently drops the last six names and pairs "dl4tsc_tunedcnn" with
    # InceptionTimeClassifier. Almost certainly not intended; verify
    # against how dlExperiment consumes cname/classifier before relying
    # on these results.
    classifier_names = [
        "dl4tsc_cnn",
        "dl4tsc_encoder",
        "dl4tsc_fcn",
        "dl4tsc_mcdcnn",
        "dl4tsc_mcnn",
        "dl4tsc_mlp",
        "dl4tsc_resnet",
        "dl4tsc_tlenet",
        "dl4tsc_twiesn",
        "dl4tsc_tunedcnn",
        "inception0",
        "inception1",
        "inception2",
        "inception3",
        "inception4",
        "inceptiontime",
    ]
    classifiers = [
        CNNClassifier(),
        EncoderClassifier(),
        FCNClassifier(),
        MCDCNNClassifier(),
        MCNNClassifier(),
        MLPClassifier(),
        ResNetClassifier(),
        TLENETClassifier(),
        TWIESNClassifier(),
        InceptionTimeClassifier(),
    ]

    num_folds = 30
    for f in range(num_folds):
        for d in ucr112dsets:
            for cname, c in zip(classifier_names, classifiers):
                print(cname, d, f)
                try:
                    dlExperiment(data_dir, res_dir, cname, d, f, classifier=c)
                    # free GPU/graph memory between runs
                    gc.collect()
                    keras.backend.clear_session()
                # was a bare except, which also swallowed
                # KeyboardInterrupt/SystemExit and made the batch
                # impossible to stop cleanly
                except Exception:
                    print("\n\n FAILED: ", sys.exc_info()[0], "\n\n")
def set_classifier(cls, resampleId=None):
    """Construct a classifier.

    Basic way of creating the classifier to build using the default
    settings. This set up is to help with batch jobs for multiple
    problems to facilitate easy reproducibility. You can set up bespoke
    classifiers in many other ways.

    Parameters
    ----------
    cls: String indicating which classifier you want
    resampleId: classifier random seed

    Return
    ------
    A classifier.
    """
    name = cls.lower()
    # BUG FIX: resampleId was previously forwarded only to the CNN, so
    # runs of every other network ignored the seed the docstring
    # promises; it is now passed to all of them (setNetwork in this file
    # already seeds all of these classifiers the same way).
    # Convolutional
    if name == "cnn" or name == "cnnclassifier":
        return CNNClassifier(random_state=resampleId)
    elif name == "encode" or name == "encoder":
        # "encode" kept for backward compatibility; it looks like a typo
        # for "encoder", which setNetwork in this file uses
        return EncoderClassifier(random_state=resampleId)
    elif name == "fcn":
        return FCNClassifier(random_state=resampleId)
    elif name == "inceptiontime":
        return InceptionTimeClassifier(random_state=resampleId)
    elif name == "mcdcnn":
        return MCDCNNClassifier(random_state=resampleId)
    elif name == "mcnn":
        return MCNNClassifier(random_state=resampleId)
    elif name == "mlp":
        return MLPClassifier(random_state=resampleId)
    elif name == "resnet":
        return ResNetClassifier(random_state=resampleId)
    elif name == "tlenet":
        return TLENETClassifier(random_state=resampleId)
    elif name == "twiesn":
        return TWIESNClassifier(random_state=resampleId)
    else:
        raise Exception("UNKNOWN CLASSIFIER")
def test_inception_accuracy():
    """Check a default InceptionTimeClassifier against the accuracy harness.

    Delegates to accuracy_test with lower=0.96 — presumably a minimum
    accepted score on the harness's fixed problem; confirm against
    accuracy_test's contract.
    """
    accuracy_test(network=InceptionTimeClassifier(), lower=0.96)
X_train = np.reshape(X_train, X_train.shape + (1, )) X_test = np.reshape(X_test, X_test.shape + (1, )) print('Train: ' + str(X_train.shape)) print('Test: ' + str(X_test.shape)) names = ['CNN', 'FCN', 'MLP', 'InceptionTime', 'ResNet', 'Encoder'] epochs = 500 batch = 16 classifiers = [ CNNClassifier(nb_epochs=epochs, batch_size=batch, verbose=False), FCNClassifier(nb_epochs=epochs, batch_size=batch, verbose=False), MLPClassifier(nb_epochs=epochs, batch_size=batch, verbose=False), InceptionTimeClassifier(nb_epochs=epochs, verbose=False), ResNetClassifier(nb_epochs=epochs, batch_size=batch, verbose=False), EncoderClassifier(nb_epochs=epochs, batch_size=batch, verbose=False), ] for name, clf in zip(names, classifiers): start = timeit.default_timer() clf.fit(X_train, y_train) y_pred = clf.predict(X_test) duration = timeit.default_timer() - start accuracy = accuracy_score(y_test, y_pred) recall = recall_score(y_test, y_pred) precision = precision_score(y_test, y_pred) print(str(' - '+ name + ': \n accuracy: ' + '%.4f' % accuracy + ' | recall: ' + '%.4f' % recall \ + ' | precision: ' + '%.4f' % precision + ' | duration: ' + '%.4f' % duration)) '''