Example 1
0
    def batch_learning(train_data, test_data, args):
        """Train a Gamma-GWR network on train_data, then evaluate it.

        Args:
            train_data: raw training samples in the format expected by
                Utilities.describe_data.
            test_data: raw test samples in the same format.
            args: parsed arguments providing dataset_name, num_classes,
                num_weights, epochs and threshold.
        """
        network = GammaGWR()
        helpers = Utilities()

        # Start from an empty output directory for this run's artifacts.
        helpers.remove_files(FILES_FOLDER)

        # --- Training phase ---
        dataset_tr, dim_tr, labels_tr = helpers.describe_data(
            train_data, args.dataset_name)
        network.initNetwork(dim_tr,
                            numClasses=args.num_classes,
                            numWeights=args.num_weights)
        network.train(dataset_tr,
                      labels_tr,
                      maxEpochs=args.epochs,
                      insertionT=args.threshold,
                      beta=CONTEXT_BETA,
                      epsilon_b=LEARNING_RATE_B,
                      epsilon_n=LEARNING_RATE_N)

        # --- Evaluation phase: score both the train and the test split ---
        dataset_te, dim_te, labels_te = helpers.describe_data(
            test_data, args.dataset_name)
        network.evaluate_model(network, dataset_tr, labels_tr, mode='train')
        network.evaluate_model(network, dataset_te, labels_te, mode='test')

        # Persist metrics; empty suffix since batch mode has a single round.
        helpers.save_predicted_metrics(network, '')
Example 2
0
            # Per-column min-max scaling: map feature i of dataSet into
            # [0, 1] and store the result in oDataSet. (The enclosing loop
            # over columns i starts outside this view.)
            maxColumn = max(dataSet[:, i])
            minColumn = min(dataSet[:, i])
            for j in range(0, size[0]):
                oDataSet[j, i] = (dataSet[j, i] - minColumn) / (maxColumn -
                                                                minColumn)

    # Restore a previously saved network by unpickling its attribute dict.
    # NOTE(review): text mode ('r') together with cPickle suggests Python 2;
    # on Python 3 pickled data must be read with 'rb' — confirm target version.
    # SECURITY: cPickle.loads can execute arbitrary code if the .network
    # file comes from an untrusted source.
    if (importFlag):
        file = open("myGammaGWR" + '.network', 'r')
        dataPickle = file.read()
        file.close()
        myGammaGWR = GammaGWR()
        myGammaGWR.__dict__ = cPickle.loads(dataPickle)

    # Train a fresh Gamma-GWR network on the normalized data set.
    if (trainFlag):
        myGammaGWR = GammaGWR()
        myGammaGWR.initNetwork(dimension, numWeights=2, numClasses=3)
        myGammaGWR.train(oDataSet,
                         labelSet,
                         maxEpochs=25,
                         insertionT=0.85,
                         beta=0.5,
                         epsilon_b=0.2,
                         epsilon_n=0.001)

    # Serialize the trained network's attributes to disk.
    # NOTE(review): 'w' (text mode) for pickled output — should be 'wb' on
    # Python 3; see the import note above on the assumed Python version.
    if (saveFlag):
        file = open("myGammaGWR" + '.network', 'w')
        file.write(cPickle.dumps(myGammaGWR.__dict__))
        file.close()

    # Inference: best-matching-unit weights, activations and labels.
    if (testFlag):
        bmuWeights, bmuActivation, bmuLabel = myGammaGWR.predict(oDataSet)
Example 3
0
    def iterative_learning(train_data, test_data, args, category_column):
        """Incrementally train a Gamma-GWR network on mini-batches of object
        classes, evaluating after every batch.

        The unique class values found in ``category_column`` are shuffled
        and presented to the network in groups of ``mini_batch_size`` over
        ``iterations`` rounds. Per-round train/test accuracies and the
        neuron counts written by ``save_predicted_metrics`` are pickled to
        ./saved_data/.

        Args:
            train_data: raw training samples, one row per sample.
            test_data: raw test samples in the same format.
            args: parsed arguments providing dataset_name, num_classes,
                num_weights, epochs and threshold.
            category_column: index of the column holding the object class.
        """
        rgwr = GammaGWR()
        utils = Utilities()

        utils.remove_files(FILES_FOLDER)  # Clear the directory for new data.

        train_accuracies = []
        test_accuracies = []
        mini_batch_size = 5
        iterations = 10

        # Randomize the order in which object classes are presented.
        all_object_classes = np.unique(train_data[:, category_column])
        random.shuffle(all_object_classes)

        rgwr.initNetwork(DATA_DIMENSION,
                         numClasses=args.num_classes,
                         numWeights=args.num_weights)

        for iteration in range(0, iterations):
            start = mini_batch_size * iteration
            objects_to_learn = all_object_classes[start:start +
                                                  mini_batch_size]

            # Learn the model on this mini-batch of classes only.
            train_data_prepared = train_data[np.in1d(
                train_data[:, category_column], objects_to_learn)]
            train_dataset, train_dimension, train_labelSet = utils.describe_data(
                train_data_prepared, args.dataset_name)
            rgwr.train(train_dataset,
                       train_labelSet,
                       maxEpochs=args.epochs,
                       insertionT=args.threshold,
                       beta=CONTEXT_BETA,
                       epsilon_b=LEARNING_RATE_B,
                       epsilon_n=LEARNING_RATE_N)

            # Test the model on the full train/test splits seen so far.
            test_dataset, test_dimension, test_labelSet = utils.describe_data(
                test_data, args.dataset_name)
            train_accuracy = rgwr.evaluate_model(rgwr,
                                                 train_dataset,
                                                 train_labelSet,
                                                 mode='train')
            test_accuracy = rgwr.evaluate_model(rgwr,
                                                test_dataset,
                                                test_labelSet,
                                                mode='test')

            train_accuracies.append(train_accuracy)
            test_accuracies.append(test_accuracy)

            utils.save_predicted_metrics(rgwr, iteration)

            # Read back the neuron count this iteration just wrote.
            # Fix: use a context manager so the handle is closed (the
            # original leaked one open file handle per iteration).
            with open("./saved_data/num_neurons" + str(iteration) + '.file',
                      "rb") as f:
                loaded_neurons = pickle.load(f)
            if iteration == 0:
                number_neurons = loaded_neurons
            else:
                number_neurons = np.append(number_neurons, loaded_neurons)

        with open('./saved_data/test_accuracies.file', "wb") as f:
            pickle.dump(test_accuracies, f, pickle.HIGHEST_PROTOCOL)
        with open('./saved_data/train_accuracies.file', "wb") as f:
            pickle.dump(train_accuracies, f, pickle.HIGHEST_PROTOCOL)
        with open("./saved_data/num_neurons.file", "wb") as f:
            pickle.dump(number_neurons, f, pickle.HIGHEST_PROTOCOL)

        print("Object classes order: ", all_object_classes)
        print("Train accuracies: ", train_accuracies)
        print("Test accuracies: ", test_accuracies)
    # ------------------------------------ Novelty detection ----------------------------------------------------------
    # NOTE(review): this branch trains on one object instance at a time while
    # tracking detection statistics; it is truncated in this view (the final
    # rgwr.train(...) call continues past the last visible line).
    if NOVELTY_DETECTION:
        utils.remove_files(FILES_FOLDER)  # Clear the directory for new data.
        test_accuracies = []
        learnt_objects = []
        # Confusion-matrix style accumulators for novelty decisions
        # (presumably filled further down, outside this view).
        fp = []
        tp = []
        fn = []
        tn = []
        novel_objects_detected = 0
        learnt_objects_detected = 0
        # Shuffle the unique instance ids found in INSTANCE_COLUMN.
        all_object_classes = np.unique(train_data[:, INSTANCE_COLUMN])
        random.shuffle(all_object_classes)

        rgwr.initNetwork(DATA_DIMENSION,
                         numClasses=args.num_classes,
                         numWeights=args.num_weights)
        # Running activation statistics; assumed to feed a novelty
        # threshold computed later — TODO confirm against the full file.
        activation_mean = 0
        activation_sd = 0

        # Train incrementally on the first 30 instances, one at a time.
        for i in all_object_classes[:30]:
            # Learn the model.
            train_data_prepared = train_data[np.in1d(
                train_data[:, INSTANCE_COLUMN], i)]
            train_dataSet, train_dimension, train_labelSet = utils.describe_data(
                train_data_prepared, args.dataset_name)
            rgwr.train(train_dataSet,
                       train_labelSet,
                       maxEpochs=args.epochs,
                       insertionT=args.threshold,
                       beta=CONTEXT_BETA,