Example #1
    def batch_learning(train_data, test_data, args):
        rgwr = GammaGWR()
        utils = Utilities()

        utils.remove_files(FILES_FOLDER)  # Clear the directory for new data.

        # Learn the model.
        train_dataset, train_dimension, train_labelSet = utils.describe_data(
            train_data, args.dataset_name)
        rgwr.initNetwork(train_dimension,
                         numClasses=args.num_classes,
                         numWeights=args.num_weights)
        rgwr.train(train_dataset,
                   train_labelSet,
                   maxEpochs=args.epochs,
                   insertionT=args.threshold,
                   beta=CONTEXT_BETA,
                   epsilon_b=LEARNING_RATE_B,
                   epsilon_n=LEARNING_RATE_N)

        # Test the model.
        test_dataset, test_dimension, test_labelSet = utils.describe_data(
            test_data, args.dataset_name)
        rgwr.evaluate_model(rgwr, train_dataset, train_labelSet, mode='train')
        rgwr.evaluate_model(rgwr, test_dataset, test_labelSet, mode='test')

        utils.save_predicted_metrics(rgwr, '')
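
A minimal driver sketch for `batch_learning` above. The `args` namespace fields mirror the attributes the function reads; the dataset placeholder and all values here are illustrative assumptions, and `FILES_FOLDER`, `CONTEXT_BETA`, `LEARNING_RATE_B`, and `LEARNING_RATE_N` must already exist as module-level constants, as the snippet assumes.

    import argparse
    import numpy as np

    # Hypothetical invocation; field values are illustrative, not the repo's defaults.
    args = argparse.Namespace(dataset_name='nico',
                              num_classes=10,
                              num_weights=2,
                              epochs=25,
                              threshold=0.85)
    data = np.random.rand(100, 262)  # placeholder: 256 features + metadata columns
    split = int(0.8 * len(data))
    batch_learning(data[:split], data[split:], args)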
Example #2
    TEST_SESSIONS = [1]  # MIX session.
    CAMERA = 1  # Two cameras available: left=1, right=2.
    NUMBER_FRAMES = 75  # Frames per session; 4 sessions give 300 frames (75 * 4) per object.
    CATEGORY_COLUMN = 256
    INSTANCE_COLUMN = 257
    SESSION_COLUMN = 258
    DAY_COLUMN = 259
    CAMERA_COLUMN = 260
    IMAGE_NAME_COLUMN = 261
    DATA_DIMENSION = 256
    FACTOR_FRAMES = 2  # Every Nth frame is selected; only 2 and 4 are reasonable values.
    # The original 8 frames are then reduced to 4 or 2, respectively.
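    # Sketch (not in the original): FACTOR_FRAMES implies plain stride-based
    # subsampling, i.e. keeping every Nth row of a frame array.
    _example_frames = np.arange(8)  # assume 8 frames, as the comment above states
    _example_reduced = _example_frames[::FACTOR_FRAMES]  # 2 -> 4 frames, 4 -> 2 frames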

    # ------------------------------------ Initialization --------------------------------------------------------------

    rgwr = GammaGWR()
    utils = Utilities()
    learning = Learning()
    args = utils.parse_arguments()

    # Get data.
    original_data = utils.load_data(args.dataset).values
    original_data_normalized = utils.normalize_data(original_data,
                                                    DATA_DIMENSION)

    original_data_day_one = original_data_normalized[np.in1d(
        original_data_normalized[:, DAY_COLUMN], ONE_DAY)]
    original_data_left_camera = original_data_day_one[np.in1d(
        original_data_day_one[:, CAMERA_COLUMN], CAMERA)]
    selected_data = original_data_left_camera[np.in1d(
        original_data_left_camera[:, CATEGORY_COLUMN], CATEGORIES)]
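
The three chained `np.in1d` selections above all apply one pattern: keep the rows whose metadata column matches a set of values. A small helper expressing that pattern (the helper is mine, not part of the repo; `np.isin` is the modern spelling of `np.in1d`):

    import numpy as np

    def filter_rows(data, column, values):
        """Keep rows whose entry in `column` is contained in `values`."""
        return data[np.isin(data[:, column], values)]

    # Equivalent to the chained selection above:
    # selected_data = filter_rows(
    #     filter_rows(
    #         filter_rows(original_data_normalized, DAY_COLUMN, ONE_DAY),
    #         CAMERA_COLUMN, CAMERA),
    #     CATEGORY_COLUMN, CATEGORIES)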
Example #3
        size = dataSet.shape  # Defined here: the snippet starts mid-function and `size` is used below.
        dimension = len(dataSet[1, :])

        # Data normalization
        oDataSet = np.copy(dataSet)
        for i in range(0, size[1] - 1):
            maxColumn = max(dataSet[:, i])
            minColumn = min(dataSet[:, i])
            for j in range(0, size[0]):
                oDataSet[j, i] = (dataSet[j, i] - minColumn) / (maxColumn -
                                                                minColumn)
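        # Sketch (not in the original): the loops above are equivalent to one
        # vectorized NumPy expression over all but the last (label) column.
        # A constant column would divide by zero here, exactly as in the loop.
        #
        # features = dataSet[:, :-1].astype(float)
        # oDataSet = dataSet.astype(float).copy()
        # oDataSet[:, :-1] = ((features - features.min(axis=0)) /
        #                     (features.max(axis=0) - features.min(axis=0)))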

    import pickle  # Python 3 replacement for the original cPickle.

    if importFlag:
        # Pickle data is binary; the original opened the file in text mode.
        with open('myGammaGWR.network', 'rb') as file:
            dataPickle = file.read()
        myGammaGWR = GammaGWR()
        myGammaGWR.__dict__ = pickle.loads(dataPickle)

    if trainFlag:
        myGammaGWR = GammaGWR()
        myGammaGWR.initNetwork(dimension, numWeights=2, numClasses=3)
        myGammaGWR.train(oDataSet,
                         labelSet,
                         maxEpochs=25,
                         insertionT=0.85,
                         beta=0.5,
                         epsilon_b=0.2,
                         epsilon_n=0.001)

    if saveFlag:
        # Completion of the truncated block, mirroring the import above.
        with open('myGammaGWR.network', 'wb') as file:
            file.write(pickle.dumps(myGammaGWR.__dict__))
Example #4
    def iterative_learning(train_data, test_data, args, category_column):
        rgwr = GammaGWR()
        utils = Utilities()

        utils.remove_files(FILES_FOLDER)  # Clear the directory for new data.

        train_accuracies = []
        test_accuracies = []
        mini_batch_size = 5  # New object classes introduced per iteration.

        iterations = 10  # iterations * mini_batch_size classes are learned in total.
        all_object_classes = np.unique(train_data[:, category_column])
        random.shuffle(all_object_classes)

        rgwr.initNetwork(DATA_DIMENSION,
                         numClasses=args.num_classes,
                         numWeights=args.num_weights)

        for iteration in range(0, iterations):
            objects_to_learn = all_object_classes[mini_batch_size *
                                                  iteration:mini_batch_size *
                                                  iteration + mini_batch_size]

            # Learn the model.
            train_data_prepared = train_data[np.in1d(
                train_data[:, category_column], objects_to_learn)]

            train_dataset, train_dimension, train_labelSet = utils.describe_data(
                train_data_prepared, args.dataset_name)
            rgwr.train(train_dataset,
                       train_labelSet,
                       maxEpochs=args.epochs,
                       insertionT=args.threshold,
                       beta=CONTEXT_BETA,
                       epsilon_b=LEARNING_RATE_B,
                       epsilon_n=LEARNING_RATE_N)

            # Test the model.
            test_dataset, test_dimension, test_labelSet = utils.describe_data(
                test_data, args.dataset_name)
            train_accuracy = rgwr.evaluate_model(rgwr,
                                                 train_dataset,
                                                 train_labelSet,
                                                 mode='train')
            test_accuracy = rgwr.evaluate_model(rgwr,
                                                test_dataset,
                                                test_labelSet,
                                                mode='test')

            train_accuracies.append(train_accuracy)
            test_accuracies.append(test_accuracy)

            utils.save_predicted_metrics(rgwr, iteration)

            # Accumulate the per-iteration neuron counts written by save_predicted_metrics.
            with open('./saved_data/num_neurons' + str(iteration) + '.file',
                      'rb') as f:
                current_neurons = pickle.load(f)
            if iteration == 0:
                number_neurons = current_neurons
            else:
                number_neurons = np.append(number_neurons, current_neurons)

        with open('./saved_data/test_accuracies.file', "wb") as f:
            pickle.dump(test_accuracies, f, pickle.HIGHEST_PROTOCOL)
        with open('./saved_data/train_accuracies.file', "wb") as f:
            pickle.dump(train_accuracies, f, pickle.HIGHEST_PROTOCOL)
        with open("./saved_data/num_neurons.file", "wb") as f:
            pickle.dump(number_neurons, f, pickle.HIGHEST_PROTOCOL)

        print("Object classes order: ", all_object_classes)
        print("Train accuracies: ", train_accuracies)
        print("Test accuracies: ", test_accuracies)
Example #5
    # Export pickled network
    export_flag = False
    # Plot network (2D projection)
    plot_flag = True

    if data_flag:
        ds_iris = gtls.IrisDataset(file='iris.csv', normalize=True)
        print("%s from %s loaded." % (ds_iris.name, ds_iris.file))

    if import_flag:
        fname = 'my_net.ggwr'
        my_net = gtls.import_network(fname, GammaGWR)

    if train_flag:
        # Create network
        my_net = GammaGWR()
        # Initialize network with two neurons
        my_net.init_network(ds=ds_iris, random=False, num_context=1)
        # Train network on dataset
        my_net.train_ggwr(ds=ds_iris,
                          epochs=15,
                          a_threshold=0.85,
                          beta=0.7,
                          l_rates=[0.2, 0.001])

    if test_flag:
        my_net.test_gammagwr(ds_iris, test_accuracy=True)
        print("Accuracy on test-set: %s" % my_net.test_accuracy)

    if export_flag:
        fname = 'my_net.ggwr'
        # Completion of the truncated block: export_network mirrors import_network above.
        gtls.export_network(fname, my_net)
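
`plot_flag` is set at the top of this snippet, but the plotting call itself falls outside the excerpt. In the gwr-tb demos this is done with a `gtls` helper; the exact call below is an assumption based on that API:

    if plot_flag:
        # Assumed gtls helper: 2D projection of the network's neurons and edges.
        gtls.plot_network(my_net, edges=True, labels=True)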
Example #6
    # Used for plotting.
    CATEGORIES_50_NICO = [
        'book', 'book', 'book', 'book', 'book', 'book', 'book', 'book', 'book',
        'book', 'hairbrush', 'hairbrush', 'hairbrush', 'hairbrush',
        'hairbrush', 'hairbrush', 'hairbrush', 'hairbrush', 'hairbrush',
        'hairbrush', 'hair clip', 'hair clip', 'hair clip', 'hair clip',
        'hair clip', 'hair clip', 'hair clip', 'hair clip', 'hair clip',
        'hair clip', 'flower', 'flower', 'flower', 'flower', 'flower',
        'flower', 'flower', 'flower', 'flower', 'flower', 'glass', 'glass',
        'glass', 'glass', 'glass', 'glass', 'glass', 'glass', 'glass', 'glass'
    ]

    # ------------------------------------ Initialization --------------------------------------------------------------

    rgwr = GammaGWR()
    utils = Utilities()
    learning = Learning()
    args = utils.parse_arguments()

    # Get data.
    original_data = utils.load_data(args.dataset).values
    original_data_normalized = utils.normalize_data(original_data,
                                                    DATA_DIMENSION)
    # original_data_normalized = original_data  # Uncomment to skip normalization.

    # Get training data.
    train_data = original_data_normalized[np.in1d(
        original_data_normalized[:, SESSION_COLUMN], TRAIN_SESSIONS)]
    train_data = train_data[np.in1d(train_data[:, INSTANCE_COLUMN],
                                    TRAIN_INSTANCES)]
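
The snippet ends with the training split; the matching test split would filter on `TEST_SESSIONS` in exactly the same way. A sketch following the pattern above (`TEST_INSTANCES` is assumed to exist alongside `TRAIN_INSTANCES`):

    # Get test data (assumed mirror of the training selection above).
    test_data = original_data_normalized[np.in1d(
        original_data_normalized[:, SESSION_COLUMN], TEST_SESSIONS)]
    test_data = test_data[np.in1d(test_data[:, INSTANCE_COLUMN],
                                  TEST_INSTANCES)]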