import pickle
import random

import numpy as np

# GammaGWR, Utilities, and the module-level constants used below
# (FILES_FOLDER, DATA_DIMENSION, INSTANCE_COLUMN, CONTEXT_BETA,
# LEARNING_RATE_B, LEARNING_RATE_N) are assumed to be defined or
# imported elsewhere in this module.


def batch_learning(train_data, test_data, args):
    rgwr = GammaGWR()
    utils = Utilities()
    utils.remove_files(FILES_FOLDER)  # Clear the directory for new data.

    # Learn the model.
    train_dataset, train_dimension, train_labelSet = utils.describe_data(
        train_data, args.dataset_name)
    rgwr.initNetwork(train_dimension, numClasses=args.num_classes,
                     numWeights=args.num_weights)
    rgwr.train(train_dataset, train_labelSet, maxEpochs=args.epochs,
               insertionT=args.threshold, beta=CONTEXT_BETA,
               epsilon_b=LEARNING_RATE_B, epsilon_n=LEARNING_RATE_N)

    # Test the model.
    test_dataset, test_dimension, test_labelSet = utils.describe_data(
        test_data, args.dataset_name)
    rgwr.evaluate_model(rgwr, train_dataset, train_labelSet, mode='train')
    rgwr.evaluate_model(rgwr, test_dataset, test_labelSet, mode='test')
    utils.save_predicted_metrics(rgwr, '')
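# batch_learning() only relies on args.dataset_name, args.num_classes,
# args.num_weights, args.epochs and args.threshold, so it can be driven
# without a full CLI. A minimal invocation sketch (the values below are
# illustrative placeholders, not defaults from this project):
#
#   import argparse
#   args = argparse.Namespace(dataset_name='my_dataset', num_classes=10,
#                             num_weights=2, epochs=25, threshold=0.85)
#   batch_learning(train_data, test_data, args)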
# Demo: optionally import a saved network, train, save, and test it.
if importFlag:
    with open("myGammaGWR" + '.network', 'rb') as file:
        dataPickle = file.read()
    myGammaGWR = GammaGWR()
    myGammaGWR.__dict__ = pickle.loads(dataPickle)

if trainFlag:
    myGammaGWR = GammaGWR()
    myGammaGWR.initNetwork(dimension, numWeights=2, numClasses=3)
    myGammaGWR.train(oDataSet, labelSet, maxEpochs=25, insertionT=0.85,
                     beta=0.5, epsilon_b=0.2, epsilon_n=0.001)

if saveFlag:
    with open("myGammaGWR" + '.network', 'wb') as file:
        file.write(pickle.dumps(myGammaGWR.__dict__))

if testFlag:
    bmuWeights, bmuActivation, bmuLabel = myGammaGWR.predict(oDataSet)
    predictedLabels, accuracy = myGammaGWR.predictLabels(bmuLabel, labelSet)
    print("Classification accuracy: " + str(accuracy))
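# The __dict__-pickling pattern above can be factored into small helpers;
# a minimal sketch (save_network/load_network are illustrative names, not
# part of the GammaGWR API):
def save_network(network, path):
    # Serialize the network's state dictionary to disk.
    with open(path, 'wb') as f:
        pickle.dump(network.__dict__, f, pickle.HIGHEST_PROTOCOL)


def load_network(path):
    # Rebuild a GammaGWR instance from a previously saved state dictionary.
    network = GammaGWR()
    with open(path, 'rb') as f:
        network.__dict__ = pickle.load(f)
    return network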
def iterative_learning(train_data, test_data, args, category_column):
    rgwr = GammaGWR()
    utils = Utilities()
    utils.remove_files(FILES_FOLDER)  # Clear the directory for new data.

    train_accuracies = []
    test_accuracies = []
    mini_batch_size = 5
    iterations = 10

    all_object_classes = np.unique(train_data[:, category_column])
    random.shuffle(all_object_classes)

    rgwr.initNetwork(DATA_DIMENSION, numClasses=args.num_classes,
                     numWeights=args.num_weights)

    for iteration in range(0, iterations):
        objects_to_learn = all_object_classes[
            mini_batch_size * iteration:
            mini_batch_size * iteration + mini_batch_size]

        # Learn the model.
        train_data_prepared = train_data[np.in1d(
            train_data[:, category_column], objects_to_learn)]
        train_dataset, train_dimension, train_labelSet = utils.describe_data(
            train_data_prepared, args.dataset_name)
        rgwr.train(train_dataset, train_labelSet, maxEpochs=args.epochs,
                   insertionT=args.threshold, beta=CONTEXT_BETA,
                   epsilon_b=LEARNING_RATE_B, epsilon_n=LEARNING_RATE_N)

        # Test the model.
        test_dataset, test_dimension, test_labelSet = utils.describe_data(
            test_data, args.dataset_name)
        train_accuracy = rgwr.evaluate_model(rgwr, train_dataset,
                                             train_labelSet, mode='train')
        test_accuracy = rgwr.evaluate_model(rgwr, test_dataset,
                                            test_labelSet, mode='test')
        train_accuracies.append(train_accuracy)
        test_accuracies.append(test_accuracy)
        utils.save_predicted_metrics(rgwr, iteration)

        # Accumulate the per-iteration neuron counts into a single array.
        if iteration == 0:
            number_neurons = pickle.load(
                open("./saved_data/num_neurons" + str(iteration) + '.file',
                     "rb"))
        else:
            previous_neurons = pickle.load(
                open("./saved_data/num_neurons" + str(iteration) + '.file',
                     "rb"))
            number_neurons = np.append(number_neurons, previous_neurons)

    with open('./saved_data/test_accuracies.file', "wb") as f:
        pickle.dump(test_accuracies, f, pickle.HIGHEST_PROTOCOL)
    with open('./saved_data/train_accuracies.file', "wb") as f:
        pickle.dump(train_accuracies, f, pickle.HIGHEST_PROTOCOL)
    with open("./saved_data/num_neurons.file", "wb") as f:
        pickle.dump(number_neurons, f, pickle.HIGHEST_PROTOCOL)

    print("Object classes order: ", all_object_classes)
    print("Train accuracies: ", train_accuracies)
    print("Test accuracies: ", test_accuracies)
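# The metrics pickled by iterative_learning() can be reloaded later for
# inspection or plotting; a minimal sketch (load_metrics is an illustrative
# helper name; the file paths match those written above):
def load_metrics(folder='./saved_data'):
    with open(folder + '/train_accuracies.file', 'rb') as f:
        train_accuracies = pickle.load(f)
    with open(folder + '/test_accuracies.file', 'rb') as f:
        test_accuracies = pickle.load(f)
    with open(folder + '/num_neurons.file', 'rb') as f:
        number_neurons = pickle.load(f)
    return train_accuracies, test_accuracies, number_neurons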
rgwr.initNetwork(DATA_DIMENSION, numClasses=args.num_classes,
                 numWeights=args.num_weights)

activation_mean = 0
activation_sd = 0
learnt_objects = []  # Instances learnt so far (initialization assumed).

# Bootstrap phase: learn the first 30 instances one at a time while tracking
# the network's activation statistics over everything learnt so far.
for i in all_object_classes[:30]:
    # Learn the model.
    train_data_prepared = train_data[np.in1d(
        train_data[:, INSTANCE_COLUMN], i)]
    train_dataSet, train_dimension, train_labelSet = utils.describe_data(
        train_data_prepared, args.dataset_name)
    rgwr.train(train_dataSet, train_labelSet, maxEpochs=args.epochs,
               insertionT=args.threshold, beta=CONTEXT_BETA,
               epsilon_b=LEARNING_RATE_B, epsilon_n=LEARNING_RATE_N)
    learnt_objects.append(i)

    # Re-estimate the novelty statistics on all instances learnt so far.
    train_data_prepared = train_data[np.in1d(
        train_data[:, INSTANCE_COLUMN], learnt_objects)]
    train_dataSet, train_dimension, train_labelSet = utils.describe_data(
        train_data_prepared, args.dataset_name)
    novelty = utils.check_novelty(rgwr, train_dataSet, mode='train')
    activation_mean = novelty[0]
    activation_sd = novelty[1]

random.shuffle(all_object_classes)
iteration = 1