def __init__(self):
        np.random.seed(1)

        self.classifier = BayesClassifier(3)
        self.classifier.add_class(Normal(), 0)
        self.classifier.add_class(Normal(), 1)
        self.classifier.add_class(Normal(), 2)

        self.main()
    def __init__(self):
        np.random.seed(1)

        gamma = Gamma()
        gamma.set_estimators(2)

        self.classifier = BayesClassifier(2)
        self.classifier.add_class(Normal(), 0)
        self.classifier.add_class(gamma, 1)

        self.main()
class Program():
    """
    Object responsible for running the program and controlling the overall
    program flow.
    """
    def __init__(self):
        np.random.seed(1)

        gamma = Gamma()
        gamma.set_estimators(2)

        self.classifier = BayesClassifier(2)
        self.classifier.add_class(Normal(), 0)
        self.classifier.add_class(gamma, 1)

        self.main()

    def main(self):
        """
        Overall main function of the program. Generates data, trains the
        model and measures its accuracy.
        """
        x_train, y_train = self.generate_normal_data(600)
        x_test, y_test = self.generate_normal_data(300, plot=False)

        self.classifier.fit(x_train, y_train)
        acc = self.classifier.accuracy(x_test, y_test)
        print("accuracy: ", acc)

    def generate_normal_data(self, N, plot=True):
        """
        Generate N data samples: normally distributed data for one class and
        gamma-distributed data for the other class.

        param N: total number of samples to generate
        type N: int
        param plot: True if the dataset is to be plotted
        type plot: bool, optional
        return: tuple of dataset and corresponding labels
        rtype: tuple
        """
        mu = 23
        sigma = 5
        shape = 2
        scale = 2

        # Half of the samples per class so that N is the total sample count
        x1 = np.random.normal(mu, sigma, int(N / 2))
        x2 = np.random.gamma(shape, scale, int(N / 2))

        x = np.concatenate((x1, x2))
        y = np.concatenate((np.zeros(int(N / 2)), np.ones(int(N / 2))))

        if plot:
            plt.hist(x2, 50, density=True)
            plt.hist(x1, 50, density=True)
            plt.show()

        return x, y
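
None of the Program examples on this page show their imports or entry point. A minimal, assumed preamble is sketched below; the module paths for BayesClassifier and the distribution classes are guesses and must be adapted to the actual package layout.

import numpy as np
import matplotlib.pyplot as plt

# Assumed module paths; adjust to the real package layout.
from bayes_classifier import BayesClassifier
from distributions import Normal, Gamma, MultivariateNormal

if __name__ == '__main__':
    Program()
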
 def test_remove_class_counter(self):
     # Ensure that counter of added classes is correct after remove_class()
     # function has been called
     classifier = BayesClassifier(2)
     classifier.add_class(Normal(), 1)
     classifier.remove_class(1)
     assert classifier.added_classes == 0
 def test_remove_class(self):
     # Ensure that distribution is removed from list after remove_class()
     # function has been called
     classifier = BayesClassifier(2)
     classifier.add_class(Normal(), 1)
     classifier.remove_class(1)
     assert classifier.distributions[1] is None
 def create_fit_model(self):
     # Create model
     classifier = BayesClassifier(2)
     classifier.add_class(MultivariateNormal(), 0)
     classifier.add_class(MultivariateNormal(), 1)
     x, y = self.create_data()
     # Train and return model
     classifier.fit(x, y)
     return classifier
 def test_add_class(self):
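     # Ensure that the distribution is stored at the given index by add_class()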
     dist = Normal()
     classifier = BayesClassifier(3)
     classifier.add_class(dist, 0)
     assert classifier.distributions[0] == dist
Example #8
class Program():
    """
    Object responsible for running the program and controlling the overall
    program flow.
    """
    def __init__(self):
        self.classifier = BayesClassifier(2)
        self.classifier.add_class(MultivariateNormal(), 0)
        self.classifier.add_class(MultivariateNormal(), 1)

        self.main()

    def main(self):
        """
        Overall main function of the program. Generates data, trains the
        model and measures its accuracy.
        """
        x_train, y_train = self.generate_multivariate_data(100, plot=True)
        x_test, y_test = self.generate_multivariate_data(50, plot=True)

        self.classifier.fit(x_train, y_train)
        acc = self.classifier.accuracy(x_test, y_test)
        print("accuracy: ", acc)

    def generate_multivariate_data(self, N, plot=True):
        """
        Generate N multivariate Gaussian samples.

        param N: total number of samples to generate
        type N: int
        param plot: True if the dataset is to be plotted
        type plot: bool, optional
        return: tuple of dataset and corresponding labels
        rtype: tuple
        """
        mu1 = np.array([1, 1])
        mu2 = np.array([2.0, 2.0])
        sigma = np.array([[0.2, 0.0], [0.0, 0.2]])

        x1 = np.random.multivariate_normal(mu1, sigma, int(N / 2))
        x2 = np.random.multivariate_normal(mu2, sigma, int(N / 2))
        x = np.concatenate((x1, x2))
        y = np.concatenate((np.zeros(int(N / 2)), np.ones(int(N / 2))))

        if plot:
            plt.figure(1, figsize=(8, 8))
            plt.scatter(x1[:, 0],
                        x1[:, 1],
                        s=120,
                        facecolors='none',
                        edgecolors='black',
                        linewidth=3.0,
                        label='Class 1')
            plt.scatter(x2[:, 0],
                        x2[:, 1],
                        s=120,
                        facecolors='none',
                        edgecolors='blue',
                        linewidth=3.0,
                        label='Class 2')
            plt.legend()
            plt.show()

        return x, y
Example #9
    def __init__(self):
        self.classifier = BayesClassifier(2)
        self.classifier.add_class(MultivariateNormal(), 0)
        self.classifier.add_class(MultivariateNormal(), 1)

        self.main()
 def test_invalid_dist_add_class(self):
     # Ensure that exception is raised when trying to add invalid
     # class/distribution in the add_class() function
     classifier = BayesClassifier(2)
     self.assertRaises(Exception, classifier.add_class, 1, 1)
class Program():
    """
    Object responsible for running the program and controlling the overall
    program flow.
    """
    def __init__(self):
        np.random.seed(1)

        self.classifier = BayesClassifier(3)
        self.classifier.add_class(Normal(), 0)
        self.classifier.add_class(Normal(), 1)
        self.classifier.add_class(Normal(), 2)

        self.main()

    def main(self):
        """
        Overall main function of the program. Generates data, trains the
        model and measures its accuracy.
        """
        x_train, y_train = self.generate_normal_data(60)
        x_test, y_test = self.generate_normal_data(30, plot=False)

        self.classifier.fit(x_train, y_train)
        acc = self.classifier.accuracy(x_test, y_test)
        print("accuracy: ", acc)

    def generate_normal_data(self, N, plot=True):
        """
        Generate N data samples of normally distributed data for 3 classes.

        param N: total number of samples to generate
        type N: int
        param plot: True if the dataset is to be plotted
        type plot: bool, optional
        return: tuple of dataset and corresponding labels
        rtype: tuple
        """
        mu1 = 2.5
        mu2 = 4.3
        mu3 = 6.2
        sigma1 = 0.3
        sigma2 = 0.5
        sigma3 = 0.9

        x1 = np.random.normal(mu1, sigma1, int(N / 3))
        x2 = np.random.normal(mu2, sigma2, int(N / 3))
        x3 = np.random.normal(mu3, sigma3, int(N / 3))

        x = np.concatenate((x1, x2, x3))
        y = np.concatenate((np.zeros(int(N / 3)),
                            np.ones(int(N / 3)),
                            np.full(int(N / 3), 2)))

        if plot:
            plt.figure(1, figsize=(8, 8))
            plt.scatter(x1,
                        np.zeros(x1.shape),
                        s=120,
                        facecolors='none',
                        edgecolors='black',
                        linewidth=3.0,
                        label='Class 1')
            plt.scatter(x2,
                        np.zeros(x2.shape),
                        s=120,
                        facecolors='none',
                        edgecolors='blue',
                        linewidth=3.0,
                        label='Class 2')
            plt.scatter(x3,
                        np.zeros(x3.shape),
                        s=120,
                        facecolors='none',
                        edgecolors='red',
                        linewidth=3.0,
                        label='Class 3')
            plt.legend()
            plt.show()

        return x, y
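
Once fitted, the classifier can also be queried sample by sample. A minimal sketch, assuming predict() accepts a single value as the unit tests on this page suggest:

program = Program()                        # fits the model during __init__
label = program.classifier.predict(4.0)    # hypothetical single-sample prediction
print("predicted class:", label)
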
 def test_num_classes(self):
     # Ensure that num_classes is set upon construction
     classifier = BayesClassifier(2)
     assert classifier.num_classes == 2
 def test_unset_log_pdf(self):
     # Ensure that unset_log_pdf() function sets use_log_pdf attribute to
     # False
     classifier = BayesClassifier(2)
     classifier.unset_log_pdf()
     assert classifier.use_log_pdf == False
 def create_unfit_model(self):
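     # Create a model without training it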
     classifier = BayesClassifier(2)
     classifier.add_class(MultivariateNormal(), 0)
     classifier.add_class(MultivariateNormal(), 1)
     return classifier
 def test_accuracy_exception(self):
     # Ensure that exception is raised when trying to measure the accuracy
     # before training the model
     classifier = BayesClassifier(2)
     self.assertRaises(Exception, classifier.accuracy, [1, 1], [0, 0])
 def test_predict_exception(self):
     # Ensure that exception is raised when trying to predict before
     # training the model
     classifier = BayesClassifier(2)
     self.assertRaises(Exception, classifier.predict, 1)
 def test_fit_exception(self):
     # Ensure that exception is raised when trying to train the model
     # without adding all distributions/classes
     classifier = BayesClassifier(2)
     classifier.add_class(MultivariateNormal(), 1)
     self.assertRaises(Exception, classifier.fit, [0, 0], [1, 1])
 def test_remove_class_exception_exceeding_idx(self):
     # Ensure that exception is raised when trying to remove a class using
     # an index that exceeds the number of classes in remove_class()
     classifier = BayesClassifier(2)
     self.assertRaises(Exception, classifier.remove_class, 2)
 def test_add_class_counter(self):
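     # Ensure that the counter of added classes is incremented by add_class()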
     classifier = BayesClassifier(5)
     classifier.add_class(Normal(), 0)
     assert classifier.added_classes == 1
 def test_exceeding_idx_add_class(self):
     # Ensure that exception is raised when trying to add distribution
     # on an exceeding idx in the add_class() function
     classifier = BayesClassifier(4)
     self.assertRaises(Exception, classifier.add_class, Normal(), 5)
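
The test methods above rely on self.assertRaises, so they presumably live in a unittest.TestCase subclass. A minimal, assumed harness for running them looks like this:

import unittest

# Assumed wrapper; the original test-case class name and its imports are not
# shown on this page.
class TestBayesClassifier(unittest.TestCase):
    pass  # the test methods listed above would live here

if __name__ == '__main__':
    unittest.main()
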
Example #21
import time

# TextProcessor and BayesClassifier come from this project's own modules (imports
# not shown here); form_file is assumed to be defined elsewhere in the original
# script.

polish_texts = ['dramat', 'popul', 'proza', 'publ', 'wp']
path_to_files = './data/'
show_simmilar = True
num_of_simmilar = 4

if __name__ == '__main__':
    textprocessor = TextProcessor()
    textprocessor.create_dictionary(path_to_file=path_to_files, form_file=form_file)
    textprocessor.improve_dictionary(path_to_files=path_to_files, polish_texts=polish_texts)
    dict_of_words = textprocessor.dict_of_words

    # "Type a single word"
    input_word = input('Napisz pojedyncze slowo:\n')
    start = time.time()
    input_word = textprocessor.map_chars(input_word)
    if input_word not in dict_of_words:
        bayes_classifier = BayesClassifier()
        simmilar_words = bayes_classifier.calculate(input_word, dict_of_words)
        unmapped_words = []
        for word in simmilar_words:
            unmapped_words.append(textprocessor.unmap_words(word))
        # "The word does not exist in Polish. Did you mean '...'?"
        print('Slowo nie wystepuje w polskim jezyku.')
        print(f'Moze chodzilo o \'{unmapped_words[0]}\'?')
        # "Do you want to see other suggestions? y/n"
        show_hints = input('Chcesz zobaczyc inne mozliwosci? t/n\n')
        if show_hints == 't':
            # "Other suggestions: ..."
            print(f'Inne możliwosci {unmapped_words[1:num_of_simmilar]}\n')
    else:
        # "The word ... exists in Polish."
        print(f'Slowo {input_word} wystepuje w polskim jezyku.')

    end = time.time()
    # "Execution time"
    print(f'Czas egzekucji: {end-start}')
Example #22
 def __init__(self, seed_tuple, timeout):
     super(MessageHandler, self).__init__(seed_tuple, timeout)
     MessageHandler.classifier = BayesClassifier('data_examples/', '_')