Example #1
def dataSetTestATest(cov, means):
    svg = ClassificationValuesGenerator(0, 30)
    return svg.getSyntheticValuesForClassificationWithMeans(
        [50] * results.numberOfClasses, cov, means)
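A minimal usage sketch (not part of the original listing), assuming the module's argparse results have already been parsed with numberOfClasses == 2; the covariance and means mirror the fixed distribution used in Example #3:

import numpy as np

cov = np.array([[1, 0], [0, 1]])         # shared identity covariance
means = [[0, 0], [0, 4]]                 # one mean vector per class
testData = dataSetTestATest(cov, means)  # 50 synthetic samples per class
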
Example #2
parser.add_argument(
    "-e",
    action="store",
    dest="testUsingTrainingData",
    type=int,
    default=1,
    help="1: Test the classifier using a different data set. "
         "0: Test using the training data set.")

if __name__ == "__main__":
    results = parser.parse_args()
    if results.numberOfClasses > 1:
        numberOfDataPerClass = np.random.uniform(80, 100,
                                                 results.numberOfClasses)
        svg = ClassificationValuesGenerator(0, 30)
        values = svg.getSyntheticValuesForClassification(numberOfDataPerClass)

        trainingData = values[0]
        cov = values[1]
        means = values[2]

        classificator = LinearClassificator()
        classificator.findW(trainingData)

        classificable = []
        classificated = [[] for i in range(0, results.numberOfClasses)]

        if results.testUsingTrainingData == 0:
            testData = trainingData
        else:
            # Presumably generates a fresh synthetic test set from the same
            # distributions (see dataSetTestATest in Example #1).
            testData = dataSetTestATest(cov, means)
Example #3
import argparse
import numpy as np
from random import shuffle

# ClassificationValuesGenerator and EM come from the surrounding project and
# are imported elsewhere in the original module (not shown in this snippet).

def dataSetTestATraining():
    numberOfDataPerClass = np.random.uniform(80, 100, results.numberOfClasses)
    svg = ClassificationValuesGenerator(0, 30)
    return svg.getSyntheticValuesForClassification(numberOfDataPerClass)
def dataSetTestATrainingWithFixedDistribution():
    numberOfDataPerClass = np.random.uniform(80, 100, 2)
    svg = ClassificationValuesGenerator()
    cov = np.array([[1, 0], [0, 1]])
    means = [[0, 0], [0, 4]]
    return [svg.getSyntheticValuesForClassificationWithMeans(numberOfDataPerClass, cov, means), cov, means]
def dataSetTestATest(cov, means):
    svg = ClassificationValuesGenerator(0, 1)
    return svg.getSyntheticValuesForClassificationWithMeans([50] * 2, cov, means)
def dataSetTestBTraining(dim):
    numberOfDataPerClass = np.random.uniform(80, 100, 3)
    svg = ClassificationValuesGenerator(0, 10)
    return svg.getSyntheticValuesForClassification(numberOfDataPerClass, dim)
parser = argparse.ArgumentParser(description="Mixture of Gaussians of K classes with D = 2.")
parser.add_argument("-k", action="store", dest="numberOfClasses", type=int, default=2,
                    help="Number of classses.")
parser.add_argument("-t", action="store", dest="testsToRun", type=int, default=0,
                    help="0: run EM and K-Means. 1: Will run only EM. 2: Will run only K-Means.")
parser.add_argument("-em", action="store", dest="numberOfIterationsOfEM", type=int, default=2000,
                    help="Number of iterations of EM.")
parser.add_argument("-km", action="store", dest="numberOfIterationsOfKMeans", type=int, default=200,
                    help="Number of iterations of K-Means.")

if __name__ == "__main__":
    results = parser.parse_args()
    if results.numberOfClasses > 1:
        numberOfDataPerClass = np.random.uniform(80, 100, results.numberOfClasses)
        svg = ClassificationValuesGenerator(0, 10)
        values = svg.getSyntheticValuesForClassification(numberOfDataPerClass)
 
        trainingData = values[0]
        cov = values[1]
        means = values[2]
 
        classificable = []
        for cl in trainingData:
            classificable.extend(cl)

        shuffle(classificable)

        if results.testsToRun == 0 or results.testsToRun == 1:
            classificator1 = EM()
            classificator1.expectationMaximization(classificable, results.numberOfClasses, results.numberOfIterationsOfEM)
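
The flags above can also be exercised programmatically through the same parser, which is a handy sanity check; the argument values below are made up for illustration:

# Hypothetical run: 3 classes, EM only, 500 EM iterations.
results = parser.parse_args(["-k", "3", "-t", "1", "-em", "500"])
assert results.numberOfClasses == 3
assert results.testsToRun == 1
assert results.numberOfIterationsOfEM == 500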