def load(self, filename):
    # Load a previously saved classifier and expose it as a module-level global.
    global classifier
    classifier = Classifier.load(filename)
    print('Classifier is loaded')
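
# Hedged usage sketch, not part of the original file: once load() above has set
# the module-level `classifier`, it can be queried the same way the test below
# queries a freshly trained Classifier. The helper name `predict_from_file` is an
# illustrative assumption; the predict() call with verbose= and cleanup= is taken
# from the test code below.
def predict_from_file(query_fn):
    # `classifier` is the global populated by load(); predict() yields
    # PredictionResult objects, so collect them into a list.
    return list(classifier.predict(query_fn, verbose=1, cleanup=0))

# Example call (fixture path borrowed from the tests below):
# results = predict_from_file(os.path.join(BP, 'fixtures/abalone-query.arff'))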
def test_IBk(self):
    # Train a classifier.
    print('Training IBk classifier...')
    c = Classifier(name='weka.classifiers.lazy.IBk', ckargs={'-K': 1})
    training_fn = os.path.join(BP, 'fixtures/abalone-train.arff')
    c.train(training_fn, verbose=1)
    self.assertTrue(c._model_data)

    # Make a valid query.
    print('Using IBk classifier...')
    query_fn = os.path.join(BP, 'fixtures/abalone-query.arff')
    predictions = list(c.predict(query_fn, verbose=1, cleanup=0))
    pred0 = predictions[0]
    print('pred0:', pred0)
    pred1 = PredictionResult(actual=None, predicted=7, probability=None)
    print('pred1:', pred1)
    self.assertEqual(pred0, pred1)

    # Make an invalid query (should raise PredictionError).
    with self.assertRaises(PredictionError):
        query_fn = os.path.join(BP, 'fixtures/abalone-query-bad.arff')
        predictions = list(c.predict(query_fn, verbose=1, cleanup=0))

    # Make a valid query manually.
    query = arff.ArffFile(relation='test', schema=[
        ('Sex', ('M', 'F', 'I')),
        ('Length', 'numeric'),
        ('Diameter', 'numeric'),
        ('Height', 'numeric'),
        ('Whole weight', 'numeric'),
        ('Shucked weight', 'numeric'),
        ('Viscera weight', 'numeric'),
        ('Shell weight', 'numeric'),
        ('Class_Rings', 'integer'),
    ])
    query.append(
        ['M', 0.35, 0.265, 0.09, 0.2255, 0.0995, 0.0485, 0.07, '?'])
    data_str0 = """%
@relation test
@attribute 'Sex' {F,I,M}
@attribute 'Length' numeric
@attribute 'Diameter' numeric
@attribute 'Height' numeric
@attribute 'Whole weight' numeric
@attribute 'Shucked weight' numeric
@attribute 'Viscera weight' numeric
@attribute 'Shell weight' numeric
@attribute 'Class_Rings' integer
@data
M,0.35,0.265,0.09,0.2255,0.0995,0.0485,0.07,?
"""
    data_str1 = query.write(fmt=DENSE)
    # print(data_str0)
    # print(data_str1)
    self.assertEqual(data_str0, data_str1)
    predictions = list(c.predict(query, verbose=1, cleanup=0))
    self.assertEqual(
        predictions[0],
        PredictionResult(actual=None, predicted=7, probability=None))

    # Test pickling.
    fn = os.path.join(BP, 'fixtures/IBk.pkl')
    c.save(fn)
    c = Classifier.load(fn)
    predictions = list(c.predict(query, verbose=1, cleanup=0))
    self.assertEqual(
        predictions[0],
        PredictionResult(actual=None, predicted=7, probability=None))
    # print('Pickle verified.')

    # Make a valid dict query manually.
    query = arff.ArffFile(relation='test', schema=[
        ('Sex', ('M', 'F', 'I')),
        ('Length', 'numeric'),
        ('Diameter', 'numeric'),
        ('Height', 'numeric'),
        ('Whole weight', 'numeric'),
        ('Shucked weight', 'numeric'),
        ('Viscera weight', 'numeric'),
        ('Shell weight', 'numeric'),
        ('Class_Rings', 'integer'),
    ])
    query.append({
        'Sex': 'M',
        'Length': 0.35,
        'Diameter': 0.265,
        'Height': 0.09,
        'Whole weight': 0.2255,
        'Shucked weight': 0.0995,
        'Viscera weight': 0.0485,
        'Shell weight': 0.07,
        'Class_Rings': arff.MISSING,
    })
    predictions = list(c.predict(query, verbose=1, cleanup=0))
    self.assertEqual(
        predictions[0],
        PredictionResult(actual=None, predicted=7, probability=None))