from array import array

import numpy as np
from sklearn.metrics import mean_absolute_error

from skmultiflow.data import RegressionGenerator
from skmultiflow.trees import HoeffdingAdaptiveTreeRegressor


def test_hoeffding_adaptive_tree_regressor_mean():
    stream = RegressionGenerator(n_samples=500, n_features=20, n_informative=15, random_state=1)
    stream.prepare_for_use()

    learner = HoeffdingAdaptiveTreeRegressor(leaf_prediction='mean', random_state=1)

    cnt = 0
    max_samples = 500
    y_pred = array('d')
    y_true = array('d')
    wait_samples = 10

    while cnt < max_samples:
        X, y = stream.next_sample()
        # Test every n samples
        if (cnt % wait_samples == 0) and (cnt != 0):
            y_pred.append(learner.predict(X)[0])
            y_true.append(y[0])
        learner.partial_fit(X, y)
        cnt += 1

    expected_predictions = array('d', [
        102.38946041769101, 55.6584574987656, 5.746076599168373, 17.11797209372667, 2.566888222752787,
        9.188247802192826, 17.87894804676911, 15.940629626883966, 8.981172175448485, 13.152624115190092,
        11.106058099429399, 6.473195313058236, 4.723621479590173, 13.825568609556493, 8.698873073880696,
        1.6452441811010252, 5.123496188584294, 6.34387187194982, 5.9977733790395105, 6.874251577667707,
        4.605348088338317, 8.20112636572672, 9.032631648758098, 4.428189978974459, 4.249801041367518,
        9.983272668044492, 12.859518508979734, 11.741395774380285, 11.230028410261868, 9.126921979081521,
        9.132146661688296, 7.750655625124709, 6.445145118245414, 5.760928671876355, 4.041291302080659,
        3.591837600560529, 0.7640424010500604, 0.1738639840537784, 2.2068337802212286, -81.05302946841077,
        96.17757415335177, -77.35894903819677, 95.85568683733698, 99.1981674250886, 99.89327888035015,
        101.66673013734784, -79.1904234513751, -80.42952143783687, 100.63954789983896
    ])
    assert np.allclose(y_pred, expected_predictions)

    error = mean_absolute_error(y_true, y_pred)
    expected_error = 143.11351404083086
    assert np.isclose(error, expected_error)

    expected_info = "HoeffdingAdaptiveTreeRegressor(binary_split=False, grace_period=200, leaf_prediction='mean', " \
                    "learning_ratio_const=True, learning_ratio_decay=0.001, learning_ratio_perceptron=0.02, " \
                    "max_byte_size=33554432, memory_estimate_period=1000000, nb_threshold=0, no_preprune=False, " \
                    "nominal_attributes=None, random_state=1, remove_poor_atts=False, split_confidence=1e-07, " \
                    "stop_mem_management=False, tie_threshold=0.05)"
    info = " ".join([line.strip() for line in learner.get_info().split()])
    assert info == expected_info

    assert isinstance(learner.get_model_description(), type(''))
    assert type(learner.predict(X)) == np.ndarray
def test_hoeffding_adaptive_tree_regressor_perceptron():
    stream = RegressionGenerator(n_samples=500, n_features=20, n_informative=15, random_state=1)

    learner = HoeffdingAdaptiveTreeRegressor(leaf_prediction='perceptron', random_state=1)

    cnt = 0
    max_samples = 500
    y_pred = array('d')
    y_true = array('d')
    wait_samples = 10

    while cnt < max_samples:
        X, y = stream.next_sample()
        # Test every n samples
        if (cnt % wait_samples == 0) and (cnt != 0):
            y_pred.append(learner.predict(X)[0])
            y_true.append(y[0])
        learner.partial_fit(X, y)
        cnt += 1

    expected_predictions = array('d', [
        207.20901655684412, 106.30316877540555, 101.46950096324191, 114.38162776688861, 48.40271620592212,
        -79.94375846313639, -76.69182794940929, 88.38425569670662, -13.92372162581644, 3.0549887923350507,
        55.36276732455883, 32.0512081208464, 17.54953203218902, -1.7305966738232161, 43.54548690756897,
        8.502241407478213, -61.14739038895263, 50.528736810827745, 9.679668917948607, 89.93098085572623,
        85.1994809437223, 1.8721866382932664, -7.1972581323107825, -45.86230662663542, 3.111671172363243,
        57.921908276916646, 61.43400576850072, -16.61695641848216, -6.0769944259948065, 19.929266442289546,
        -60.972801351912224, -0.3342549973033524, -50.53334350658139, -14.885488543743078, -13.255920225124637,
        28.909916365484275, -103.03499425386107, -36.44921969674884, -15.40018796932204, -84.98471039676006,
        38.270205984888065, -62.97228157481581, -48.095864628804044, 95.5028130171316, 73.62390886812497,
        152.7135140597221, -120.4662342226783, -77.68182541723442, 66.82059046110074
    ])
    assert np.allclose(y_pred, expected_predictions)

    error = mean_absolute_error(y_true, y_pred)
    expected_error = 126.11208652969131
    assert np.isclose(error, expected_error)

    expected_info = "HoeffdingAdaptiveTreeRegressor(binary_split=False, grace_period=200, " \
                    "leaf_prediction='perceptron', learning_ratio_const=True, learning_ratio_decay=0.001, " \
                    "learning_ratio_perceptron=0.02, max_byte_size=33554432, memory_estimate_period=1000000, " \
                    "no_preprune=False, nominal_attributes=None, random_state=1, " \
                    "remove_poor_atts=False, split_confidence=1e-07, stop_mem_management=False, tie_threshold=0.05)"
    info = " ".join([line.strip() for line in learner.get_info().split()])
    assert info == expected_info

    assert isinstance(learner.get_model_description(), type(''))
    assert type(learner.predict(X)) == np.ndarray
    assert learner._estimator_type == 'regressor'
def test_hoeffding_tree_perceptron():
    stream = RegressionGenerator(n_samples=500, n_features=20, n_informative=15, random_state=1)
    stream.prepare_for_use()

    learner = HoeffdingAdaptiveTreeRegressor(leaf_prediction='perceptron', random_state=1)

    cnt = 0
    max_samples = 500
    y_pred = array('d')
    y_true = array('d')
    wait_samples = 10

    while cnt < max_samples:
        X, y = stream.next_sample()
        # Test every n samples
        if (cnt % wait_samples == 0) and (cnt != 0):
            y_pred.append(learner.predict(X)[0])
            y_true.append(y[0])
        learner.partial_fit(X, y)
        cnt += 1

    expected_predictions = array('d', [
        1198.4326121743168, 456.36607750881586, 927.9912160545144, 1160.4797981899128, 506.50541829176535,
        -687.8187227095925, -677.8120094065415, 231.14888704761225, -284.46324039942937, -255.69195985557175,
        47.58787439365423, -135.22494016284043, -10.351457437330152, 164.95903200643997, 360.72854984472383,
        193.30633911830088, -64.23638301570358, 587.9771578214296, 649.8395655757931, 481.01214222804026,
        305.4402728117724, 266.2096493865043, -445.11447171009775, -567.5748694154349, -68.70070048021438,
        -446.79910655850153, -115.892348067663, -98.26862866231015, 71.04707905920286, -10.239274802165584,
        18.748731569441812, 4.971217265129857, 172.2223575990573, -655.2864976783711, -129.69921313686626,
        -114.01187375876822, -405.66166686550963, -215.1264381928009, -345.91020370426247, -80.49330468453074,
        108.78958382083302, 134.95267043280126, -398.5273538477553, -157.1784910649728, 219.72541225645654,
        -100.91598162899217, 80.9768574308987, -296.8856956382453, 251.9332271253148
    ])
    assert np.allclose(y_pred, expected_predictions)

    error = mean_absolute_error(y_true, y_pred)
    expected_error = 362.98595964244623
    assert np.isclose(error, expected_error)

    expected_info = "HoeffdingAdaptiveTreeRegressor(binary_split=False, grace_period=200, " \
                    "leaf_prediction='perceptron', learning_ratio_const=True, learning_ratio_decay=0.001, " \
                    "learning_ratio_perceptron=0.02, max_byte_size=33554432, memory_estimate_period=1000000, " \
                    "nb_threshold=0, no_preprune=False, nominal_attributes=None, random_state=1, " \
                    "remove_poor_atts=False, split_confidence=1e-07, stop_mem_management=False, tie_threshold=0.05)"
    info = " ".join([line.strip() for line in learner.get_info().split()])
    assert info == expected_info

    assert isinstance(learner.get_model_description(), type(''))
    assert type(learner.predict(X)) == np.ndarray
    assert learner._estimator_type == 'regressor'
def test_hoeffding_adaptive_tree_regressor_perceptron_alt():
    # Variant of the perceptron test above with a different set of expected values;
    # given a distinct name so it does not shadow test_hoeffding_adaptive_tree_regressor_perceptron.
    stream = RegressionGenerator(n_samples=500, n_features=20, n_informative=15, random_state=1)

    learner = HoeffdingAdaptiveTreeRegressor(leaf_prediction='perceptron', random_state=1)

    cnt = 0
    max_samples = 500
    y_pred = array('d')
    y_true = array('d')
    wait_samples = 10

    while cnt < max_samples:
        X, y = stream.next_sample()
        # Test every n samples
        if (cnt % wait_samples == 0) and (cnt != 0):
            y_pred.append(learner.predict(X)[0])
            y_true.append(y[0])
        learner.partial_fit(X, y)
        cnt += 1

    expected_predictions = array('d', [
        -106.84237763060068, -10.965517384802226, -180.90711470797237, -218.20896751607663, -96.4271589961865,
        110.51551963099622, 108.34616947202511, 30.1720109214627, 57.92205878998479, 77.82418885914053,
        49.972060923364765, 68.56117081695875, 15.996949915551697, -34.22744443808294, -19.762696110319702,
        -28.447329394752995, -50.62864370485592, -47.37357781048561, -99.82613515424342, 13.985531117918336,
        41.41709671929987, -34.679807275938174, 62.75626094547859, 30.925078688018893, 12.130320819235365,
        119.3648998377624, 82.96422756064737, -6.920397563039609, -12.701774870569059, 24.883730398016034,
        -74.22855883237567, -0.8012436194087567, -83.03683748750394, 46.737839617687854, 0.537404558240671,
        48.53591837633138, -86.2259777783834, -24.985514024179967, 6.396035456152859, -90.19454995571908,
        32.05821807667601, -83.08553684151566, -28.32223999320023, 113.28916673506842, 68.10498750807977,
        173.9146410394573, -150.2067507947196, -74.10346402222962, 54.39153137687993
    ])
    assert np.allclose(y_pred, expected_predictions)

    error = mean_absolute_error(y_true, y_pred)
    expected_error = 115.78916175164417
    assert np.isclose(error, expected_error)

    expected_info = "HoeffdingAdaptiveTreeRegressor(binary_split=False, grace_period=200, " \
                    "leaf_prediction='perceptron', learning_ratio_const=True, learning_ratio_decay=0.001, " \
                    "learning_ratio_perceptron=0.02, max_byte_size=33554432, memory_estimate_period=1000000, " \
                    "no_preprune=False, nominal_attributes=None, random_state=1, " \
                    "remove_poor_atts=False, split_confidence=1e-07, stop_mem_management=False, tie_threshold=0.05)"
    info = " ".join([line.strip() for line in learner.get_info().split()])
    assert info == expected_info

    assert isinstance(learner.get_model_description(), type(''))
    assert type(learner.predict(X)) == np.ndarray
    assert learner._estimator_type == 'regressor'