def test_als_binpickle(tmp_path):
    """Round-trip a trained BiasedMF through BinPickle and verify all fitted state survives.

    NOTE(review): this definition is shadowed by a later function of the same
    name in this file — pytest will only collect the last one.
    """
    model = als.BiasedMF(20, iterations=5, method='lu')
    train = lktu.ml_test.ratings
    model.fit(train)
    assert model.global_bias_ == approx(train.rating.mean())

    out_path = tmp_path / 'als.bpk'
    binpickle.dump(model, out_path)

    with binpickle.BinPickleFile(out_path) as store:
        # the pickle payload (last entry) should stay compact — the big
        # arrays live in separate buffers, not in the pickle stream
        last_entry = store.entries[-1]
        _log.info('serialized to %d pickle bytes', last_entry.dec_length)
        pickle_dis(store._read_buffer(last_entry))
        assert last_entry.dec_length < 1024

        restored = store.load()
        # scalar bias, per-user/item biases, latent factors, and index maps
        # must all match the model we serialized
        assert restored.global_bias_ == model.global_bias_
        assert np.all(restored.user_bias_ == model.user_bias_)
        assert np.all(restored.item_bias_ == model.item_bias_)
        assert np.all(restored.user_features_ == model.user_features_)
        assert np.all(restored.item_features_ == model.item_features_)
        assert np.all(restored.item_index_ == model.item_index_)
        assert np.all(restored.user_index_ == model.user_index_)
def test_als_binpickle(tmp_path):
    """Round-trip a trained BiasedMF through BinPickle, compare fitted state, and smoke-test prediction."""
    model = als.BiasedMF(20, iterations=5, method='lu')
    train = lktu.ml_test.ratings
    model.fit(train)
    assert model.bias.mean_ == approx(train.rating.mean())

    out_path = tmp_path / 'als.bpk'
    binpickle.dump(model, out_path)

    with binpickle.BinPickleFile(out_path) as store:
        # the pickle payload (last entry) should stay compact — the big
        # arrays live in separate buffers, not in the pickle stream
        last_entry = store.entries[-1]
        _log.info('serialized to %d pickle bytes', last_entry.dec_length)
        pickle_dis(store._read_buffer(last_entry))
        assert last_entry.dec_length < 2048

        restored = store.load()
        # bias component, latent factors, and index maps must all match
        # the model we serialized
        assert restored.bias.mean_ == model.bias.mean_
        assert np.all(restored.bias.user_offsets_ == model.bias.user_offsets_)
        assert np.all(restored.bias.item_offsets_ == model.bias.item_offsets_)
        assert np.all(restored.user_features_ == model.user_features_)
        assert np.all(restored.item_features_ == model.item_features_)
        assert np.all(restored.item_index_ == model.item_index_)
        assert np.all(restored.user_index_ == model.user_index_)

        # make sure it still works
        scores = restored.predict_for_user(10, np.arange(0, 50, dtype='i8'))
        assert len(scores) == 50