Exemplo n.º 1
0
 def __init__(self, *, cvar, dvar, dlscale, pvar, plscale):
     """Compose a constant + dynamic + periodic kernel and init the base model.

     The three additive components are: a constant offset (variance `cvar`),
     an exponential "dynamic" part (variance `dvar`, length-scale
     `dlscale` years), and a periodic-exponential part with a one-year
     period (variance `pvar`, length-scale `plscale` years).
     """
     constant = ks.kernel.Constant(var=cvar)
     dynamic = ks.kernel.Exponential(var=dvar, lscale=dlscale * YEAR)
     periodic = ks.kernel.PeriodicExponential(
         var=pvar, lscale=plscale * YEAR, period=YEAR)
     # Additive composition of the three kernel components.
     self._kern = constant + dynamic + periodic
     super().__init__(
         ks.BinaryModel(obs_type="probit"),
         {"method": "ep", "max_iter": 100, "lr": 1.0},
     )
Exemplo n.º 2
0
def test_add_item_twice():
    """Item with same name cannot be added again."""
    model = ks.BinaryModel()
    const = ks.kernel.Constant(1.0)
    model.add_item("x", const)
    # Registering a second item under an already-used name is rejected.
    with pytest.raises(ValueError):
        model.add_item("x", const)
Exemplo n.º 3
0
 def __init__(self,
              dataset,
              obs_type="probit",
              method="ep",
              max_iter=100,
              lr=1.0):
     """Bundle a dataset with a kickscore binary model and its fit settings."""
     self.dataset = dataset
     self.ks_model = ks.BinaryModel(obs_type=obs_type)
     # Keyword arguments forwarded verbatim to the model's `fit` call.
     self.fit_params = {
         "method": method,
         "max_iter": max_iter,
         "lr": lr,
     }
Exemplo n.º 4
0
 def __init__(self,
              kernel,
              obs_type="probit",
              method="ep",
              max_iter=100,
              lr=1.0):
     """Keep the given kernel and initialize the underlying binary model."""
     self._kern = kernel
     settings = {"method": method, "max_iter": max_iter, "lr": lr}
     super().__init__(ks.BinaryModel(obs_type=obs_type), settings)
Exemplo n.º 5
0
def test_damping():
    """Damping should work on a simple example."""
    const = ks.kernel.Constant(1.0)
    model = ks.BinaryModel()
    for name in ("A", "B", "C", "D"):
        model.add_item(name, kernel=const)
    model.observe(winners=["C", "D"], losers=["A", "B"], t=0.0)
    model.observe(winners=["A", "B"], losers=["C", "D"], t=0.0)
    model.observe(winners=["A", "B"], losers=["C", "D"], t=0.0)
    # Undamped EP diverges on this contrived set of observations...
    assert not model.fit(max_iter=20)
    # ...but a modest damping factor (lr < 1) restores convergence.
    assert model.fit(max_iter=20, lr=0.8)
Exemplo n.º 6
0
def test_saving():
    """Serializing a large(-ish) model with pickle should work.

    Builds a 100-item model with 500 random observations, round-trips it
    through pickle, and checks items and observation timestamps survive.
    """
    random.seed(0)
    kernel = ks.kernel.Constant(1.0)
    model = ks.BinaryModel()
    for i in range(100):
        model.add_item(f"team{i}", kernel)
    for _ in range(500):
        # `random.sample` requires a sequence: passing the dict view
        # `model.item.keys()` directly was deprecated in Python 3.9 and
        # raises TypeError on Python >= 3.11.
        i, j = random.sample(list(model.item), 2)
        model.observe(winners=[i], losers=[j], t=0.0)
    # Serialize & unserialize.
    data = pickle.dumps(model)
    model2 = pickle.loads(data)
    assert model2.item.keys() == model.item.keys()
    for obs, obs2 in zip(model.observations, model2.observations):
        assert obs.t == obs2.t
Exemplo n.º 7
0
 def __init__(self, *, cvar, wvar):
     """Set up a probit binary model populated with TTT-style items."""
     model = ks.BinaryModel(obs_type="probit")
     TTTModel.init_items(model, cvar, wvar)
     # Default EP fit configuration shared by the example models.
     super().__init__(model, {"method": "ep", "max_iter": 100, "lr": 1.0})
Exemplo n.º 8
0
 def __init__(self, *, ovar, svar, wvar, method="ep"):
     """Set up a probit binary model populated with affine-Wiener items."""
     model = ks.BinaryModel(obs_type="probit")
     AffineWienerModel.init_items(model, ovar, svar, wvar)
     settings = {"method": method, "max_iter": 100, "lr": 1.0}
     super().__init__(model, settings)
Exemplo n.º 9
0
 def __init__(self, *, ovar, svar, dvar, lscale):
     """Set up a probit binary model populated with affine-dynamic items."""
     model = ks.BinaryModel(obs_type="probit")
     AffineDynamicModel.init_items(model, ovar, svar, dvar, lscale)
     # Default EP fit configuration shared by the example models.
     super().__init__(model, {"method": "ep", "max_iter": 100, "lr": 1.0})
Exemplo n.º 10
0
    # NOTE(review): the enclosing function's signature was lost in this
    # extract; presumably `data` is a parsed test fixture (dict) and
    # `model_class` a model constructor — confirm against the caller.
    model = model_class(**data.get("model_args", {}))
    # Register every item, resolving its kernel class by name from ks.kernel.
    for item in data["items"]:
        kernel_class = getattr(ks.kernel, item["kernel_class"])
        kernel = kernel_class(**item["kernel_args"])
        model.add_item(item["name"], kernel=kernel)
    # Replay the recorded observations, then fit with the fixture's settings.
    for obs in data["observations"]:
        model.observe(**obs)
    model.fit(**data.get("fit_args", {}))
    # Check fitted posterior means/variances and the log-likelihood against
    # the reference values stored in the fixture, within relative tolerance.
    for name, scores in data["scores"].items():
        _, mean, var = model.item[name].scores
        assert np.allclose(scores["mean"], mean, rtol=1e-3)
        assert np.allclose(scores["var"], var, rtol=1e-3)
    assert np.allclose(model.log_likelihood, data["log_likelihood"], rtol=1e-3)


@pytest.mark.parametrize("model", [ks.BinaryModel(), ks.TernaryModel()])
def test_chronological_order(model):
    """Observations can only be added in chronological order."""
    const = ks.kernel.Constant(1.0)
    model.add_item("x", kernel=const)
    model.observe(winners=["x"], losers=[], t=1.0)
    # t=0.0 precedes the previous observation at t=1.0, so it is rejected.
    with pytest.raises(ValueError):
        model.observe(winners=["x"], losers=[], t=0.0)


def test_damping():
    """Damping should work on a simple example."""
    kernel = ks.kernel.Constant(1.0)
    model = ks.BinaryModel()
    for x in ["A", "B", "C", "D"]:
        model.add_item(x, kernel=kernel)
    model.observe(winners=["C", "D"], losers=["A", "B"], t=0.0)