Example #1
    def test_check_discrete_minibatch(self):
        disaster_data_t = tt.vector()
        disaster_data_t.tag.test_value = np.zeros(len(self.disaster_data))

        def create_minibatches():
            while True:
                yield (self.disaster_data, )

        with Model():
            switchpoint = DiscreteUniform('switchpoint',
                                          lower=self.year.min(),
                                          upper=self.year.max(),
                                          testval=1900)

            # Priors for the pre- and post-switchpoint disaster rates
            early_rate = Exponential('early_rate', 1)
            late_rate = Exponential('late_rate', 1)

            # Assign the appropriate Poisson rate to years before and after the switchpoint
            rate = tt.switch(switchpoint >= self.year, early_rate, late_rate)
            disasters = Poisson('disasters', rate, observed=disaster_data_t)

            with self.assertRaises(ValueError):
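                # ADVI only supports continuous random variables; the discrete
                # `switchpoint` (and the Poisson-observed `disasters`) should
                # make advi_minibatch raise a ValueError.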
                advi_minibatch(n=10,
                               minibatch_RVs=[disasters],
                               minibatch_tensors=[disaster_data_t],
                               minibatches=create_minibatches())
Example #2
    def test_advi_minibatch(self):
        n = 1000
        sd0 = 2.
        mu0 = 4.
        sd = 3.
        mu = -5.

        data = floatX(sd * np.random.randn(n) + mu)

        d = n / sd**2 + 1 / sd0**2
        mu_post = (n * np.mean(data) / sd**2 + mu0 / sd0**2) / d
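        # Analytic conjugate normal-normal update with known likelihood sd:
        # the posterior precision is d = n/sd**2 + 1/sd0**2, the posterior mean
        # is the precision-weighted combination of the sample mean and the
        # prior mean, and the posterior sd is sqrt(1/d); the ADVI fit is
        # checked against these values below.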

        data_t = tt.vector()
        data_t.tag.test_value = floatX(np.zeros(1,))

        def create_minibatch(data):
            while True:
                data = np.roll(data, 100, axis=0)
                yield (data[:100],)
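        # The generator never terminates: each step rolls the data by 100
        # positions and yields the leading 100 elements as a one-tuple,
        # matching the single entry in minibatch_tensors.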

        minibatches = create_minibatch(data)

        with Model():
            mu_ = Normal('mu', mu=mu0, sd=sd0, testval=0)
            x = Normal('x', mu=mu_, sd=sd, observed=data_t)
            advi_fit = advi_minibatch(
                n=1000, minibatch_tensors=[data_t],
                minibatch_RVs=[x], minibatches=minibatches,
                total_size=n, learning_rate=1e-1)

            np.testing.assert_allclose(advi_fit.means['mu'], mu_post, rtol=0.1)
            trace = sample_vp(advi_fit, 10000)

        np.testing.assert_allclose(np.mean(trace['mu']), mu_post, rtol=0.4)
        np.testing.assert_allclose(np.std(trace['mu']), np.sqrt(1. / d), rtol=0.4)

        # Also check that a small number of iterations (n < 10) works
        with Model():
            mu_ = Normal('mu', mu=mu0, sd=sd0, testval=0)
            x = Normal('x', mu=mu_, sd=sd, observed=data_t)
            advi_fit = advi_minibatch(
                n=5, minibatch_tensors=[data_t],
                minibatch_RVs=[x], minibatches=minibatches,
                total_size=n, learning_rate=1e-1)

        # A very large learning rate should produce NaNs and raise FloatingPointError
        with pytest.raises(FloatingPointError):
            with Model():
                mu_ = Normal('mu', mu=mu0, sd=sd0, testval=0)
                x = Normal('x', mu=mu_, sd=sd, observed=data_t)
                advi_fit = advi_minibatch(
                    n=1000, minibatch_tensors=[data_t],
                    minibatch_RVs=[x], minibatches=minibatches,
                    total_size=n, learning_rate=1e10)
Example #3
    def test_advi_minibatch_shared(self):
        n = 1000
        sd0 = 2.
        mu0 = 4.
        sd = 3.
        mu = -5.

        data = sd * np.random.randn(n) + mu

        d = n / sd**2 + 1 / sd0**2
        mu_post = (n * np.mean(data) / sd**2 + mu0 / sd0**2) / d

        data_t = shared(np.zeros(1, ))

        def create_minibatches(data):
            while True:
                data = np.roll(data, 100, axis=0)
                yield (data[:100], )

        with Model():
            mu_ = Normal('mu', mu=mu0, sd=sd0, testval=0)
            x = Normal('x', mu=mu_, sd=sd, observed=data_t)
            advi_fit = advi_minibatch(n=1000,
                                      minibatch_tensors=[data_t],
                                      minibatch_RVs=[x],
                                      minibatches=create_minibatches(data),
                                      total_size=n,
                                      learning_rate=1e-1)
            np.testing.assert_allclose(advi_fit.means['mu'], mu_post, rtol=0.1)
            trace = sample_vp(advi_fit, 10000)

        np.testing.assert_allclose(np.mean(trace['mu']), mu_post, rtol=0.4)
        np.testing.assert_allclose(np.std(trace['mu']),
                                   np.sqrt(1. / d),
                                   rtol=0.4)
Example #4
    def test_advi_minibatch_shared(self):
        n = 1000
        sd0 = 2.
        mu0 = 4.
        sd = 3.
        mu = -5.

        data = sd * np.random.randn(n) + mu

        d = n / sd**2 + 1 / sd0**2
        mu_post = (n * np.mean(data) / sd**2 + mu0 / sd0**2) / d

        data_t = shared(np.zeros(1,))

        def create_minibatches(data):
            while True:
                data = np.roll(data, 100, axis=0)
                yield (data[:100],)

        with Model():
            mu_ = Normal('mu', mu=mu0, sd=sd0, testval=0)
            x = Normal('x', mu=mu_, sd=sd, observed=data_t)
            advi_fit = advi_minibatch(
                n=1000, minibatch_tensors=[data_t],
                minibatch_RVs=[x], minibatches=create_minibatches(data),
                total_size=n, learning_rate=1e-1)
            np.testing.assert_allclose(advi_fit.means['mu'], mu_post, rtol=0.1)
            trace = sample_vp(advi_fit, 10000)

        np.testing.assert_allclose(np.mean(trace['mu']), mu_post, rtol=0.4)
        np.testing.assert_allclose(np.std(trace['mu']), np.sqrt(1. / d), rtol=0.4)
Example #5
    def fit(self, x, y):
        if self.verbose:
            logging.info('Building pymc3 model')
        self.make_model(x, y)
        if self.verbose:
            logging.info('Sampling...')
        with self.model:
            minibatch_tensors = [
                self.product_id_var,
                self.route_id_var,
                self.client_id_var,
                self.adjusted_demand_var
            ]
            output_rvs = [self.adjusted_demand]
            total_size = x.shape[0]
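            # total_size is the number of rows in the full data set, so the
            # minibatch log-likelihood can be rescaled to the full data set.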

            self.v_params = variational.advi_minibatch(
                n=int(1e6),
                minibatch_tensors=minibatch_tensors,
                minibatch_RVs=output_rvs,
                total_size=total_size,
                minibatches=self.minibatch_tensors(x, y)
            )
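            # Plot the last 100,000 ELBO values to eyeball convergence.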
            plt.plot(self.v_params.elbo_vals[-int(1e5):])
            plt.savefig('./elbo.png')
            self.trace = variational.sample_vp(self.v_params)
            if self.verbose:
                print(pm.summary(self.trace[100:], varnames=['route_demand', 'client_demand']))
        return self
Example #6
    def test_check_discrete_minibatch(self):
        disaster_data_t = tt.vector()
        disaster_data_t.tag.test_value = np.zeros(len(self.disaster_data))

        def create_minibatches():
            while True:
                yield (self.disaster_data,)

        with Model():
            switchpoint = DiscreteUniform(
                'switchpoint', lower=self.year.min(), upper=self.year.max(), testval=1900)

            # Priors for the pre- and post-switchpoint disaster rates
            early_rate = Exponential('early_rate', 1)
            late_rate = Exponential('late_rate', 1)

            # Assign the appropriate Poisson rate to years before and after the switchpoint
            rate = tt.switch(switchpoint >= self.year, early_rate, late_rate)
            disasters = Poisson('disasters', rate, observed=disaster_data_t)

            with self.assertRaises(ValueError):
                advi_minibatch(n=10, minibatch_RVs=[disasters], minibatch_tensors=[disaster_data_t],
                               minibatches=create_minibatches(), verbose=False)
Example #7
def test_advi_minibatch():
    n = 1000
    sd0 = 2.
    mu0 = 4.
    sd = 3.
    mu = -5.

    data = sd * np.random.RandomState(0).randn(n) + mu

    d = n / sd**2 + 1 / sd0**2
    mu_post = (n * np.mean(data) / sd**2 + mu0 / sd0**2) / d

    data_t = tt.vector()
    data_t.tag.test_value = np.zeros(1, )

    with Model() as model:
        mu_ = Normal('mu', mu=mu0, sd=sd0, testval=0)
        x = Normal('x', mu=mu_, sd=sd, observed=data_t)

    minibatch_RVs = [x]
    minibatch_tensors = [data_t]

    def create_minibatch(data):
        while True:
            data = np.roll(data, 100, axis=0)
            yield (data[:100], )

    minibatches = create_minibatch(data)

    with model:
        advi_fit = advi_minibatch(n=1000,
                                  minibatch_tensors=minibatch_tensors,
                                  minibatch_RVs=minibatch_RVs,
                                  minibatches=minibatches,
                                  total_size=n,
                                  learning_rate=1e-1,
                                  random_seed=1)

        np.testing.assert_allclose(advi_fit.means['mu'], mu_post, rtol=0.1)

        trace = sample_vp(advi_fit, 10000)

    np.testing.assert_allclose(np.mean(trace['mu']), mu_post, rtol=0.4)
    np.testing.assert_allclose(np.std(trace['mu']), np.sqrt(1. / d), rtol=0.4)
Example #8
def test_advi_minibatch():
    n = 1000
    sd0 = 2.
    mu0 = 4.
    sd = 3.
    mu = -5.

    data = sd * np.random.RandomState(0).randn(n) + mu

    d = n / sd**2 + 1 / sd0**2
    mu_post = (n * np.mean(data) / sd**2 + mu0 / sd0**2) / d

    data_t = tt.vector()
    data_t.tag.test_value = np.zeros(1,)

    with Model() as model:
        mu_ = Normal('mu', mu=mu0, sd=sd0, testval=0)
        x = Normal('x', mu=mu_, sd=sd, observed=data_t)
        
    minibatch_RVs = [x]
    minibatch_tensors = [data_t]

    def create_minibatch(data):
        while True:
            data = np.roll(data, 100, axis=0)
            yield (data[:100],)

    minibatches = create_minibatch(data)

    with model:
        advi_fit = advi_minibatch(
            n=1000, minibatch_tensors=minibatch_tensors,
            minibatch_RVs=minibatch_RVs, minibatches=minibatches,
            total_size=n, learning_rate=1e-1, random_seed=1
        )

        np.testing.assert_allclose(advi_fit.means['mu'], mu_post, rtol=0.1)

        trace = sample_vp(advi_fit, 10000)

    np.testing.assert_allclose(np.mean(trace['mu']), mu_post, rtol=0.4)
    np.testing.assert_allclose(np.std(trace['mu']), np.sqrt(1. / d), rtol=0.4)
Example #9
    def test_advi_minibatch(self):
        n = 1000
        sd0 = 2.
        mu0 = 4.
        sd = 3.
        mu = -5.

        data = sd * np.random.randn(n) + mu

        d = n / sd**2 + 1 / sd0**2
        mu_post = (n * np.mean(data) / sd**2 + mu0 / sd0**2) / d

        data_t = tt.vector()
        data_t.tag.test_value = np.zeros(1, )

        def create_minibatch(data):
            while True:
                data = np.roll(data, 100, axis=0)
                yield (data[:100], )

        minibatches = create_minibatch(data)

        with Model():
            mu_ = Normal('mu', mu=mu0, sd=sd0, testval=0)
            x = Normal('x', mu=mu_, sd=sd, observed=data_t)
            advi_fit = advi_minibatch(n=1000,
                                      minibatch_tensors=[data_t],
                                      minibatch_RVs=[x],
                                      minibatches=minibatches,
                                      total_size=n,
                                      learning_rate=1e-1)

            np.testing.assert_allclose(advi_fit.means['mu'], mu_post, rtol=0.1)
            trace = sample_vp(advi_fit, 10000)

        np.testing.assert_allclose(np.mean(trace['mu']), mu_post, rtol=0.4)
        np.testing.assert_allclose(np.std(trace['mu']),
                                   np.sqrt(1. / d),
                                   rtol=0.4)

        # Also check that a small number of iterations (n < 10) works
        with Model():
            mu_ = Normal('mu', mu=mu0, sd=sd0, testval=0)
            x = Normal('x', mu=mu_, sd=sd, observed=data_t)
            advi_fit = advi_minibatch(n=5,
                                      minibatch_tensors=[data_t],
                                      minibatch_RVs=[x],
                                      minibatches=minibatches,
                                      total_size=n,
                                      learning_rate=1e-1)

        # A very large learning rate should produce NaNs and raise FloatingPointError
        with self.assertRaises(FloatingPointError):
            with Model():
                mu_ = Normal('mu', mu=mu0, sd=sd0, testval=0)
                x = Normal('x', mu=mu_, sd=sd, observed=data_t)
                advi_fit = advi_minibatch(n=1000,
                                          minibatch_tensors=[data_t],
                                          minibatch_RVs=[x],
                                          minibatches=minibatches,
                                          total_size=n,
                                          learning_rate=1e10)
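
The examples above all hand-roll the same kind of endless minibatch generator. Below is a minimal, self-contained sketch of that pattern; it is not taken from any example above, and the function name, batch_size, and the random (rather than rolling) selection are assumptions. A generator like this could be passed as minibatches= to advi_minibatch, with the corresponding symbolic vector as the single entry of minibatch_tensors.

import numpy as np

def random_minibatches(data, batch_size=100, seed=0):
    """Yield one-tuples of random minibatches of `data`, forever."""
    rng = np.random.RandomState(seed)
    n = len(data)
    while True:
        idx = rng.choice(n, size=batch_size, replace=False)
        yield (data[idx],)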