Example no. 1
def _compute_ev_det_loglike(physics, event, det):

  # load the station tuple and compute basic event-station attributes like
  # distance, travel time, azimuth, and azimuth difference

  stanum = det.stanum
  station = STATIONS[stanum]
  
  dist = compute_distance((station.lon, station.lat),
                          (event.lon, event.lat))

  ttime = compute_travel_time(dist)
  
  sta_to_ev_az = compute_azimuth((station.lon, station.lat),
                                 (event.lon, event.lat))

  # the azimuth difference of observed to theoretical
  degdiff = compute_degdiff(sta_to_ev_az, det.azimuth)
  
  loglike = 0

  # detection probability
  
  detprob = logistic(physics.mu_d0[stanum]
                     + physics.mu_d1[stanum] * event.mag
                     + physics.mu_d2[stanum] * dist)

  loglike += log(detprob)

  # detection time

  loglike += laplace.logpdf(det.time,
                            event.time + ttime + physics.mu_t[stanum],
                            physics.theta_t[stanum])

  # detection azimuth
  
  loglike += laplace.logpdf(degdiff, physics.mu_z[stanum],
                            physics.theta_z[stanum])

  # detection slowness

  loglike += laplace.logpdf(det.slowness,
                            compute_slowness(dist) + physics.mu_s[stanum],
                            physics.theta_s[stanum])

  # detection amplitude

  loglike += norm.logpdf(log(det.amp),
                         physics.mu_a0[stanum]
                         + physics.mu_a1[stanum] * event.mag
                         + physics.mu_a2[stanum] * dist,
                         physics.sigma_a[stanum])
  
  return loglike
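The pattern above builds a joint log-likelihood by summing independent per-component log-densities. A minimal self-contained sketch of that pattern, with toy numbers standing in for the physics parameters (all values here are hypothetical):

from math import log
from scipy.stats import laplace, norm

# Toy stand-ins for the event/detection attributes used above.
obs_time, pred_time, theta_t = 12.3, 12.0, 0.5
obs_amp, pred_log_amp, sigma_a = 2.0, 0.6, 0.3

loglike = 0.0
loglike += laplace.logpdf(obs_time, pred_time, theta_t)      # Laplace time residual
loglike += norm.logpdf(log(obs_amp), pred_log_amp, sigma_a)  # Gaussian in log-amplitude
print(loglike)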
Example no. 2
def _compute_ev_det_loglike(physics, event, det):

    # load the station tuple and compute basic event-station attributes like
    # distance, travel time, azimuth, and azimuth difference

    stanum = det.stanum
    station = STATIONS[stanum]

    dist = compute_distance((station.lon, station.lat), (event.lon, event.lat))

    ttime = compute_travel_time(dist)

    sta_to_ev_az = compute_azimuth((station.lon, station.lat),
                                   (event.lon, event.lat))

    # the azimuth difference of observed to theoretical
    degdiff = compute_degdiff(sta_to_ev_az, det.azimuth)

    loglike = 0

    # detection probability

    detprob = logistic(physics.mu_d0[stanum] +
                       physics.mu_d1[stanum] * event.mag +
                       physics.mu_d2[stanum] * dist)

    loglike += log(detprob)

    # detection time

    loglike += laplace.logpdf(det.time,
                              event.time + ttime + physics.mu_t[stanum],
                              physics.theta_t[stanum])

    # detection azimuth

    loglike += laplace.logpdf(degdiff, physics.mu_z[stanum],
                              physics.theta_z[stanum])

    # detection slowness

    loglike += laplace.logpdf(det.slowness,
                              compute_slowness(dist) + physics.mu_s[stanum],
                              physics.theta_s[stanum])

    # detection amplitude

    loglike += norm.logpdf(
        log(det.amp), physics.mu_a0[stanum] +
        physics.mu_a1[stanum] * event.mag + physics.mu_a2[stanum] * dist,
        physics.sigma_a[stanum])

    return loglike
Example no. 3
    def _log_likelihood(self, y, X, beta):
        """
        Overrides the _log_likelihood method inherited from the RegressorMCMC
        class to calculate the log-likelihood of the linear regression
        coefficients given Laplace-distributed data. It is used in the
        model fitting process.

        Parameters
        ----------
        y : numpy array
            A 1-D vector of real-valued observations.
        X : numpy array
            A 2-D matrix where rows represent observations and columns 
            represent variables.
        beta : numpy array
            A 1-D vector of coefficients in a linear regression model.

        Returns
        -------
        _log_likelihood : float
            The log-likelihood of the beta vector given the data.

        """
        
        # Predict y given the current coefficients and X.
        predicted_y = np.matmul(X, beta)
        
        # Calculate the log-likelihood of beta given the data.
        _log_likelihood = np.sum(laplace.logpdf(y,
                                                loc=predicted_y,
                                                scale=self._y_scale))
        
        return _log_likelihood
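For context, a standalone version of what this method computes: the summed Laplace log-density of linear-regression residuals. The data and scale value here are hypothetical, not taken from RegressorMCMC:

import numpy as np
from scipy.stats import laplace

rng = np.random.default_rng(0)
X = rng.normal(size=(50, 3))
beta = np.array([1.0, -2.0, 0.5])
y = X @ beta + rng.laplace(scale=0.1, size=50)

# Sum of pointwise Laplace log-densities of the residuals.
loglike = laplace.logpdf(y, loc=X @ beta, scale=0.1).sum()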
Example no. 4
 def test_scipy(self):
     x = [8, 9]
     if SP:
         assert_almost_equal(
             laplace.logpdf(x, loc=self.parameters['mu'],
                            scale=1. / self.parameters['tau']).sum(),
             laplace_like(x, **self.parameters))
Example no. 5
 def test_scipy(self):
     x = [8, 9]
     if SP:
         assert_almost_equal(
             laplace.logpdf(x,
                            loc=self.parameters['mu'],
                            scale=1. / self.parameters['tau']).sum(),
             laplace_like(x, **self.parameters))
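The scale=1./tau conversion suggests laplace_like is parameterized by a rate tau (an assumption; its definition is not shown here). The identity under test then reduces to the Laplace closed form logpdf(x) = log(tau/2) - tau*|x - mu|, which can be checked standalone:

import numpy as np
from scipy.stats import laplace

x, mu, tau = np.array([8.0, 9.0]), 7.5, 2.0
lhs = laplace.logpdf(x, loc=mu, scale=1.0 / tau).sum()
rhs = np.sum(np.log(tau / 2.0) - tau * np.abs(x - mu))
assert np.isclose(lhs, rhs)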
Example no. 6
    def test_logprob(self):
        mu = Variable(torch.randn(100))
        b = torch.exp(Variable(torch.randn(100)))
        value = Variable(torch.randn(100))
        dist = Laplace(mu, b)

        # test log probability
        res1 = dist.log_prob(value).data
        res2 = laplace.logpdf(value.data.numpy(), mu.data.numpy(),
                              b.data.numpy())
        self.assertEqual(res1, res2)
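This test predates PyTorch 0.4, where Variable was merged into Tensor. A sketch of the same check against the current torch.distributions API (an updated equivalent, not the original test):

import torch
from scipy.stats import laplace

mu = torch.randn(100)
b = torch.randn(100).exp()
value = torch.randn(100)

res1 = torch.distributions.Laplace(mu, b).log_prob(value)
res2 = torch.from_numpy(laplace.logpdf(value.numpy(), mu.numpy(), b.numpy()))
assert torch.allclose(res1, res2.float(), atol=1e-5)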
Example no. 7
    def test_log_pdf(self, dtype, location, location_is_samples, scale,
                     scale_is_samples, rv, rv_is_samples, num_samples):
        is_samples_any = any(
            [location_is_samples, scale_is_samples, rv_is_samples])
        rv_shape = rv.shape[1:] if rv_is_samples else rv.shape
        n_dim = (1 + len(rv.shape)
                 if is_samples_any and not rv_is_samples
                 else len(rv.shape))
        location_np = numpy_array_reshape(location, location_is_samples, n_dim)
        scale_np = numpy_array_reshape(scale, scale_is_samples, n_dim)
        rv_np = numpy_array_reshape(rv, rv_is_samples, n_dim)

        log_pdf_np = laplace.logpdf(rv_np, location_np, scale_np)
        var = Laplace.define_variable(shape=rv_shape, dtype=dtype).factor

        location_mx = mx.nd.array(location, dtype=dtype)
        if not location_is_samples:
            location_mx = add_sample_dimension(mx.nd, location_mx)
        scale_mx = mx.nd.array(scale, dtype=dtype)
        if not scale_is_samples:
            scale_mx = add_sample_dimension(mx.nd, scale_mx)
        rv_mx = mx.nd.array(rv, dtype=dtype)
        if not rv_is_samples:
            rv_mx = add_sample_dimension(mx.nd, rv_mx)
        variables = {
            var.location.uuid: location_mx,
            var.scale.uuid: scale_mx,
            var.random_variable.uuid: rv_mx
        }
        log_pdf_rt = var.log_pdf(F=mx.nd, variables=variables)

        assert np.issubdtype(log_pdf_rt.dtype, dtype)
        assert array_has_samples(mx.nd, log_pdf_rt) == is_samples_any
        if is_samples_any:
            assert get_num_samples(mx.nd, log_pdf_rt) == num_samples
        if np.issubdtype(dtype, np.float64):
            rtol, atol = 1e-7, 1e-10
        else:
            rtol, atol = 1e-4, 1e-5
        assert np.allclose(log_pdf_np,
                           log_pdf_rt.asnumpy(),
                           rtol=rtol,
                           atol=atol)
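The numpy_array_reshape calls above line the arrays up so that scipy's broadcasting matches MXFusion's sample dimension: laplace.logpdf broadcasts loc and scale against the random variable like any numpy ufunc. A minimal illustration with arbitrarily chosen shapes:

import numpy as np
from scipy.stats import laplace

rv = np.random.randn(3, 4)        # event shape only
loc = np.random.randn(5, 3, 4)    # 5 samples of the location
scale = np.ones((1, 3, 4))        # shared scale, broadcast over samples

log_pdf = laplace.logpdf(rv[None], loc, scale)
assert log_pdf.shape == (5, 3, 4)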
Example no. 8
 def __call__(self, u: np.ndarray, y: float) -> np.ndarray:
     return laplace.logpdf(x=u, loc=y, scale=self.scale)
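Presumably this __call__ belongs to a likelihood object whose scale is fixed at construction; a hypothetical self-contained version and its use (the class name and scale value are illustrative only):

import numpy as np
from scipy.stats import laplace

class LaplaceLogLik:
    def __init__(self, scale: float):
        self.scale = scale

    def __call__(self, u: np.ndarray, y: float) -> np.ndarray:
        # Laplace log-density of each proposal u, centered at observation y.
        return laplace.logpdf(x=u, loc=y, scale=self.scale)

loglik = LaplaceLogLik(scale=0.5)
print(loglik(np.linspace(-1.0, 1.0, 5), y=0.2))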
Example no. 9
    def _logp(self, value, loc, scale, k):

        return np.sum(laplace.logpdf(value, loc=loc, scale=scale))
Example no. 10
def main(argv):
	### PLOT BASIS VECTOR NORMS

	subplot(0, 0, spacing=1.)

	legend_entries = []

	for model in models:
		results = Experiment(model['path'])

		isa = results['model'].model[1].model
		dct = results['model'].transforms[0]

		# basis in whitened pixel space
		A = dot(dct.A[1:].T, isa.A)

		# basis vector norms
		norms = sort(sqrt(sum(square(A), 0)))[::-1]

		plot(norms,
			color=model['color'],
			line_width=1.2)
		plot([len(norms), len(norms) + 1, 255], [norms[-1], 0, 0],
			color=model['color'],
			line_style='densely dashed',
			line_width=1.2,
			pgf_options=['forget plot'])

		legend_entries.append(model['legend'])

	xlabel('Basis coefficient, $i$')
	ylabel('Basis vector norm, $||a_i||$')
	legend(*legend_entries, location='north east')
	axis(width=5, height=4)
	axis([0, 256, 0, 1])
	xtick([64, 128, 192, 256])
	grid()



	### VISUALIZE BASIS

	subplot(0, 1)

	results = Experiment(model['path'])

	isa = results['model'].model[1].model
	dct = results['model'].transforms[0]

	# basis in whitened pixel space
	A = dot(dct.A[1:].T, isa.A)
	indices = argsort(sqrt(sum(square(A), 0)))[::-1]
	A = A[:, indices]

	# adjust intensity range
	a = percentile(abs(A).ravel(), PERC)
	A = (A + a) / (2. * a) * 255. + 0.5
	A[A < 0.] = 0.5
	A[A > 256.] = 255.5
	A = asarray(A, 'uint8')

	# stitch together into a single image
	patch_size = int(sqrt(A.shape[0]) + 0.5)
	patches = stitch(A.T.reshape(-1, patch_size, patch_size), num_cols=NUM_COLS)
	patches = repeat(repeat(patches, RES, 0), RES, 1)

	imshow(patches, dpi=75 * RES)
	rectangle(72 * RES, 80.8 * RES, 64 * RES, 55.2 * RES,
		color=RGB(1.0, 0.8, 0.5),
		line_width=1.,
		line_style='densely dashed')

	axis(
		height=4, 
		width=4,
		ticks='none', 
		axis_on_top=False,
		clip=False,
		pgf_options=['xlabel style={yshift=-0.47cm}', 'clip=false'])

	savefig('results/vanhateren/overcompleteness.tex')
	draw()



	### MARGINAL SOURCE DISTRIBUTIONS

	figure()
	samples = []
	for gsm in isa.subspaces:
		samples.append(gsm.sample(1000))

	perc = percentile(hstack(samples), 99.5)
	xvals = linspace(-perc, perc, 100)

	for i in range(1, 7):
		for j in range(8, 15):
			try:
				gsm = isa.subspaces[indices[i * NUM_COLS + j]]
			except IndexError:
				pass
			else:
				subplot(7 - i, j, spacing=0)
				plot(xvals, laplace.logpdf(xvals, scale=sqrt(0.5)).ravel(), line_width=0.8, color=RGB(0.1, 0.6, 1.0))
				plot(xvals, gsm.loglikelihood(xvals.reshape(1, -1)).ravel(), 'k', line_width=1.)
				gca().width = 4. / 6.
				gca().height = 4. / 6.
				axis([-perc, perc, -6., 2.])
				xtick([])
				ytick([])

	savefig('results/vanhateren/marginals.tex')
	draw()

	return 0
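The reference curve uses scale=sqrt(0.5) because a Laplace with scale b has variance 2*b**2, so b = sqrt(0.5) gives the unit-variance Laplace density that the learned marginals are overlaid against:

import numpy as np
from scipy.stats import laplace

b = np.sqrt(0.5)
assert np.isclose(laplace.var(scale=b), 1.0)  # 2 * b**2 == 1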
Example no. 11
def main(argv):
	experiment = Experiment(argv[1])

	isa = experiment['model'].model[1].model
	dct = experiment['model'].transforms[0]



	### BASIS

	# basis in pixel space
	A = dot(dct.A[1:].T, isa.A)

	# sort by norm
	norms = sqrt(sum(square(A), 0))
	indices = argsort(norms)[::-1]
#	A = A[:, indices]

	# adjust intensity range
	a = percentile(abs(A).ravel(), PERC)
	A = (A + a) / (2. * a) * 255. + 0.5
	A[A < 0.] = 0.5
	A[A > 256.] = 255.5
	A = asarray(A, 'uint8')

	# stitch together into a single image
	patch_size = int(sqrt(A.shape[0]) + 0.5)
	patches = stitch(A.T.reshape(-1, patch_size, patch_size), num_cols=NUM_COLS)
	patches = repeat(repeat(patches, RES, 0), RES, 1)

	imshow(patches, dpi=75 * RES)
	axis('off')

	draw()



	### SAMPLES

	samples = experiment['model'].sample(128)

	a = percentile(abs(samples).ravel(), PERC)
	samples = (samples + a) / (2. * a) * 255. + 0.5
	samples[samples < 0.] = 0.5
	samples[samples > 256.] = 255.5
	samples = asarray(samples, 'uint8')

	samples = stitch(samples.T.reshape(-1, patch_size, patch_size))
	samples = repeat(repeat(samples, RES, 0), RES, 1)

	# visualize samples
	figure()
	imshow(samples, dpi=75 * RES)
	title('Samples')
	axis('off')
	draw()


	
	### MARGINAL SOURCE DISTRIBUTIONS

	figure()
	samples = []
	for gsm in isa.subspaces:
		samples.append(gsm.sample(1000))

	perc = percentile(hstack(samples), 99.5)
	xvals = linspace(-perc, perc, 100)

	for i in range(0, 8):
		for j in range(0, 16):
			try:
				gsm = isa.subspaces[indices[i * NUM_COLS + j]]
			except IndexError:
				pass
			else:
				subplot(7 - i, j, spacing=0)
				plot(xvals, laplace.logpdf(xvals, scale=sqrt(0.5)).ravel(), 'k', opacity=0.5)
				plot(xvals, gsm.loglikelihood(xvals.reshape(1, -1)).ravel(), 'b-', line_width=1.)
				gca().width = 0.8
				gca().height = 0.8
				axis([-perc, perc, -6., 2.])
				xtick([])
				ytick([])

	draw()

	return 0
Example no. 12
def main(argv):
    ### PLOT BASIS VECTOR NORMS

    subplot(0, 0, spacing=1.)

    legend_entries = []

    for model in models:
        results = Experiment(model['path'])

        isa = results['model'].model[1].model
        dct = results['model'].transforms[0]

        # basis in whitened pixel space
        A = dot(dct.A[1:].T, isa.A)

        # basis vector norms
        norms = sort(sqrt(sum(square(A), 0)))[::-1]

        plot(norms, color=model['color'], line_width=1.2)
        plot([len(norms), len(norms) + 1, 255], [norms[-1], 0, 0],
             color=model['color'],
             line_style='densely dashed',
             line_width=1.2,
             pgf_options=['forget plot'])

        legend_entries.append(model['legend'])

    xlabel('Basis coefficient, $i$')
    ylabel('Basis vector norm, $||a_i||$')
    legend(*legend_entries, location='north east')
    axis(width=5, height=4)
    axis([0, 256, 0, 1])
    xtick([64, 128, 192, 256])
    grid()

    ### VISUALIZE BASIS

    subplot(0, 1)

    results = Experiment(model['path'])

    isa = results['model'].model[1].model
    dct = results['model'].transforms[0]

    # basis in whitened pixel space
    A = dot(dct.A[1:].T, isa.A)
    indices = argsort(sqrt(sum(square(A), 0)))[::-1]
    A = A[:, indices]

    # adjust intensity range
    a = percentile(abs(A).ravel(), PERC)
    A = (A + a) / (2. * a) * 255. + 0.5
    A[A < 0.] = 0.5
    A[A > 256.] = 255.5
    A = asarray(A, 'uint8')

    # stitch together into a single image
    patch_size = int(sqrt(A.shape[0]) + 0.5)
    patches = stitch(A.T.reshape(-1, patch_size, patch_size),
                     num_cols=NUM_COLS)
    patches = repeat(repeat(patches, RES, 0), RES, 1)

    imshow(patches, dpi=75 * RES)
    rectangle(72 * RES,
              80.8 * RES,
              64 * RES,
              55.2 * RES,
              color=RGB(1.0, 0.8, 0.5),
              line_width=1.,
              line_style='densely dashed')

    axis(height=4,
         width=4,
         ticks='none',
         axis_on_top=False,
         clip=False,
         pgf_options=['xlabel style={yshift=-0.47cm}', 'clip=false'])

    savefig('results/vanhateren/overcompleteness.tex')
    draw()

    ### MARGINAL SOURCE DISTRIBUTIONS

    figure()
    samples = []
    for gsm in isa.subspaces:
        samples.append(gsm.sample(1000))

    perc = percentile(hstack(samples), 99.5)
    xvals = linspace(-perc, perc, 100)

    for i in range(1, 7):
        for j in range(8, 15):
            try:
                gsm = isa.subspaces[indices[i * NUM_COLS + j]]
            except IndexError:
                pass
            else:
                subplot(7 - i, j, spacing=0)
                plot(xvals,
                     laplace.logpdf(xvals, scale=sqrt(0.5)).ravel(),
                     line_width=0.8,
                     color=RGB(0.1, 0.6, 1.0))
                plot(xvals,
                     gsm.loglikelihood(xvals.reshape(1, -1)).ravel(),
                     'k',
                     line_width=1.)
                gca().width = 4. / 6.
                gca().height = 4. / 6.
                axis([-perc, perc, -6., 2.])
                xtick([])
                ytick([])

    savefig('results/vanhateren/marginals.tex')
    draw()

    return 0