Example no. 1
def plot_fft(fig, config):
	CASTS = [float, float]
	FIELDS = ["frequency", config.pgtype]

	# Iterate over all inputs.
	nfiles = len(config.ifiles)
	for i, ifile in enumerate(config.ifiles):
		# Get the periodogram information.
		with open(ifile, "r", newline="") as f:
			fx, fy = utils.csv_column_read(f, FIELDS, casts=CASTS)

		# Create a latex axis.
		ax = utils.latexify(fig.add_subplot(nfiles, 1, i + 1))

		# Plot the periodogram.
		ax.plot(fx, fy, color="k", linestyle="-", marker="None")
		ax.set_axisbelow(True)
		ax.set_xlabel("Frequency ($\mu$Hz)")

		# Make sure the axis names are correct.
		if config.pgtype == "amplitude":
			ax.set_ylabel("Amplitude (ppm)")
		elif config.pgtype == "psd":
			ax.set_ylabel("PSD (ppm$^2$ $\mu$Hz$^{-1}$)")

		if config.maxx > config.minx >= 0:
			ax.set_xlim([config.minx, config.maxx])
		if config.maxy > config.miny >= 0:
			ax.set_ylim([config.miny, config.maxy])

	plt.legend()
	fig.tight_layout()
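
# A minimal, hypothetical driver for plot_fft, sketched only to show how it might be
# called: `utils` and the config fields (ifiles, pgtype, minx/maxx, miny/maxy) are
# assumptions read off the function body above, and the input file name is made up.
import matplotlib.pyplot as plt
from types import SimpleNamespace

example_config = SimpleNamespace(ifiles=["periodogram.csv"], pgtype="psd",
                                 minx=0.0, maxx=0.0, miny=0.0, maxy=0.0)
example_fig = plt.figure()
plot_fft(example_fig, example_config)
plt.show()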
Example no. 2
def plot_echelle(config, fig, ifile):
	CASTS = [float, float]
	FIELDS = ["frequency", config.pgtype]

	if config.deltanu is None:
		raise NotImplementedError("∆𝜈 autocorrelation not implemented")

	# Get the cadence information.
	with open(ifile, "r", newline="") as f:
		fx, fy = utils.csv_column_read(f, FIELDS, casts=CASTS)

	# Filter start and end.
	filt = numpy.ones_like(fx, dtype=bool)
	if config.start is not None:
		filt &= fx >= config.start * config.deltanu
	if config.end is not None:
		filt &= fx <= (config.end + 1) * config.deltanu

	fx = fx[filt]
	fy = fy[filt]

	# Convert from power to amplitude.
	if config.pgtype == "psd":
		pass
		#fy *= numpy.diff(fx).mean()
		#fy **= 0.5
		#pass

	# Convert to wrapped echelle.
	fx += config.off
	fx %= config.deltanu

	# Sort the arrays.
	sfilt = numpy.argsort(fx)
	fx = fx[sfilt]
	fy = fy[sfilt]

	# Smooth using a Savgol filter.
	#itp = scipy.interpolate.interp1d(fx, fy, kind='linear')
	#fy = scipy.signal.savgol_filter(itp(fx), config.width, 6)
	win = scipy.signal.windows.boxcar(config.width)
	# Note: the window is not normalised, so the convolution also scales fy by config.width.
	fy = scipy.signal.convolve(fy, win, mode="same")

	# Plot.
	ax = utils.latexify(fig.add_subplot(111))
	ax.plot(fx, fy, color="r")
	ax.set_ylabel("PSD (ppm$^2$$\mu$Hz$^{-1}$)")
	ax.set_xlabel("Frequency mod %s ($\mu$Hz)" % (config.deltanu,))

	plt.legend()
	fig.tight_layout()
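
# Stand-alone illustration (numpy only) of the echelle wrapping step used above:
# frequencies are shifted by an offset and folded modulo the large separation
# deltanu, then re-sorted. The numbers below are made up for demonstration.
import numpy

demo_deltanu = 10.0                                   # assumed large separation [muHz]
demo_off = 0.0                                        # assumed offset
demo_fx = numpy.linspace(0.0, 100.0, 1001)            # fake frequency grid [muHz]
demo_wrapped = (demo_fx + demo_off) % demo_deltanu    # fold into [0, deltanu)
demo_order = numpy.argsort(demo_wrapped)              # sort so a plot is monotonic in x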
Example no. 3
import os
import numpy as np
import pylab as pl
import matplotlib as mpl
import matplotlib.pyplot as plt

from lbg_maker import lbg_maker
from prep_filters import prep_filters
from madau import lephare_madau
from extinction import calzetti00
from extinction import apply as ext_apply
from utils import latexify

latexify(columns=2,
         equal=False,
         fontsize=10,
         ratio=None,
         ggplot=True,
         usetex=True)

root = os.environ['BEAST']

redshifts = 1.5 + np.linspace(0., 4.0, 4)
magnitudes = 22.5 * np.ones_like(redshifts)

flux, wave, meta = lbg_maker(ngal=None,
                             restframe=False,
                             printit=False,
                             test=True,
                             seed=314,
                             redshifts=None,
                             magnitudes=None,
Example no. 4
    plotit = False

    fnames = [
        os.environ['LBGCMB'] + '/dropouts/Shapley/dat/composites/' + file
        for file in spectra
    ]
    fnames += [
        os.environ['LBGCMB'] +
        '/quickspectra/spectra/spec-qso-z1.5-rmag22.24.dat'
    ]
    fnames += [
        os.environ['LBGCMB'] +
        '/quickspectra/spectra/spec-elg-o2flux-8e-17.dat'
    ]

    latexify(columns=2, fontsize=10, ggplot=True)

    ##  Get noise curves and filters.
    ##  exp     =  60 * 60                             ##  [Seconds].
    ##  noise   =  get_noisecurve(exp, plotit=plotit)

    filters = get_filters(plotit=plotit)

    for redshift in redshifts:
        print('\n\nSolving for redshift: %.3lf.' % redshift)

        owaves = np.arange(1., 1.e4, 0.1)
        output = [owaves]

        for kk, fname in enumerate(fnames):
            print('\nLoading:  %s' % fname)
Example no. 5
    def prepare_cards(self, parsed, arguments, evaluator, evaluated):
        components, cards, evaluated, is_function = self.get_cards(arguments, evaluator, evaluated)

        if is_function:
            latex_input = ''.join(['<script type="math/tex; mode=display">',
                                   latexify(parsed, evaluator),
                                   '</script>'])
        else:
            latex_input = mathjax_latex(evaluated)

        result = []

        ambiguity = self.disambiguate(arguments)
        if ambiguity:
            result.append({
                "ambiguity": ambiguity[0],
                "description": ambiguity[1]
            })

        result.append({
            "title": "SymPy",
            "input": removeSymPy(parsed),
            "output": latex_input
        })

        if cards:
            if any(get_card(c).is_multivariate() for c in cards):
                result[-1].update({
                    "num_variables": len(components['variables']),
                    "variables": map(repr, components['variables']),
                    "variable": repr(components['variable'])
                })

        # If no result cards were found, but the top-level call is to a
        # function, then add a special result card to show the result
        if not cards and not components['variable'] and is_function:
            result.append({
                'title': 'Result',
                'input': removeSymPy(parsed),
                'output': format_by_type(evaluated, arguments, mathjax_latex)
            })
        else:
            var = components['variable']

            # If the expression is something like 'lcm(2x, 3x)', display the
            # result of the function before the rest of the cards
            if is_function and not is_function_handled(arguments[0]):
                result.append(
                    {"title": "Result", "input": "",
                     "output": format_by_type(evaluated, arguments, mathjax_latex)})

            line = "simplify(input_evaluated)"
            simplified = evaluator.eval(line,
                                        use_none_for_exceptions=True,
                                        repr_expression=False)
            if (simplified is not None and
                simplified != evaluated and
                arguments.args and
                len(arguments.args) > 0 and
                simplified != arguments.args[0]):
                result.append(
                    {"title": "Simplification", "input": repr(simplified),
                     "output": mathjax_latex(simplified)})
            elif arguments.function == 'simplify':
                result.append(
                    {"title": "Simplification", "input": "",
                     "output": mathjax_latex(evaluated)})

            for card_name in cards:
                card = get_card(card_name)

                if not card:
                    continue

                try:
                    result.append({
                        'card': card_name,
                        'var': repr(var),
                        'title': card.format_title(evaluated),
                        'input': card.format_input(repr(evaluated), components),
                        'pre_output': latex(
                            card.pre_output_function(evaluated, var)),
                        'parameters': card.card_info.get('parameters', [])
                    })
                except (SyntaxError, ValueError):
                    pass

            if is_function:
                learn_more = find_learn_more_set(arguments[0])
                if learn_more:
                    result.append({
                        "title": "Learn More",
                        "input": '',
                        "output": learn_more
                    })
        return result
Example no. 6
def plot_ilims(results, plot_des=False, plot_hsc=True):
    import os
    import numpy as np
    import pylab as pl
    import matplotlib.pyplot as plt

    from matplotlib.pyplot import text
    from specs import samplestats
    from selection_box import detection_bands

    latexify(fig_width=None, fig_height=None, columns=2, equal=False)

    colors = ['b', 'r', 'indigo']

    ##  Load Goldrush basic stats.
    stats = samplestats(printit=False)

    ##  Create figure.
    fig, axarray = plt.subplots(1, 1, sharey=False)
    fig.set_size_inches(6.5, 3.5)

    ##  Catch a one plot call.
    if not isinstance(axarray, np.ndarray):
        axarray = np.array([axarray])

    for k, dropband in enumerate(results.keys()):
        for ldepth, label in zip(['D'], ['GOLDRUSH Deep']):
            magbins, pnbar, counts = results[dropband][ldepth]

            ##  Plot ang_nbar against limiting mag. (rightmost edge).
            axarray[k].semilogy(magbins[1:],
                                pnbar,
                                '-',
                                label=r'$g-$' + label,
                                markersize=3,
                                lw=1.)

        for contamination, color in zip(['D', 'W'], ['dodgerblue', 'indigo']):
            rate = get_contamination(magbins[1:],
                                     round(stats[dropband]['z']),
                                     contamination,
                                     depth=ldepth)

            axarray[k].semilogy(magbins[1:], pnbar * (1. - rate), '--',
                                label='Less %s' % contamination + r' ($\simeq$' +
                                      '%.2lf) interlopers' % effective_depth(dropband, contamination),
                                markersize=3, lw=1., c=color, dashes=[3, 1], alpha=0.4)
        '''
        if plot_des:
          ##  DES depths/yr estimate                                                                                                                    
          depths = des_depths()

          ##  Plot the magnitude limit of DES SV. 
          axarray[k].axvline(depths['SV'][detection_bands[dropband]], c='k', linestyle='-', label='DES SV', lw = 0.5)
        
          ##  and the magnitude limits per year (estimated for year greater than one).
          for year in np.arange(1, 6, 1):        
            axarray[k].axvline(depths['Y' + str(year)][detection_bands[dropband]], c='k', linestyle='-', label='', lw = 0.5)
        '''

        root = os.environ['LBGCMB']
        data = np.loadtxt(
            root +
            "/dropouts/nz/schechter/dat/schechter_estimate_%s_dropouts.txt" %
            dropband)

        ##  0.59 completeness frac.
        axarray[k].semilogy(data[:, 0],
                            data[:, 1],
                            'k',
                            label='Best-fit UV Schechter fn.',
                            alpha=0.5)

        ##  0.59 completeness frac.  Galaxies only.
        axarray[k].semilogy(data[:, 0],
                            galaxy_frac(data[:, 0]) * data[:, 1],
                            'k--',
                            label='Best-fit galaxy UV Schechter fn.',
                            dashes=[3, 1])

        ymax = pnbar[magbins[1:] < 24.5].max()

        axarray[k].set_xlim(23.0, 26.5)  ## [magbins[1], magbins[-2]]
        axarray[k].set_ylim([10., 8.e3])

        axarray[k].set_xlabel(r'$%s_{\rm{AB}}$' % detection_bands[dropband])
        axarray[k].set_ylabel(r'$N(<%s)$ / deg$^2$' %
                              detection_bands[dropband])

        ## axarray[k].get_yaxis().get_major_formatter().set_scientific(False)
        ## axarray[k].ticklabel_format(style='sci', scilimits=(-1, 2))

    pl.legend(loc=4)
    pl.savefig('plots/hsc_icut.pdf', bbox_inches='tight')
Example no. 7
import numpy as np
import pylab as pl

from   utils import latexify


latexify(fig_width=None, fig_height=None, columns=2, equal=False, fontsize=10, ratio=None, ggplot=True, usetex=True)

##  Observational Stellar mass.
for zee in [4, 5]:
  dat        = np.loadtxt('dat/data-compilation/smf_ms/kslee_z%d.smf' % zee)
  Mstar      = 10. ** dat[:,0]  ## [Msun].
  nbarperdex = 10. ** dat[:,1]  ## [Mpc^-3 dex^{-1}]

  pl.loglog(Mstar, nbarperdex, '^', markersize=3, label=r'$z=%.1lf$' % zee)

pl.legend()
pl.xlabel(r'$M_* \ [M_{\odot}]$')
pl.ylabel(r'$\bar{n} \ [\rm{Mpc}^{-3} \ \rm{dex}^{-1}] $')
pl.savefig('plots/obs_sm.pdf', bbox_inches='tight')

Example no. 8
import os
import json
import numpy as np
import pylab as pl
import matplotlib.pyplot as plt

from utils import latexify
from fisher_contour import plot_ellipse

##  And plot contour ...
pl.clf()

latexify(columns=1, equal=True, fontsize=12)

fig = plt.gcf()
ax = plt.gca()

band = 'u'
int_frac = 0.06

with open('dat/result4interloper_%s_intfrac_%.2lf.json' % (band, int_frac),
          'r') as rfile:
    ##  {'peakz': peakz, 'fid_sig8': np.float(fid_sig8), 'fid_b1': np.float(fid_b1), 'biased_sig8': biased_sig8, 'biased_b1': biased_b1, 'iFisher': iFisher.tolist()}
    data = json.load(rfile)

##  Unpack.
peakz = data['peakz']
fid_sig8 = data['fid_sig8']
fid_b1 = data['fid_b1']
biased_sig8 = data['biased_sig8']
biased_b1 = data['biased_b1']
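
##  Generic sketch (not the repo's fisher_contour.plot_ellipse): draw the 1-sigma
##  contour implied by the 2x2 block of the inverse Fisher matrix.  The parameter
##  ordering (b1, sigma8) and the use of the first two rows/columns are assumptions.
from matplotlib.patches import Ellipse

cov = np.array(data['iFisher'])[:2, :2]
vals, vecs = np.linalg.eigh(cov)
angle = np.degrees(np.arctan2(vecs[1, 1], vecs[0, 1]))
ax.add_patch(Ellipse((fid_b1, fid_sig8),
                     width=2. * np.sqrt(vals[1]), height=2. * np.sqrt(vals[0]),
                     angle=angle, fill=False, color='k'))
ax.set_xlabel(r'$b_1$')
ax.set_ylabel(r'$\sigma_8$')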
Example no. 9
anll_train = sm_common.anll(data_common_train)
anll_test = sm_common.anll(data_common_test)
print("Common model")
print("\t", info)
print("\t", anll_train, anll_test)

# Visualize model
states = list(G_state.nodes())
# The graph Laplacian is symmetric, so eigh is the appropriate solver; the
# eigenvector of the second-smallest eigenvalue (the Fiedler vector) gives a
# one-dimensional spectral ordering of the states.
L = nx.laplacian_matrix(G_state).toarray()
eigvals, eigvecs = np.linalg.eigh(L)
eigvals_sorted = np.argsort(eigvals)
first_nonzero_eigvec = eigvecs[:, eigvals_sorted[1]]
states_sorted = [x for _, x in sorted(zip(first_nonzero_eigvec, states))]

statemap = {}
for state in states_sorted:
    state_vals = []
    for year in years:
        state_vals.append(float(sm_strat.G.node[(state, year)]['theta']))
    statemap[state] = state_vals
output = pd.DataFrame(statemap).T
output.columns = years

latexify(fig_width=13, fontsize=10)
plt.pcolormesh(output, cmap='coolwarm_r')
plt.yticks(np.arange(0.5, len(output.index), 1), output.index)
plt.xticks(np.arange(0.5, len(output.columns), 1), output.columns)
plt.colorbar()
plt.savefig('./figs/election_heatmap.pdf')
plt.close()
Example no. 10
import matplotlib as mpl
import numpy as np
import glob
import os
import pylab as pl
import matplotlib.pyplot as plt

from utils import latexify

if __name__ == "__main__":
    print('\n\nWelcome to a (plotter) of the angular correlation fn.\n\n')

    latexify(fig_width=None,
             fig_height=None,
             columns=2,
             equal=False,
             ratio=0.5,
             fontsize=12)

    pl.clf()
    '''
    Makes a 2 panel plot of w(theta) and b(r).
    '''
    scale = 2.

    tlo, thi = 30.0, 1000.0
    arcsec = np.pi / 180. / 3600.

    ##  First plot w(theta) for z=3 and 4.
    cc = 'darkblue'
    fig = pl.gcf()
Example no. 11
def main(show=False):
    margin = .05  # margin for drawing box

    initial_pos = 1
    final_pos = 1

    n = 100

    all_pos = np.linspace(0, 1, n)

    # Complicated lane example

    box_size = .18
    box_pos = [.2, .5, .8]
    box_orientation = [-1, 1, -1]

    x = cp.Variable(n)

    cons = [x[0] == initial_pos, -2 <= x, x <= 2, x[-1] == -1]

    obj = 0.0

    for i, pos in enumerate(all_pos):
        obj += sccf.minimum(cp.square(x[i] + 1), 1)
        obj += sccf.minimum(cp.square(x[i] - 1), 1)

        for b_pos, b_or in zip(box_pos, box_orientation):
            if b_pos <= pos and pos <= b_pos + box_size:
                cons.append(x[i] >= 0 if b_or > 0 else x[i] <= 0)

    for idx, weight in enumerate([10, 1, .1]):
        obj += weight * cp.sum_squares(cp.diff(x, k=idx + 1))

    prob = sccf.Problem(obj, cons)
    tic = time.time()
    result = prob.solve()
    toc = time.time()

    print("lane change 2:", obj.value)
    print("time:", toc - tic)
    print("iters:", result["iters"])

    latexify(fig_width=7, fig_height=2)
    plt.plot(all_pos * 100, x.value, c='black')
    plt.ylim(-2, 2)
    for pos, orientation in zip(box_pos, box_orientation):
        plt.gca().add_patch(
            Rectangle((pos * 100, .25 if orientation < 0 else -1.75),
                      (box_size - margin) * 100,
                      1.5,
                      facecolor='none',
                      edgecolor='k'))
    plt.axhline(0, ls='--', c='k')
    plt.savefig("figs/lane_changing.pdf")
    if show:
        plt.show()

    # Lower bound
    obj = 0

    z_top = [cp.Variable(n) for _ in range(n)]
    z_bottom = [cp.Variable(n) for _ in range(n)]

    x = cp.Variable(n)
    cons = [x[0] == initial_pos, -2 <= x, x <= 2, x[-1] == -1]

    lam_top = cp.Variable(n)
    lam_bottom = cp.Variable(n)
    cons.append(0 <= lam_top)
    cons.append(0 <= lam_bottom)
    cons.append(lam_top <= 1)
    cons.append(lam_bottom <= 1)

    for z, lam in zip(z_top + z_bottom, lam_top + lam_bottom):
        cons.append(z[0] == initial_pos * lam)
        cons.append(-2 * lam <= z)
        cons.append(z <= 2 * lam)
        cons.append(z[-1] == -1 * lam)

    for i, pos in enumerate(all_pos):
        obj += cp.quad_over_lin(z_top[i][i] + lam_top[i],
                                lam_top[i]) + (1 - lam_top[i])
        obj += cp.quad_over_lin(z_bottom[i][i] - lam_bottom[i],
                                lam_bottom[i]) + (1 - lam_bottom[i])

        for b_pos, b_or in zip(box_pos, box_orientation):
            if b_pos <= pos and pos <= b_pos + box_size:
                for z in z_top + z_bottom + [x]:
                    cons.append(z[i] >= 0 if b_or > 0 else z[i] <= 0)

    for idx, weight in enumerate([10, 1, .1]):
        for z, lam in zip(z_top + z_bottom, lam_top + lam_bottom):
            obj += weight * cp.quad_over_lin(cp.diff(z, k=idx + 1),
                                             lam) / (2 * n)
            obj += weight * cp.quad_over_lin(cp.diff(x - z, k=idx + 1),
                                             1 - lam) / (2 * n)

    prob = cp.Problem(cp.Minimize(obj), cons)
    obj_value = prob.solve(solver=cp.MOSEK)

    print("lane change lower bound:", obj_value)

    # MICP
    obj = 0

    z_top = [cp.Variable(n) for _ in range(n)]
    z_bottom = [cp.Variable(n) for _ in range(n)]

    x = cp.Variable(n)
    cons = [x[0] == initial_pos, -2 <= x, x <= 2, x[-1] == -1]

    lam_top = cp.Variable(n, boolean=True)
    lam_bottom = cp.Variable(n, boolean=True)

    for z, lam in zip(z_top + z_bottom, lam_top + lam_bottom):
        cons.append(z[0] == initial_pos * lam)
        cons.append(-2 * lam <= z)
        cons.append(z <= 2 * lam)
        cons.append(z[-1] == -1 * lam)

    for i, pos in enumerate(all_pos):
        obj += cp.quad_over_lin(z_top[i][i] + lam_top[i],
                                lam_top[i]) + (1 - lam_top[i])
        obj += cp.quad_over_lin(z_bottom[i][i] - lam_bottom[i],
                                lam_bottom[i]) + (1 - lam_bottom[i])

        for b_pos, b_or in zip(box_pos, box_orientation):
            if b_pos <= pos and pos <= b_pos + box_size:
                for z in z_top + z_bottom + [x]:
                    cons.append(z[i] >= 0 if b_or > 0 else z[i] <= 0)

    for idx, weight in enumerate([10, 1, .1]):
        for z, lam in zip(z_top + z_bottom, lam_top + lam_bottom):
            obj += weight * cp.quad_over_lin(cp.diff(z, k=idx + 1),
                                             lam) / (2 * n)
            obj += weight * cp.quad_over_lin(cp.diff(x - z, k=idx + 1),
                                             1 - lam) / (2 * n)

    prob = cp.Problem(cp.Minimize(obj), cons)
    import sys
    while True:
        answer = input(
            "Are you sure you would like to solve the MICP (y/n) ").lower()

        if answer == "y":
            break
        elif answer == "n":
            return
        else:
            print("Invalid answer.")
            continue

    obj_value = prob.solve(solver=cp.MOSEK, verbose=True)

    print("global optimum:", obj_value)
Example no. 12
import  hmf
import  numpy              as      np
import  matplotlib         as      mpl
import  matplotlib.pyplot  as      plt
import  pylab              as      pl

from    hmf                import  cosmo
from    astropy.cosmology  import  FlatLambdaCDM
from    utils              import  latexify
from    params             import  get_params
from    scipy.interpolate  import  interp1d


latexify(columns=1, equal=True, fontsize=10, ggplot=True, usetex=True)

params =  get_params()
cosmo  = {'H0': 100. * params['h_100'], 'Om0': 0.3153, 'Ob0': 0.02242 / 0.6736 ** 2.}

mf     =  hmf.MassFunction(cosmo_params = cosmo)
models = [hmf.fitting_functions.ST, hmf.fitting_functions.Tinker08]
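
##  Hedged sketch of evaluating the two fitting functions listed above; it assumes
##  hmf's MassFunction.update() / .m / .dndlnm interface.
for model in models:
  mf.update(hmf_model=model)
  pl.loglog(mf.m, mf.dndlnm, label=model.__name__)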

def get_stellar2halo(drop_band = 'u'):
  data           = np.loadtxt('../dat/stellar/smhm.txt')

  halo_mass      = data[:,0]               ## [Msun / h]

  bands          = {'u': data[:,1], 'g': data[:,2], 'r': data[:,3]}

  stellar_mass   =  bands[drop_band]
  stellar_mass   =  10. ** stellar_mass
  stellar_mass  *= (halo_mass / params['h_100'])  ## Mstellar [Msun]
Example no. 13
import os
import numpy as np
import pylab as pl
import matplotlib.pyplot as plt

from scipy.interpolate import UnivariateSpline, interp1d
from utils import latexify
from scipy.optimize import curve_fit
from growth_rate import growth_factor

latexify(fig_height=2.,
         fig_width=3.,
         columns=2,
         equal=False,
         fontsize=10,
         ratio=0.5,
         ggplot=True,
         usetex=True)

##  GOLDRUSH
root = os.environ['LBGCMB'] + '/dropouts/bz/dat/'
har = np.loadtxt(root + 'harikane4.dat')
hare = np.loadtxt(root + 'harikane4_err.dat')

zs = np.arange(2.7, 5.1, 0.01)
ii = interp1d(har[:-1, 0],
              har[:-1, 1],
              'linear',
              copy=True,
              bounds_error=False,
              fill_value='extrapolate',
Example no. 14
                            M=M,
                            lr=1e-2,
                            verbose=True,
                            niter=25)
toc = time.time()
print("time", toc - tic, "s")
xhat, yhat, DT = auto_ks.kalman_smoother(params, y, K & ~M_test, lam)

# evaluate
loss_auto = np.linalg.norm(yhat[M_test] - y[M_test])**2 / M_test.sum()
print("final_test_loss:", loss_auto)

print(np.diag(np.linalg.inv(params.W_neg_sqrt @ params.W_neg_sqrt.T)))
print(np.diag(np.linalg.inv(params.V_neg_sqrt @ params.V_neg_sqrt.T)))

latexify(4, 3)
plt.plot(yhat_initial[::50, 0],
         yhat_initial[::50, 1],
         '--',
         alpha=.5,
         c='black',
         label='before')
plt.plot(yhat[::50, 0], yhat[::50, 1], '-', alpha=.5, c='black', label='after')
plt.scatter(y[::50, 0], y[::50, 1], s=.5, c='black')
plt.legend()
plt.xlabel("East (m)")
plt.ylabel("North (m)")
plt.subplots_adjust(left=.15, bottom=.2)
plt.savefig("figs/driving.pdf")
plt.close()
Example no. 15
def plot_lc(config, fig, ifile):
	# Get the cadence information.
	with open(ifile, "r", newline="") as f:
		times, fluxs = utils.csv_column_read(f, FIELDS, casts=CASTS, start=config.start, end=config.end)

	# Times and fluxes.
	xs = times
	ys = fluxs

	# Convert flux to ppm.
	#ys = ys / ys.mean() - 1
	#ys *= 1e6

	# Figure out time-related offsets.
	offset = np.min(times)
	xs -= offset
	if config.timestamp is not None:
		config.timestamp -= offset

	if config.fft:
		fx, fy = utils.lombscargle_amplitude(xs, ys, upper=config.high_freq)

		if config.fftout:
			with open(config.fftout, "w", newline="") as f:
				utils.csv_column_write(f, [fx, fy], ["frequency", "amplitude"])

		fx, fy = utils.raw_to_psd(fx, fy, fluxs.var())

	if config.lc:
		if config.period is not None:
			# TODO: We should allow for showing more than one phase.
			xs = (xs % config.period) / config.period
			xs = (xs + config.phase) % 1.0

		# Bin the folded phase plot by taking the average of ranges.
		if config.bins is not None:
			size = 1.0 / config.bins
			nys = np.zeros(config.bins)

			for i in range(config.bins):
				rnge = (i*size <= xs) & (xs < (i+1)*size)
				nys[i] = np.median(ys[rnge])

			ys = nys
			xs = np.arange(config.bins) * size

		# Replication.
		xs = np.tile(xs, config.width) + np.repeat(np.arange(config.width), xs.shape[0])
		ys = np.tile(ys, config.width)

	if config.fft and config.lc:
		ax1 = utils.latexify(fig.add_subplot(211))
	else:
		ax1 = utils.latexify(fig.add_subplot(111))

	if config.lc:
		if not (config.period or config.bins):
			ax1.plot(xs, ys, color="0.5", linestyle="-", marker="None")
			#ax1.plot(xs, ys, color="k", linestyle="None", marker="+", label=r"Kepler/K2 Halo Photometry")
			ax1.set_xlabel("Time ($d$)")
		else:
			# TODO: We should overlay a binned version.
			if config.timestamp is not None:
				predicted = (config.timestamp % config.period) / config.period
				predicted = (predicted + config.phase) % 1.0
				ax1.xaxis.set_ticks(predicted + np.arange(config.width), minor=True)
				ax1.xaxis.grid(True, which="minor", color="r", linestyle="--", linewidth=2)
			ax1.plot(xs, ys, color="0.5", linestyle="None", marker="o", label=r"Kepler/K2 Halo Photometry")
			ax1.set_xlabel("Phase")
		ax1.set_ylabel(r"Intensity (ppm)")

	if config.lc and config.title:
		ax1.set_title(r"Light Curve [%s] # %s" % (description(config), config.comment or ""))

	if config.fft:
		if config.lc:
			ax2 = utils.latexify(fig.add_subplot(212))
		else:
			ax2 = ax1
		ax2.plot(fx, fy, color="k", linestyle="-", marker="None")
		#ax2.xaxis.set_ticks(np.arange(*ax2.get_xlim(), step=1))
		#ax2.xaxis.set_ticks(np.arange(*ax2.get_xlim(), step=0.25), minor=True)
		#ax2.xaxis.grid(True, which="major", color="k", linestyle="--")
		#ax2.xaxis.grid(True, which="minor", color="k", linestyle=":")
		ax2.set_axisbelow(True)
		#ax2.set_xlabel("Frequency ($d^{-1}$)")
		ax2.set_xlabel("Frequency ($\mu$Hz)")
		#ax2.set_ylabel("Amplitude (ppm)")
		ax2.set_ylabel("PDF (ppm$^2$ $\mu$Hz$^{-1}$)")
		if config.maxx > config.minx:
			ax2.set_xlim([config.minx, config.maxx])
		if config.maxy > config.miny:
			ax2.set_ylim([config.miny, config.maxy])

	plt.legend()
	fig.tight_layout()
Example no. 16
#  Fits to the stellar-mass-halo-mass relation from Ishikawa+17
#  https://xxx.lanl.gov/pdf/1612.06869v2
#  Eqs. (20, 21) and Table 3.
#  This uses the fitting function form of Behroozi+18, which I
#  reverse engineered from the "gen_smhm.py" script in the EDR.

import pylab as pl
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt

from collections import OrderedDict
from utils import latexify
from params import get_params

latexify(fig_width=None, fig_height=None, columns=1, equal=True)

params = get_params()


def smhm_i17(Mh, drop_type):
    '''
    SMHM relation from Ishikawa+17 (arxiv:1612.06869; Eqs. 20,21 & Tab.3).
    Returns Mstar / Mh given Mh in Msun (not Msun/h).
    '''

    if drop_type == 'u':
        aa = 0.25
        M1, eps = 10**12.10, 2.222e-2

    elif drop_type == 'g':
Example no. 17
from schechter.get_pz import get_pz
from nbar import comovdensity
from get_bz import bz_callmodel
from get_schechters import get_schechters
from get_wschechters import get_wschechter
from growth_rate import growth_rate
from reddy.specs import samplestats as reddy_stats
from goldrush.specs import samplestats as grush_stats
from Malkan.specs import samplestats as malkan_stats
from reddy.pz import get_pz as reddy_getpz
from goldrush import completeness as grush_completeness
from Malkan import completeness as malkan_completeness

latexify(columns=2,
         ratio=0.5,
         equal=False,
         fontsize=12,
         ggplot=True,
         usetex=True)

for color, band in zip(['y', 'b', 'g', 'r'], ['BX', 'u', 'g', 'r']):
    frac = True
    area = 15000.

    deltav = 400.  ##  [km / s].
    kmax = 0.9

    pz = get_pz(band)
    peakz = _peakz(pz)

    fsky = area / 41253.
    sigp = (1. + peakz) * deltav / cosmo.efunc(peakz) / 100.  ##  [Mpc / h].
Example no. 18
    def prepare_cards(self, parsed, arguments, evaluator, evaluated):
        components, cards, evaluated, is_function = self.get_cards(
            arguments, evaluator, evaluated)

        if is_function:
            latex_input = ''.join([
                '<script type="math/tex; mode=display">',
                latexify(parsed, evaluator), '</script>'
            ])
        else:
            latex_input = mathjax_latex(evaluated)

        result = []

        ambiguity = self.disambiguate(arguments)
        if ambiguity:
            result.append({
                "ambiguity": ambiguity[0],
                "description": ambiguity[1]
            })

        result.append({
            "title": "SymPy",
            "input": removeSymPy(parsed),
            "output": latex_input
        })

        if cards:
            if any(get_card(c).is_multivariate() for c in cards):
                result[-1].update({
                    "num_variables":
                    len(components['variables']),
                    "variables":
                    list(map(repr, components['variables'])),
                    "variable":
                    repr(components['variable'])
                })

        # If no result cards were found, but the top-level call is to a
        # function, then add a special result card to show the result
        if not cards and not components['variable'] and is_function:
            result.append({
                'title':
                'Result',
                'input':
                removeSymPy(parsed),
                'output':
                format_by_type(evaluated, arguments, mathjax_latex)
            })
        else:
            var = components['variable']

            # If the expression is something like 'lcm(2x, 3x)', display the
            # result of the function before the rest of the cards
            if is_function and not is_function_handled(arguments[0]):
                result.append({
                    "title":
                    "Result",
                    "input":
                    "",
                    "output":
                    format_by_type(evaluated, arguments, mathjax_latex)
                })

            line = "simplify(input_evaluated)"
            simplified = evaluator.eval(line,
                                        use_none_for_exceptions=True,
                                        repr_expression=False)
            if (simplified is not None and simplified != evaluated
                    and arguments.args and len(arguments.args) > 0
                    and simplified != arguments.args[0]):
                result.append({
                    "title": "Simplification",
                    "input": repr(simplified),
                    "output": mathjax_latex(simplified)
                })
            elif arguments.function == 'simplify':
                result.append({
                    "title": "Simplification",
                    "input": "",
                    "output": mathjax_latex(evaluated)
                })

            for card_name in cards:
                card = get_card(card_name)

                if not card:
                    continue

                try:
                    result.append({
                        'card':
                        card_name,
                        'var':
                        repr(var),
                        'title':
                        card.format_title(evaluated),
                        'input':
                        card.format_input(repr(evaluated), components),
                        'pre_output':
                        latex(card.pre_output_function(evaluated, var)),
                        'parameters':
                        card.card_info.get('parameters', [])
                    })
                except (SyntaxError, ValueError):
                    pass

            if is_function:
                learn_more = find_learn_more_set(arguments[0])
                if learn_more:
                    result.append({
                        "title": "Learn More",
                        "input": '',
                        "output": learn_more
                    })
        return result
Example no. 19
print('Stratified model')
print('\t', info)
print('\t', anll_test, pred_error)

common = strat_models.LogisticRegression(lambd=.1)
info = common.fit(X_train, Y_train, [0] * len(Y_train), nx.empty_graph(1),
                  **kwargs)
anll_test = common.anll(X_test, Y_test, [0] * len(Y_test))
pred_error = prediction_error(X_test, Y_test, [0] * len(Y_test), common)

print('Common model')
print('\t', info)
print('\t', anll_test, pred_error)

# Visualize
latexify(6)

for i in [17, -1]:
    male_params, female_params = [], []
    for node in strat.G.nodes():
        if node[0] == 'Male':
            male_params.append(strat.G.node[node]['theta'][i][0])
        else:
            female_params.append(strat.G.node[node]['theta'][i][0])
    title = data.columns[i] if i > 0 else 'intercept'
    plt.title(title)
    print(title)
    plt.plot(list_of_ages, male_params, label='Male')
    plt.plot(list_of_ages, female_params, label='Female')
    plt.xlabel('Age')
    plt.legend()
Example no. 20
print("Common")
print("\t", info)
print("\t", score)

rf = RandomForestRegressor(n_estimators=50, min_samples_leaf=1, n_jobs=-1)
rf.fit(df_train.drop(['log_price', 'lat_bin', 'long_bin'], axis=1),
       df_train['log_price'])
score = rms(
    rf.predict(df_test.drop(['log_price', 'lat_bin', 'long_bin'], axis=1)) -
    df_test['log_price'])
print("RF")
print("\t", score)
print("\t", np.sum([rf.estimators_[i].tree_.node_count for i in range(50)]))

# Visualize
latexify(fig_width=8)
params = np.array(
    [sm_strat.G.node[node]['theta'] for node in sm_strat.G.nodes()])
params = params.reshape(bins, bins, 10)[::-1, :, :]
feats = [
    'bedrooms', 'bathrooms', 'sqft living', 'sqft lot', 'floors', 'waterfront',
    'condition', 'grade', 'yr built', 'intercept'
]
min_lat = df['lat'].min()
max_lat = df['lat'].max()
min_long = df['long'].min()
max_long = df['long'].max()
lat_labels = ["%.1f" % x for x in np.linspace(min_lat, max_lat, 6)]
long_labels = ["%.1f" % x for x in np.linspace(min_long, max_long, 6)]

fig, axes = plt.subplots(2, 5)
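
##  Hedged sketch of filling the 2x5 grid with the ten per-feature coefficient maps;
##  this loop is an assumption about how the figure continues, not the original code.
for j, ax in enumerate(axes.ravel()):
    ax.pcolormesh(params[:, :, j], cmap='coolwarm_r')
    ax.set_title(feats[j])
    ax.set_xticks([])
    ax.set_yticks([])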
Example no. 21
  data    = np.loadtxt('./dat/planck18_bao.dat')

  zs      = data[:,0]
  fsig8   = data[:,3]
  efsig8  = data[:,4]

  if interp:
    return  zs, interp1d(zs, fsig8, kind='linear', bounds_error=True, assume_sorted=False), interp1d(zs, efsig8, kind='linear', bounds_error=True, assume_sorted=False)

  return  zs, fsig8, efsig8  


if __name__ == '__main__':
    print('\n\nWelcome to Planck18 + BAO.\n\n')

    latexify(fig_width=None, fig_height=None, columns=1, equal=True, usetex=True, fontsize=10)

    zs,  sig8,  esig8 = get_sig8z()
    zs, fsig8, efsig8 = get_fsig8()

    pl.errorbar(zs,          sig8,   esig8, fmt='^', label=r'$\sigma_8(z)$',        capsize=5, markersize=3, linestyle='', alpha=0.6)
    pl.errorbar(zs + 0.085, fsig8,  efsig8, fmt='s', label=r'$f(z) \ \sigma_8(z)$', capsize=5, markersize=3, linestyle='', alpha=0.6)

    pl.xlim(1.8, 5.2)
    pl.ylim(.15, .35)

    pl.xlabel(r'$z$')
    pl.legend(loc=1, frameon=False)

    ##  pl.show()
    pl.savefig('plots/planck18_bao.pdf', bbox_inches='tight')
Example no. 22
def main(show):
    # generate data
    np.random.seed(243)
    m, n = 5, 1
    n_outliers = 1
    eta = 0.1
    alpha = 0.5
    A = np.random.randn(m, n)
    x_true = np.random.randn(n)
    b = A @ x_true + 1e-1 * np.random.randn(m)
    b[np.random.choice(np.arange(m), replace=False, size=n_outliers)] *= -1.

    # alternating
    x_alternating = cp.Variable(n)
    objective = 0.0
    for i in range(m):
        objective += sccf.minimum(cp.square(A[i]@x_alternating-b[i]), alpha)
    objective += eta * cp.sum_squares(x_alternating)
    prob = sccf.Problem(objective)
    prob.solve()

    # solve relaxed problem
    x_relaxed = cp.Variable(n)
    z = [cp.Variable(n) for _ in range(m)]
    s = cp.Variable(m)
    objective = 0.0
    constraints = [0 <= s, s <= 1]
    for i in range(m):
        objective += cp.quad_over_lin(A[i, :] @ z[i] - b[i] * s[i], s[i]) + (1.0 - s[i]) * alpha  + \
                    eta / m * (cp.quad_over_lin(x_relaxed - z[i], 1.0 - s[i]) + eta / m * cp.quad_over_lin(z[i], s[i]))
    prob = cp.Problem(cp.Minimize(objective), constraints)
    result = prob.solve(solver=cp.MOSEK)

    # alternating w/ relaxed initialization
    x_alternating_perspective = cp.Variable(n)
    x_alternating_perspective.value = x_relaxed.value
    objective = 0.0
    for i in range(m):
        objective += sccf.minimum(cp.square(A[i]@x_alternating_perspective-b[i]), alpha)
    objective += eta * cp.sum_squares(x_alternating_perspective)
    prob = sccf.Problem(objective)
    prob.solve(warm_start=True)

    # brute force evaluate function and perspective
    xs = np.linspace(-5, 5, 100)
    f = np.sum(np.minimum(np.square(A * xs - b[:, None]), alpha), axis=0) + eta*xs**2
    f_persp = []
    for x in xs:
        z = [cp.Variable(n) for _ in range(m)]
        s = cp.Variable(m)

        objective = 0.0
        constraints = [0 <= s, s <= 1]
        for i in range(m):
            objective += cp.quad_over_lin(A[i, :] @ z[i] - b[i] * s[i], s[i]) + (1.0 - s[i]) * alpha + \
                        eta / m * (cp.quad_over_lin(x - z[i], 1.0 - s[i]) + eta / m * cp.quad_over_lin(z[i], s[i]))
        prob = cp.Problem(cp.Minimize(objective), constraints)
        result = prob.solve(solver=cp.MOSEK)
        f_persp.append(result)

    def find_nearest(array, value):
        array = np.asarray(array)
        idx = (np.abs(array - value)).argmin()
        return idx 

    # plot
    latexify(fig_width=6, fig_height=4)
    plt.plot(xs, f, '-', label="$L(x)$", c='k')
    plt.plot(xs, f_persp, '--', label="perspective", c='k')
    plt.scatter(x_alternating.value[0], f[find_nearest(xs, x_alternating.value[0])], marker='o', label="sccf (no init)", c='k')
    plt.scatter(x_alternating_perspective.value[0], f[find_nearest(xs, x_alternating_perspective.value[0])], marker='*', label="sccf (persp init)", c='k')
    plt.legend()
    plt.xlabel("$x$")
    plt.savefig("figs/perspective.pdf")
    if show:
        plt.show()
Example no. 23
import numpy as np
import pylab as pl
import matplotlib.pyplot as plt

from schechterfn import SchechterLfn
from utils import latexify

latexify(columns=1,
         equal=True,
         fontsize=12,
         ratio=None,
         ggplot=True,
         usetex=True)

##  Marchesini:  z=2.5
dat = np.loadtxt('dat/data-compilation/smf_ms/marchesini_z2.5.smf')

##  Columns:  Log10(stellar mass) (Msun), Log10(ND) (1 / Mpc^3 / dex), Err+ (dex), Err- (dex).
##  Convert the dex errors into linear, asymmetric error bars.
nd = 10.**dat[:, 1]
asymmetric_error = [nd - 10.**(dat[:, 1] - dat[:, 3]), 10.**(dat[:, 1] + dat[:, 2]) - nd]

pl.errorbar(10.**dat[:, 0], nd, yerr=asymmetric_error, fmt='o', label='2.5', markersize=2)

##  Marchesini:  z=3.5
dat = np.loadtxt('dat/data-compilation/smf_ms/marchesini_z3.5.smf')

##  Columns:  Log10(stellar mass) (Msun), Log10(ND) (1/Mpc^3/dex), Err+ (dex), Err- (dex)
pl.plot(10.**dat[:, 0], 10.**dat[:, 1], label='3.5', markersize=2)
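
##  For reference, a generic Schechter form per dex (a textbook expression, not the
##  repo's SchechterLfn; the function name and parameters here are illustrative only):
##  Phi(M) dlog10(M) = ln(10) * phistar * (M / Mstar)^(1 + alpha) * exp(-M / Mstar) dlog10(M).
def schechter_per_dex(M, phistar, Mstar, alpha):
    x = M / Mstar
    return np.log(10.) * phistar * x**(1. + alpha) * np.exp(-x)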

##  Table 3 and Fig. (10) of https://arxiv.org/pdf/1507.05636.pdf
##  Schechter fn. form of the Luminosity type.
'''
Example no. 24
    print("\n\nWelcome to rrtemplate_io.")

    ## data     = np.loadtxt(os.environ['LBGCMB'] + '/quickspectra/spectra/BC03/restframe/spec-BC03-z0.0.dat')
    ## create_template(data[:,0], data[:,1], type = 'LBG', printit = False)

    root = None
    root = os.environ['LBGCMB'] + '/redrock/templates/ext_wave/'

    types = {'Q2_198': None, 'LBG': None, 'ELG': None}
    ## types = {'galaxy': None,  'qso':    None, 'LBG': None, 'star-B': None}
    ## types = {'star-M': None,  'star-K': None, 'star-G': None, 'star-F': None,  'star-A': None, 'star-B': None}

    latexify(fig_width=12.,
             fig_height=6.,
             equal=False,
             fontsize=10,
             ggplot=True,
             usetex=True)

    pl.axvline(x=1216., ymin=0., ymax=1., c='k')

    for type in types:
        types[type] = read_template(type=type, printit=True, root=root)

        for i in np.arange(types[type]['ntemp']):
            index = np.where(
                np.abs(types[type]['wave'] -
                       6.e3) == np.abs(types[type]['wave'] - 6.e3).min())
            norm = types[type]['temp_%d' % i][index]

            pl.plot(types[type]['wave'],
Example no. 25
import numpy as np
import pandas as pd
import pylab as pl

from utils import latexify

latexify(fig_width=None, fig_height=None, columns=2, equal=False)
"""
Compare the LSST N(i_AB) form assumed to measurements of the Hubble (Ultra) Deep Field (Metcalfe et al.) and Capak ++.
"""


def lsst_Ng(ilim):
    result = 46. * 10.**(
        0.3 * (ilim - 25.)
    )  ## LSST science book (3.7); 46 galaxies per arcmin^2 for ilim = 25.

    return result * 60.**2.  ## galaxies per sq. deg.
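
##  Quick sanity check of the counts model above: at the nominal limit i = 25 it gives
##  46 gal / arcmin^2 = 165,600 gal / deg^2, and roughly half of that one magnitude
##  brighter.  (Illustrative only.)
print(lsst_Ng(25.0))   ##  ~1.66e5 per sq. deg.
print(lsst_Ng(24.0))   ##  ~8.3e4 per sq. deg.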


def plot_Metcalfe():
    data = pd.read_table(
        "dat/lsst_nz/Metcalfe00.dat",
        skiprows=1,
        sep=r"\s*",
        names=['ilo', 'ihi', 'NorNraw', 'NorNgal', 'SouNraw', 'SouNgal'],
        engine='python')

    print(data)

    data[
Example no. 26
def plot_echelle(config, fig, ifile):
	CASTS = [float, float]
	FIELDS = ["frequency", config.pgtype]

	if config.deltanu is None:
		raise NotImplementedError("∆𝜈 autocorrelation not implemented")

	# Get the cadence information.
	with open(ifile, "r", newline="") as f:
		fx, fy = utils.csv_column_read(f, FIELDS, casts=CASTS)

	# Filter start and end.
	filt = numpy.ones_like(fx, dtype=bool)
	if config.start is not None:
		filt &= fx >= config.start * config.deltanu
	if config.end is not None:
		filt &= fx <= (config.end + 1) * config.deltanu

	fx = fx[filt]
	fy = fy[filt]

	# Convert from power to amplitude.
	if config.pgtype == "psd":
		pass
		#fy *= numpy.diff(fx).mean()
		#fy **= 0.5

	# First we bin the transform.
	width = config.smoothwidth
	bins = int(fx.ptp() // width)

	newfy = numpy.zeros(bins)
	for i in range(bins):
		rnge = (i*width <= fx) & (fx < (i+1)*width)
		newfy[i] = fy[rnge].sum()

	fy = newfy
	fx = numpy.linspace(fx.min(), fx.max(), bins)

	# Then we compute the grid sizes.
	delta = numpy.diff(fx).mean()
	rows = int(fx.ptp() // config.deltanu)
	cols = int(config.deltanu // delta)

	# Fill in the grid from the binned data.
	grid = numpy.zeros([rows, cols])
	for y, _ in enumerate(grid):
		grid[y,...] = fy[y*cols:(y+1)*cols]

	ax = utils.latexify(fig.add_subplot(111))
	#ax.imshow(-grid, cmap="gray", interpolation="nearest", origin="bottom")
	#ax.imshow(grid, cmap="viridis", interpolation="nearest", origin="bottom")
	ax.pcolormesh(grid, cmap="viridis")
	#ax.pcolormesh(-grid, cmap="gray")
	#ax.xaxis.set_ticks(numpy.arange)
	ax.set_xlim([0, config.deltanu])
	ax.xaxis.set_ticks(numpy.arange(*ax.get_xlim(), step=20))
	ax.xaxis.set_ticks(numpy.arange(*ax.get_xlim(), step=5), minor=True)
	ax.set_ylim([config.start, config.end-config.start])
	ax.yaxis.set_ticks(numpy.arange(*ax.get_ylim(), step=1)+1)
	#ax.yaxis.set_ticks(numpy.arange(*ax.get_ylim(), step=0.25), minor=True)
	#ax.set_ylabel("Frequency ($\mu$Hz)")
	ax.set_ylabel("Mode Order")
	ax.set_xlabel("Frequency mod %s ($\mu$Hz)" % (config.deltanu,))

	plt.legend()
	fig.tight_layout()
Example no. 27
        ##  Load with np.nan (coloured white) where no successful redshift is available.
        zv = np.zeros_like(yv) * np.nan

        for row in rrresult:
            ##  Where a successful redshift was available, load with shortest exposure time.
            zv[(xv == row[0]) & (yv == row[1])] = row[2] / 60. / 60.

        zv = np.ma.masked_invalid(zv)

        pl.clf()

        ##  And plot.
        latexify(fig_width=None,
                 fig_height=None,
                 columns=1,
                 equal=True,
                 ggplot=False)

        fig = pl.gcf()

        cmap = plt.get_cmap('viridis')
        cmap.set_bad(color='white', alpha=1.)

        plt.pcolormesh(xv, yv, zv, cmap=cmap, vmin=0., vmax=6.)

        cbar = plt.colorbar()
        cbar.ax.set_ylabel('Exposure time [hours]', rotation=270, labelpad=20)

        pl.xlim(redshifts[0], redshifts[-1])
Example no. 28
        results = np.array(results)

        np.savetxt(os.environ['LBGCMB'] + '/mqw/dat/mqw_%sdrops.txt' % band,
                   results,
                   fmt='%.4le',
                   delimiter='\t')

    ##  And plot ...
    data = np.loadtxt(os.environ['LBGCMB'] + '/mqw/dat/mqw_%sdrops.txt' % band)

    Nsz = np.unique(data[:, 0])  ##  N spec.
    mms = np.unique(data[:, 1])  ##  mag. lim.
    Npz = np.unique(data[:, 2])  ##  N phot.

    ##
    latexify(fig_height=2.2, columns=2, fontsize=12)

    if add_desi:
        fig, axs = plt.subplots(1, 4, sharey=True)
        index = 1

    else:
        fig, axs = plt.subplots(1, 3, sharey=True)
        index = 0

    for kk, percentile in enumerate(percentiles):
        for ii, mm in enumerate(mms):
            dat = data[data[:, 1] == mm]
            Np = dat[0, 2]

            if (kk == index) & (mm in [24.0, 24.5]):