Ejemplo n.º 1
0
Archivo: cloudy.py Proyecto: nhmc/H2
def main():
    """Build and run a grid of Cloudy photoionization models.

    Expects a ./grid.cfg configuration file in the current directory; if
    it is missing, an example config is written and the program exits.
    Otherwise the configuration is read, an incident UV background
    spectrum is generated (optionally adding a local starburst
    component), the Cloudy grid input is written, Cloudy is optionally
    run, and the parsed model grid is saved to '<prefix>_grid.sav.gz'.
    """
    if not os.path.lexists('grid.cfg'):
        print ('./grid.cfg file not found, writing an example grid.cfg to '
               'the current directory')
        write_example_grid_config()
        sys.exit()

    cfg = read_config('grid.cfg')

    # Echo the parsed configuration so runs are self-documenting.
    print ''
    print 'Input values:'
    for k in sorted(cfg):
        print '  %s: %s' % (k, cfg[k])
    print ''

    if cfg.table is None:
        # No pre-built Cloudy table requested: generate an explicit
        # incident UV background spectrum instead.
        fluxname = cfg.prefix + '_temp_uvb.dat'
        uvb = calc_uvb(cfg.z, cfg.cuba_name, match_fg=True)
        writetable('cloudy_jnu_HM.tbl', [uvb['energy'], uvb['logjnu']],
                   overwrite=1,
                   units=['Rydbergs', 'log10(erg/s/cm^2/Hz/ster)'],
                   names=['energy', 'jnu'])

        if cfg.distance_starburst_kpc is not None:
            # Add a local starburst contribution to the UV background.
            wa, F = read_starburst99('starburst.spectrum1')
            nu, logjnu = calc_local_jnu(wa, F, cfg.distance_starburst_kpc,
                                        cfg.fesc)
            energy = nu * hplanck / Ryd
            # use HM uvb energy limits
            cond = between(uvb['energy'], energy[0], energy[-1])
            logjnu1 = np.interp(uvb['energy'][cond], energy, logjnu)
            # Sum the two components in linear (not log) flux space.
            uvb['logjnu'][cond] = np.log10(10**uvb['logjnu'][cond] +
                                           10**logjnu1)
            writetable('cloudy_jnu_total.tbl', [uvb['energy'], uvb['logjnu']],
                       overwrite=1,
                       units=['Rydbergs', 'log10(erg/s/cm^2/Hz/ster)'],
                       names=['energy', 'jnu'])

        write_uvb(fluxname, uvb['energy'], uvb['logjnu'], cfg.overwrite)

        # Fnu at 1 Rydberg
        k = np.argmin(np.abs(uvb['energy'] - 1.))
        logfnu912 = np.log10(10**uvb['logjnu'][k] * 4 * pi)
    else:
        # A Cloudy 'table' command supplies the spectrum; only the
        # normalisation at 1 Rydberg is taken from the config.
        logfnu912 = cfg.logfnu912
        fluxname = None

    write_grid_input(cfg, fnu912=logfnu912, fluxfilename=fluxname, table=cfg.table,
                     abundances=cfg.abundances)

    if cfg.run_cloudy:
        run_grid(nproc=cfg.nproc, overwrite=cfg.overwrite)

    models = parse_grid2(cfg)

    filename = cfg.prefix + '_grid.sav.gz'
    print 'Writing to', filename
    saveobj(filename, models, overwrite=cfg.overwrite)
Ejemplo n.º 2
0
def save_samples(filename, sampler, pos, state):
    """Save the MCMC sampler chain and current state to `filename`."""
    results = {
        'chain': sampler.chain,
        'accept': sampler.acceptance_fraction,
        'lnprob': sampler.lnprobability,
        'final_pos': pos,
        'state': state,
        'par': P,
    }
    saveobj(filename, results, overwrite=1)
Ejemplo n.º 3
0
def x_contifit(specfil, outfil=None, savfil=None, redshift=0., divmult=1, forest_divmult=1):
    """Interactively fit a spline continuum to a QSO spectrum.

    Parameters
    ----------
    specfil : str
        Input spectrum filename, read via xastropy.spec.readwrite.
    outfil : str, optional
        Output FITS filename (default 'conti.fits').
    savfil : str, optional
        Filename for the saved (continuum, knots) pair (default
        'conti.sav').  If it already exists, the user is offered the
        chance to adjust the previous fit.
    redshift : float, optional
        Emission redshift of the QSO, passed to the fitter.
    divmult, forest_divmult : int, optional
        Knot-spacing multipliers passed to barak.fitcont.fitqsocont.
    """
    import os
    import barak.fitcont as bf
    from barak.spec import read
    from barak.io import saveobj, loadobj
    import xastropy.spec.readwrite as xsr
    reload(xsr)
    reload(bf)

    # Initialize default filenames.  Use 'is None' (identity), not
    # '== None', per PEP 8.
    if savfil is None:
        savfil = 'conti.sav'
    if outfil is None:
        outfil = 'conti.fits'

    # Read spectrum + convert to Barak format
    sp = xsr.readspec(specfil)

    # Fit spline continuum, optionally starting from a previous fit.
    if os.path.lexists(savfil):
        option = raw_input('Adjust old continuum? (y)/n: ')
        if option.lower() != 'n':
            co_old, knots_old = loadobj(savfil)
            co, knots = bf.fitqsocont(sp.wa, sp.fl, sp.er, redshift,
                oldco=co_old, knots=knots_old,
                divmult=divmult,
                forest_divmult=forest_divmult)
        else:
            co, knots = bf.fitqsocont(sp.wa, sp.fl, sp.er, redshift,
                divmult=divmult,
                forest_divmult=forest_divmult)
    else:
        co, knots = bf.fitqsocont(sp.wa, sp.fl, sp.er, redshift,
            divmult=divmult,
            forest_divmult=forest_divmult)

    # Remove the fitter's scratch file; guard so a missing file is not
    # a fatal error.
    if os.path.lexists('_knots.sav'):
        os.remove('_knots.sav')

    # Save continuum:
    saveobj(savfil, (co, knots), overwrite=1)

    # Check continuum:
    print('Plotting new continuum')
    plt.clf()
    plt.plot(sp.wa, sp.fl, drawstyle='steps-mid')
    plt.plot(sp.wa, sp.co, color='r')
    plt.show()

    # Repeat?
    confirm = raw_input('Keep continuum? (y)/n: ')
    if confirm == 'y':
        fits.writeto(outfil, sp, clobber=True)
    else:
        print('Writing to tmp.fits anyhow!')
        fits.writeto('tmp.fits', sp, clobber=True)
Ejemplo n.º 4
0
def read_redmapper():
    d = fits.getdata(prefix + 'clusters/redmapper/'
                     'dr8_run_redmapper_v5.10_lgt5_catalog.fits')
    #d = fits.getdata(prefix + 'clusters/redmapper/DR8/'
    #                 'dr8_run_redmapper_v5.10_lgt5_catalog.fit')

    z = d['Z_LAMBDA']
    c0 = d['BCG_SPEC_Z'] != -1
    z[c0] = d['BCG_SPEC_Z'][c0]
    zer = d['Z_LAMBDA_E']
    if CLUS_ZERR == 'erphotz':
        zer[c0] = 0.001
    elif isinstance(CLUS_ZERR, float):
        zer[:] = CLUS_ZERR
    else:
        raise ValueError

    # 0.005 corresponds to a velocity dispersion of 937 km/s at z=0.6
    zer = np.where(zer < 0.005, 0.005, zer)

    if os.path.exists('dc_redmapper.sav'):
        rlos = loadobj('dc_redmapper.sav')
        assert len(rlos) == len(d)
    else:
        # this takes about 5 min to run
        print 'calculating comoving distances'
        rlos = cosmo.comoving_distance(z)
        saveobj('dc_redmapper.sav', rlos)

    # in solar masses, conversion from Rykoff 2013 appendix B.
    m200 = m200_from_richness(d['LAMBDA_CHISQ'])

    d1 = np.rec.fromarrays([
        d.RA, d.DEC, z, zer, d.LAMBDA_CHISQ, d.MEM_MATCH_ID, rlos.value, m200
    ],
                           names='ra,dec,z,zer,richness,id,rlos,m200')
    d2 = d1[d1.z > ZMIN_CLUS]
    d3 = d2[between(np.log10(d2['m200']), log10MINMASS, log10MAXMASS)]

    iclus_from_id = {idval: i for i, idval in enumerate(d3.id)}
    return d3, iclus_from_id
Ejemplo n.º 5
0
def read_redmapper():
    #d = fits.getdata(prefix + 'clusters/redmapper/'
    #                 'dr8_run_redmapper_v5.10_lgt5_catalog.fits')
    d = fits.getdata(prefix + 'clusters/redmapper/DR8/'
                     'dr8_run_redmapper_v5.10_lgt5_catalog.fit')

    z = d['Z_LAMBDA']
    c0 = d['BCG_SPEC_Z'] != -1 
    z[c0] = d['BCG_SPEC_Z'][c0]
    zer = d['Z_LAMBDA_E']
    if CLUS_ZERR == 'erphotz':
        zer[c0] = 0.001
    elif isinstance(CLUS_ZERR, float):
        zer[:] = CLUS_ZERR
    else:
        raise ValueError

    # 0.005 corresponds to a velocity dispersion of 937 km/s at z=0.6 
    zer = np.where(zer < 0.005, 0.005, zer)

    if os.path.exists('dc_redmapper.sav'):
        rlos = loadobj('dc_redmapper.sav')
        assert len(rlos) == len(d)
    else:
        # this takes about 5 min to run
        print 'calculating comoving distances'
        rlos = cosmo.comoving_distance(z)
        saveobj('dc_redmapper.sav', rlos)

    # in solar masses, conversion from Rykoff 2013 appendix B.
    m200 = m200_from_richness(d['LAMBDA_CHISQ'])


    d1 = np.rec.fromarrays([d.RA, d.DEC, z, zer,
                            d.LAMBDA_CHISQ, d.MEM_MATCH_ID, rlos.value, m200],
                           names='ra,dec,z,zer,richness,id,rlos,m200')
    d2 = d1[between(d1.z, ZMIN_CLUS, ZMAX_CLUS)]
    d3 = d2[between(np.log10(d2['m200']), log10MINMASS, log10MAXMASS)]

    iclus_from_id = {idval:i for i,idval in enumerate(d3.id)}
    return d3, iclus_from_id
Ejemplo n.º 6
0
                rho[ibin]['cid'].append(p['cid'])
                rho[ibin]['pid'].append(p['pid'])
                rho[ibin]['qid'].append(p['qid'])
    
        # count the total redshift path per bin, and the total bumber 
        for i in range(len(rho)):
            zpathlim = np.array(rho[i]['zpathlim'])
            if len(zpathlim) == 0:
                rho[i]['zpathtot'] = 0
            else:
                rho[i]['zpathtot'] = (zpathlim[:,1] - zpathlim[:,0]).sum()
            # ids of absorbers matching clusters
            rho[i]['abid'] = list(flatten(rho[i]['abid']))
    
        print 'Saving to', outname
        saveobj(outname, rho, overwrite=1)


if PLOTRES:

    outname = run_id + '/rho_dNdz_clus.sav'
    fig3 = plt.figure(3, figsize=(7.5,7.5))
    #fig3.subplots_adjust(left=0.16)
    fig3.clf()
    ax = plt.gca()
    ax.set_title(run_id)

    ewbins = Bins([0.6, 5.0])
    labels = ['0.6 < Wr$_{2796}$ < 5']
    colors = 'g'
    symbols = 'o'
Ejemplo n.º 7
0
                rho[ibin]['cid'].append(p['cid'])
                rho[ibin]['pid'].append(p['pid'])
                rho[ibin]['qid'].append(p['qid'])

        # count the total redshift path per bin, and the total bumber
        for i in range(len(rho)):
            zpathlim = np.array(rho[i]['zpathlim'])
            if len(zpathlim) == 0:
                rho[i]['zpathtot'] = 0
            else:
                rho[i]['zpathtot'] = (zpathlim[:, 1] - zpathlim[:, 0]).sum()
            # ids of absorbers matching clusters
            rho[i]['abid'] = list(flatten(rho[i]['abid']))

        print 'Saving to', outname
        saveobj(outname, rho, overwrite=1)

if PLOTRES:

    outname = run_id + '/rho_dNdz_clus.sav'
    fig3 = plt.figure(3, figsize=(7.5, 7.5))
    #fig3.subplots_adjust(left=0.16)
    fig3.clf()
    ax = plt.gca()
    ax.set_title(run_id)

    ewbins = Bins([0.6, 5.0])
    labels = ['0.6 < Wr$_{2796}$ < 5']
    colors = 'g'
    symbols = 'o'
    offsets = [0]
Ejemplo n.º 8
0
def x_contifit(specfil,
               outfil=None,
               savfil=None,
               redshift=0.,
               divmult=1,
               forest_divmult=1):
    """Interactively fit a spline continuum to a QSO spectrum.

    Parameters
    ----------
    specfil : str
        Input spectrum filename, read via xastropy.spec.readwrite.
    outfil : str, optional
        Output FITS filename (default 'conti.fits').
    savfil : str, optional
        Filename for the saved (continuum, knots) pair (default
        'conti.sav').  If it already exists, the user may adjust the
        previous fit instead of starting fresh.
    redshift : float, optional
        Emission redshift of the QSO, passed to the fitter.
    divmult, forest_divmult : int, optional
        Knot-spacing multipliers passed to barak.fitcont.fitqsocont.
    """
    import os
    import barak.fitcont as bf
    from barak.spec import read
    from barak.io import saveobj, loadobj
    import xastropy.spec.readwrite as xsr
    reload(xsr)
    reload(bf)

    # Initialize default filenames.  Use 'is None' (identity), not
    # '== None', per PEP 8.
    if savfil is None:
        savfil = 'conti.sav'
    if outfil is None:
        outfil = 'conti.fits'

    # Read spectrum + convert to Barak format
    sp = xsr.readspec(specfil)

    # Fit spline continuum, optionally starting from a previous fit.
    if os.path.lexists(savfil):
        option = raw_input('Adjust old continuum? (y)/n: ')
        if option.lower() != 'n':
            co_old, knots_old = loadobj(savfil)
            co, knots = bf.fitqsocont(sp.wa,
                                      sp.fl,
                                      sp.er,
                                      redshift,
                                      oldco=co_old,
                                      knots=knots_old,
                                      divmult=divmult,
                                      forest_divmult=forest_divmult)
        else:
            co, knots = bf.fitqsocont(sp.wa,
                                      sp.fl,
                                      sp.er,
                                      redshift,
                                      divmult=divmult,
                                      forest_divmult=forest_divmult)
    else:
        co, knots = bf.fitqsocont(sp.wa,
                                  sp.fl,
                                  sp.er,
                                  redshift,
                                  divmult=divmult,
                                  forest_divmult=forest_divmult)

    # Remove the fitter's scratch file; guard so a missing file is not
    # a fatal error.
    if os.path.lexists('_knots.sav'):
        os.remove('_knots.sav')

    # Save continuum:
    saveobj(savfil, (co, knots), overwrite=1)

    # Check continuum:
    print('Plotting new continuum')
    plt.clf()
    plt.plot(sp.wa, sp.fl, drawstyle='steps-mid')
    plt.plot(sp.wa, sp.co, color='r')
    plt.show()

    # Repeat?
    confirm = raw_input('Keep continuum? (y)/n: ')
    if confirm == 'y':
        fits.writeto(outfil, sp, clobber=True)
    else:
        print('Writing to tmp.fits anyhow!')
        fits.writeto('tmp.fits', sp, clobber=True)
Ejemplo n.º 9
0
                else:
                    objects[obj][arm][filter].append(n)
            elif imtype == 'FOCUS' or \
                     imtype == 'flat' and hd['NEXTEND'] == 1:
                unused.append(n)
            else:
                unknown.append(n)

        print len(biases), 'biases'
        print len(objects), 'imaging targets found:'
        print ' ', '\n  '.join(textwrap.wrap(' '.join(objects)))
        print len(flats), 'flats found:'
        print ' ', '\n  '.join(textwrap.wrap(' '.join(flats)))
        print len(unknown), 'unidentified exposures:'
        print ' ', '\n  '.join(textwrap.wrap(' '.join(unknown)))
        saveobj('_sort_LBC.sav',
                dict(biases=biases, objects=objects, flats=flats))
        # could be a bug writing out an empty file?

        writetxt('sort_LBC_unused', [unused], overwrite=1)

if 1:
    # make links to the biases
    if len(biases) > 0:
        makedir('bias')
    for arm in biases:
        biasdir = 'bias/' + arm_str[arm]
        makedir(biasdir)
        makedir(biasdir + '/raw', clean=True)
        names = []
        for filename in sorted(biases[arm]):
            n = filename.rsplit('/')[-1]
Ejemplo n.º 10
0
            for j in range(num_M):
                mval = 10**logMvals[j]
                print(s + '  {} of {}, M={:g}'.format(j+1, num_M, mval))
                res = Phm_integrand(kval, mval)
                #print(res)
                out[i,j] = res
    
            # save as we go
            #with open('temp.npz', 'w') as fh:
            #    print('Updating temp.npz')
            #    np.savez(fh, out=out, logk=logkvals, logM=logMvals)

        t2 = time.time()
        print('Total time elapsed {} min'.format((t2 - t1) / 60))

    saveobj('SIGMA_CACHE.sav', SIGMA_CACHE, overwrite=1)
    
    Phm_term = np.trapz(out, x=10**logMvals, axis=-1)

    Phm = Phm_term * M['bias'] * power_spectrum(10**logkvals, 0, **COSMO1)

    kvals = 10**logkvals

    # now take the fourier transform of Phm to get the correlation function.

    # xi(r) = ind dk^3 / (2 * pi)^3 * P(k) * exp(i * k dot r)

    # xi(r) = ind dk / (2 * pi) * P(k) * exp(i * k * r)

    # Note that exp(i * k * r) = cos(kr) + i * sin(kr). We only want
    # the real part.
Ejemplo n.º 11
0
def main():
    """Build and run a grid of Cloudy photoionization models.

    Expects a ./grid.cfg configuration file in the current directory; if
    it is missing, an example config is written and the program exits.
    Otherwise the configuration is read, an incident UV background
    spectrum is generated (optionally tilted between 1-10 Ryd, or with a
    local starburst component added -- the two options are mutually
    exclusive), the Cloudy grid input is written, Cloudy is optionally
    run, and the parsed model grid is saved in both .sav.gz and .hdf5
    formats.
    """
    if not os.path.lexists('grid.cfg'):
        print ('./grid.cfg file not found, writing an example grid.cfg to '
               'the current directory')
        write_example_grid_config()
        sys.exit()

    cfg = read_config('grid.cfg')

    # Echo the parsed configuration so runs are self-documenting.
    print ''
    print 'Input values:'
    for k in sorted(cfg):
        print '  %s: %s' % (k, cfg[k])
    print ''

    if cfg.table is None:
        # No pre-built Cloudy table requested: generate an explicit
        # incident UV background spectrum instead.
        fluxname = cfg.prefix + '_temp_uvb.dat'
        if cfg.cuba_name is None:
            # Fall back to the bundled CUBA UV background file.
            cfg.cuba_name = get_data_path() + 'UVB.out'

        uvb = calc_uvb(cfg.z, cfg.cuba_name, match_fg=False)

        writetable('cloudy_jnu_HM.tbl', [uvb['energy'], uvb['logjnu']],
                   overwrite=1,
                   units=['Rydbergs', 'erg/s/cm^2/Hz/ster'],
                   names=['energy', 'log10jnu'])

        if cfg.uvb_tilt is not None:
            # uvb_tilt and distance_starburst_kpc are mutually exclusive.
            if cfg.distance_starburst_kpc is not None:
                raise RuntimeError('Must only specify one of uvb_tilt and\
                distance_starburst_kpc!')

            # remember which bits had 1e-30
            clow = uvb['logjnu'] == -30
            # tilt the UV background between 1 and 10 Rydbergs
            logjnu = tilt_spec(cfg.uvb_tilt, uvb['energy'], uvb['logjnu'],
                               emin=1, emax=10)

            # restore the floor values the tilt may have altered
            logjnu[clow] = -30

            print('Tilting UVB using parameter {}'.format(cfg.uvb_tilt))

            # now re-normalise to match the photoionization rate of the
            # default spectrum.

            gamma_default = find_gamma(uvb['energy'], 10**uvb['logjnu'])
            mult = gamma_default / find_gamma(uvb['energy'], 10**logjnu)
            print 'Scaling tilted Jnu by %.3g to match default gamma' % mult
            logjnu = logjnu + np.log10(mult)

            writetable('cloudy_jnu_tilted.tbl', [uvb['energy'], logjnu],
                       overwrite=1,
                       units=['Rydbergs', 'erg/s/cm^2/Hz/ster'],
                       names=['energy', 'log10jnu'])
            uvb['logjnu'] = logjnu

        elif cfg.distance_starburst_kpc is not None:
            # Add a local starburst contribution to the UV background.
            wa, F = read_starburst99(get_data_path() + 'starburst.spectrum1')
            nu, logjnu = calc_local_jnu(wa, F, cfg.distance_starburst_kpc,
                                        cfg.fesc)
            energy = nu * hplanck / Ryd
            # use HM uvb energy limits
            cond = between(uvb['energy'], energy[0], energy[-1])
            logjnu1 = np.interp(uvb['energy'][cond], energy, logjnu)
            # Sum the two components in linear (not log) flux space.
            uvb['logjnu'][cond] = np.log10(10**uvb['logjnu'][cond] +
                                           10**logjnu1)
            writetable('cloudy_jnu_total.tbl', [uvb['energy'], uvb['logjnu']],
                       overwrite=1,
                       units=['Rydbergs', 'erg/s/cm^2/Hz/ster'],
                       names=['energy', 'log10jnu'])

        write_uvb(fluxname, uvb['energy'], uvb['logjnu'], cfg.overwrite)

        # Fnu at 1 Rydberg
        k = np.argmin(np.abs(uvb['energy'] - 1.))
        logfnu912 = np.log10(10**uvb['logjnu'][k] * 4 * pi)
    else:
        # A Cloudy 'table' command supplies the spectrum; only the
        # normalisation at 1 Rydberg is taken from the config.
        logfnu912 = cfg.logfnu912
        fluxname = None

    write_grid_input(cfg, fnu912=logfnu912, fluxfilename=fluxname, table=cfg.table,
                     abundances=cfg.abundances)

    if cfg.run_cloudy:
        run_grid(nproc=cfg.nproc, overwrite=cfg.overwrite)

    models = parse_grid(cfg)

    filename = cfg.prefix + '_grid.sav.gz'
    print 'Writing to', filename
    saveobj(filename, models, overwrite=cfg.overwrite)
    savehdf5(filename.replace('.sav.gz', '.hdf5'), models,
             overwrite=cfg.overwrite)
Ejemplo n.º 12
0
Archivo: run_mcmc.py Proyecto: nhmc/LAE
def save_samples(filename, sampler, pos, state):
    """Save the MCMC sampler chain and current state to `filename`."""
    saveobj(filename,
            {'chain': sampler.chain,
             'accept': sampler.acceptance_fraction,
             'lnprob': sampler.lnprobability,
             'final_pos': pos,
             'state': state,
             'par': P},
            overwrite=1)
Ejemplo n.º 13
0
                else:
                    objects[obj][arm][filter].append(n)
            elif imtype == 'FOCUS' or \
                     imtype == 'flat' and hd['NEXTEND'] == 1:
                unused.append(n)
            else:
                unknown.append(n)

        print len(biases), 'biases'
        print len(objects), 'imaging targets found:'
        print ' ', '\n  '.join(textwrap.wrap(' '.join(objects)))
        print len(flats), 'flats found:'
        print ' ', '\n  '.join(textwrap.wrap(' '.join(flats)))
        print len(unknown), 'unidentified exposures:'
        print ' ', '\n  '.join(textwrap.wrap(' '.join(unknown)))
        saveobj('_sort_LBC.sav', dict(biases=biases,objects=objects,flats=flats))
        # could be a bug writing out an empty file?
        
        writetxt('sort_LBC_unused', [unused], overwrite=1)

if 1:
    # make links to the biases
    if len(biases) > 0:
        makedir('bias')
    for arm in biases:
        biasdir = 'bias/' + arm_str[arm]
        makedir(biasdir)
        makedir(biasdir + '/raw', clean=True)
        names = []
        for filename in sorted(biases[arm]):
            n = filename.rsplit('/')[-1]