Example #1
def Build_VAGCdr72bright34():
    ''' Build hdf5 file of VAGC dr72bright34, which is the
    parent sample of Jeremy's group catalog
    '''
    # import VAGC dr72bright34
    vagc_photoinfo = np.loadtxt(''.join(
        [UT.dir_dat(), 'vagc/', 'photoinfo_nonan.dr72bright34.dat']),
                                unpack=True,
                                usecols=[0, 1, 2, 3, 4, 5])
    vagc_lss = np.loadtxt(''.join(
        [UT.dir_dat(), 'vagc/', 'lss.dr72bright34.dat']),
                          unpack=True,
                          usecols=[0, 3, 4, 5])
    if not np.array_equal(vagc_photoinfo[0], vagc_lss[0]):
        raise ValueError("photoinfo and lss files have mismatched galaxy ids")

    catalog = {}
    catalog['id'] = vagc_photoinfo[0]
    for i_band, band in enumerate(['u', 'g', 'r', 'i', 'z']):
        catalog['M_' + band] = vagc_photoinfo[i_band + 1]
    catalog['ra'] = vagc_lss[1]
    catalog['dec'] = vagc_lss[2]
    catalog['cz'] = vagc_lss[3]
    catalog['z'] = vagc_lss[3] / 299792.458  # cz [km/s] -> z (c = 299792.458 km/s)
    print(len(catalog['z']), 'total galaxies')

    dr72_file = ''.join([UT.dir_dat(), 'vagc/', 'VAGCdr72bright34.hdf5'])

    f = h5py.File(dr72_file, 'w')
    grp = f.create_group('data')
    for key in catalog.keys():
        grp.create_dataset(key, data=catalog[key])
    f.close()
    return None
Example #2
    def File(self):
        ''' Conformity catalog file name 
        '''
        if self.catalog_name == 'tinker':
            conform_file = ''.join([
                UT.dir_dat(), 'conform_catalog/', 'MPAJHU_TinkerGroupCat.Mr',
                str(self.Mrcut), '.Mass',
                str(self.M_cut),
                self._FileSpec(), '.p'
            ])
        elif self.catalog_name == 'tinkauff':
            conform_file = ''.join([
                UT.dir_dat(), 'conform_catalog/',
                'VAGCdr72brigh34_MPAJHU.GroupCat.Mass',
                str(self.M_cut),
                self._FileSpec(), '.p'
            ])
        elif self.catalog_name == 'tinkauff_iso':
            conform_file = ''.join([
                UT.dir_dat(), 'conform_catalog/',
                'VAGCdr72brigh34_MPAJHU.IsoGroupCat.Mass',
                str(self.M_cut),
                self._FileSpec(), '.p'
            ])
        elif self.catalog_name == 'kauff':
            conform_file = ''.join([
                UT.dir_dat(), 'conform_catalog/',
                'VAGCdr72brigh34_MPAJHU.Kauffmann',
                self._FileSpec(), '.p'
            ])
        else:
            raise ValueError("catalog_name '%s' not recognized" % self.catalog_name)

        return conform_file
Example #3
def Build_TinKauffGroupCat(Mass_cut=9.25):
    ''' Compile the outputs of Jeremy's Group Catalog algorithm
    in order to generate catalogs analogous to Kauffmann et al. (2013).
    Hence TinKauff.
    '''
    # galdata_corr file
    galdata_file = ''.join([
        UT.dir_dat(), 'tinkauff/', 'clf_groups_JHU_M',
        str(Mass_cut), '_z0.017_fibcoll.galdata_corr'
    ])
    gal_data = np.loadtxt(galdata_file, unpack=True, usecols=range(1, 14))

    catalog = {
        'id': gal_data[0],
        'ra': gal_data[5] * 57.2957795,  # rad -> deg (180/pi)
        'dec': gal_data[6] * 57.2957795,  # rad -> deg (180/pi)
        'M_r': gal_data[1],
        'M_g': gal_data[2],
        'z': gal_data[3] / 299792.458,  # cz [km/s] -> z
        'mass_tot_mpajhu': np.log10(gal_data[4]),
        'Dn4000': gal_data[7],
        'ssfr_tot_mpajhu': gal_data[8],
        'ssfr_fib_mpajhu': gal_data[9],
        'sfr_tot_mpajhu': gal_data[10],
        'sfr_fib_mpajhu': gal_data[11],
        'mass_fib_mpajhu': gal_data[12],
    }

    # prob data
    prob_file = ''.join([
        UT.dir_dat(), 'tinkauff/', 'clf_groups_JHU_M',
        str(Mass_cut), '_z0.017_fibcoll.prob'
    ])
    prob_data = np.loadtxt(prob_file, unpack=True, usecols=[1, 2, 5, 12])
    if not np.array_equal(catalog['id'], prob_data[0]):
        raise ValueError("galdata_corr and prob files have mismatched galaxy ids")
    catalog['p_sat'] = prob_data[2]
    catalog['group_id'] = prob_data[1]
    catalog['angradius_halo'] = prob_data[3]
    # remove galaxies flagged with the -999. sentinel for missing MPA-JHU SSFR
    if catalog['ssfr_tot_mpajhu'].min() == -999.:
        N_cat = len(catalog['p_sat'])
        nan_cuts = np.where(catalog['ssfr_tot_mpajhu'] != -999.)
        for key in catalog.keys():
            catalog[key] = catalog[key][nan_cuts]

        print('removed =', N_cat - len(catalog['p_sat']))

    tinkauff_file = ''.join([
        UT.dir_dat(), 'tinkauff/', 'VAGCdr72_MPAJHU.GroupCat.Mass',
        str(Mass_cut), '.hdf5'
    ])
    f = h5py.File(tinkauff_file, 'w')
    grp = f.create_group('data')

    for column in catalog.keys():
        grp.create_dataset(column, data=catalog[column])
    f.close()
    return None
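The hard-coded constants above are standard unit conversions: 57.2957795 is 180/pi, turning the catalog's radians into degrees, and 299792.458 km/s is the speed of light, so gal_data[3] / 299792.458 converts the recession velocity cz into a redshift. A minimal sketch of the same conversions using explicit NumPy helpers (the array values below are made up for illustration):

import numpy as np

C_KMS = 299792.458                  # speed of light [km/s]
ra_rad = np.array([0.85, 2.10])     # hypothetical RA values in radians
cz = np.array([5200.0, 8950.0])     # hypothetical cz values [km/s]

ra_deg = np.degrees(ra_rad)         # equivalent to ra_rad * 57.2957795...
z = cz / C_KMS                      # redshift from recession velocity cz
print(ra_deg, z)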
Example #4
def Build_TinkerCatalog(Mrcut=18):
    ''' Preprocess the group catalog data into a more python friendly format
    with appropriate *little h* corrections!
    '''
    h = 0.7
    M_cut = Tinker_Masscut(Mrcut)
    # Read Group Catalog GalData
    galdata_file = ''.join([
        UT.dir_dat(), 'tinker2011catalogs/', 'clf_groups_M',
        str(Mrcut), '_',
        str(M_cut), '_D360.', 'galdata_corr.fits'
    ])
    gal_data = mrdfits(galdata_file)

    catalog = {}
    for column in gal_data.__dict__.keys():
        column_data = getattr(gal_data, column)
        if column == 'stellmass':
            # stellmass is in units of Msol/h^2
            # why jeremy why?/?
            column_data = column_data / h**2
            catalog['mass'] = np.log10(column_data)  # convert to log Mass
        elif column == 'ssfr':
            column_data += np.log10(h**2)  # little h #*(@*#$
            catalog['ssfr'] = column_data
        elif column == 'cz':  # convert to z
            catalog['z'] = column_data / 299792.458
        elif column in ['ra', 'dec']:
            catalog[column] = column_data * 57.2957795  # rad -> deg (180/pi)
        else:
            catalog[column] = column_data
    # calculate SFR from mass and ssfr: log SFR = log M* + log SSFR
    catalog['sfr'] = catalog['mass'] + catalog['ssfr']

    # Read Group Catalog probability
    prob_file = ''.join([
        UT.dir_dat(), 'tinker2011catalogs/', 'clf_groups_M',
        str(Mrcut), '_',
        str(M_cut), '_D360.', 'prob.fits'
    ])
    prob_data = mrdfits(prob_file)  # import probability file
    for column in prob_data.__dict__.keys():
        catalog[column] = getattr(prob_data, column)

    tinker_file = ''.join([
        UT.dir_dat(), 'tinker2011catalogs/', 'GroupCat.Mr',
        str(Mrcut), '.Mass',
        str(M_cut), '.D360.hdf5'
    ])

    f = h5py.File(tinker_file, 'w')
    grp = f.create_group('data')
    for key in catalog.keys():
        grp.create_dataset(key, data=catalog[key])

    f.close()
    return None
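For reference, the little h corrections in Build_TinkerCatalog are simple shifts in log space: dividing the stellar mass (stored in Msun/h^2) by h^2 before taking the log is the same as subtracting 2*log10(h), and the SSFR column is shifted by +log10(h^2), following the sign convention of the code above. A short sketch of that arithmetic with a made-up input value:

import numpy as np

h = 0.7
stellmass_cat = 3.0e10                     # hypothetical catalog stellar mass [Msun/h^2]
ssfr_cat = -10.5                           # hypothetical catalog log10 SSFR

log_mass = np.log10(stellmass_cat / h**2)  # == np.log10(stellmass_cat) - 2. * np.log10(h)
ssfr = ssfr_cat + np.log10(h**2)           # same shift applied in Build_TinkerCatalog
log_sfr = log_mass + ssfr                  # log10 SFR = log10 M* + log10 SSFR
print(log_mass, ssfr, log_sfr)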
Example #5
def VAGCdr72bright34_Catalog():
    ''' Read in the VAGC dr72bright34 catalog hdf5 file
    '''
    dr72_file = ''.join([UT.dir_dat(), 'vagc/', 'VAGCdr72bright34.hdf5'])
    catalog = {}
    f = h5py.File(dr72_file, 'r')
    grp = f['data']
    for col in grp.keys():
        catalog[col] = grp[col][...]  # read full dataset (.value was removed in h5py >= 3.0)
    f.close()
    return catalog
Example #6
def KauffmannParent():
    ''' Read in the Kauffmann et al. (2013) parent sample constructed from
    the VAGC dr72bright34 catalog
    '''
    dr72_file = ''.join([UT.dir_dat(), 'vagc/', 'VAGCdr72.Kauff2013cut.hdf5'])
    catalog = {}
    f = h5py.File(dr72_file, 'r')
    grp = f['data']
    for col in grp.keys():
        catalog[col] = grp[col][...]
    f.close()
    return catalog
Example #7
def TinKauffGroupCat(Mass_cut=9.25):
    ''' Read in the Tinker-Kauffmann Group catalog generated from VAGC 
    dr72bright34 with MPA-JHU galaxy property values. 
    '''
    dr72_file = ''.join([
        UT.dir_dat(), 'tinkauff/', 'VAGCdr72_MPAJHU.GroupCat.Mass',
        str(Mass_cut), '.hdf5'
    ])
    catalog = {}
    f = h5py.File(dr72_file, 'r')
    grp = f['data']
    for col in grp.keys():
        catalog[col] = grp[col][...]
    f.close()
    return catalog
Example #8
def MPAJHU_TinkerCatalog(Mrcut=18):
    ''' Read in the Tinker et al. (2011) catalog matched to the MPA-JHU catalog
    and return the catalog dictionary
    '''
    M_cut = Tinker_Masscut(Mrcut)
    mpajhu_tinker_file = ''.join([
        UT.dir_dat(), 'tinker2011catalogs/', 'GroupCat.Mr',
        str(Mrcut), '.Mass',
        str(M_cut), '.D360.MPAJHU.hdf5'
    ])

    catalog = {}
    f = h5py.File(mpajhu_tinker_file, 'r')
    grp = f['data']
    for col in grp.keys():
        catalog[col] = grp[col][...]

    f.close()
    return catalog
Example #9
def TinkerCatalog(Mrcut=18):
    ''' Read in the Tinker et al. (2011) group catalog, combined into a
    volume-limited galaxy catalog, and return a dictionary with
    all the values.
    '''
    M_cut = Tinker_Masscut(Mrcut)
    # read in h5py file
    tinker_file = ''.join([
        UT.dir_dat(), 'tinker2011catalogs/', 'GroupCat.Mr',
        str(Mrcut), '.Mass',
        str(M_cut), '.D360.hdf5'
    ])

    catalog = {}
    f = h5py.File(tinker_file, 'r')
    grp = f['data']
    for col in grp.keys():
        catalog[col] = grp[col][...]

    f.close()
    return catalog
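The builder and reader functions are meant to be used as a write-then-read pair: Build_TinkerCatalog writes the HDF5 file and TinkerCatalog loads it back into a plain dictionary of arrays. A minimal usage sketch, assuming UT.dir_dat() points at a directory containing the Tinker et al. (2011) input files:

# hypothetical driver script
Build_TinkerCatalog(Mrcut=18)        # writes GroupCat.Mr18.Mass*.D360.hdf5 (mass cut set by Tinker_Masscut)
catalog = TinkerCatalog(Mrcut=18)    # reads the file back into a dict of numpy arrays
print(sorted(catalog.keys()))
print(len(catalog['z']), 'galaxies')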
Example #10
def Build_MPAJHU_TinkerCatalog_ASCII(Mrcut=18):
    ''' Append MPA-JHU SSFR values to the Tinker et al. (2011) catalog.
    The main purpose is to try to reproduce the Kauffmann et al. (2013) results. 
    Galaxies are matched to each other through spherematch. 
    '''
    # import Tinker et al. (2011) catalog with specified Mr cut
    catalog = TinkerCatalog(Mrcut=Mrcut)

    # import MPA-JHU catalog
    mpajhu_gals = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'gal_info_dr7_v5_2.fit']))
    # SFR total
    mpajhu_sfrtot = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'gal_totsfr_dr7_v5_2.fits']))
    # SFR fiber
    mpajhu_sfrfib = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'gal_fibsfr_dr7_v5_2.fits']))
    # SSFR total
    mpajhu_ssfrtot = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'gal_totspecsfr_dr7_v5_2.fits']))
    # SSFR fiber
    mpajhu_ssfrfib = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'gal_fibspecsfr_dr7_v5_2.fits']))
    # stellar mass total
    mpajhu_masstot = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'totlgm_dr7_v5_2.fit']))
    # stellar mass fiber
    mpajhu_massfib = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'fiblgm_dr7_v5_2.fit']))

    t_spherematch = time.time()
    match = spherematch(catalog['ra'], catalog['dec'], mpajhu_gals.ra,
                        mpajhu_gals.dec, 0.000833333)
    print('Spherematch with match length =', 0.000833333, 'deg (3 arcsec)')
    print('takes', time.time() - t_spherematch, 'seconds')
    print(1. - float(len(match[0])) / float(len(catalog['ra'])),
          'of the VAGC galaxies do not have matches, likely due to fiber collisions')
    if len(match[0]) != len(np.unique(match[0])):
        raise ValueError("spherematch returned duplicate matches")

    # save the MPAJHU indices, just in case
    catalog['mpajhu_index'] = np.repeat(-999, len(catalog['ra']))
    catalog['mpajhu_index'][match[0]] = match[1]

    # append SFR, SSFR, and mass values to catalog
    for col in [
            'sfr_tot_mpajhu', 'sfr_fib_mpajhu', 'ssfr_tot_mpajhu',
            'ssfr_fib_mpajhu', 'mass_tot_mpajhu', 'mass_fib_mpajhu'
    ]:  # initiate arrays
        catalog[col] = np.repeat(-999., len(catalog['ra']))

    catalog['sfr_tot_mpajhu'][match[0]] = mpajhu_sfrtot.median[match[1]]
    catalog['sfr_fib_mpajhu'][match[0]] = mpajhu_sfrfib.median[match[1]]
    catalog['ssfr_tot_mpajhu'][match[0]] = mpajhu_ssfrtot.median[match[1]]
    catalog['ssfr_fib_mpajhu'][match[0]] = mpajhu_ssfrfib.median[match[1]]
    catalog['mass_tot_mpajhu'][match[0]] = mpajhu_masstot.median[match[1]]
    catalog['mass_fib_mpajhu'][match[0]] = mpajhu_massfib.median[match[1]]
    print(mpajhu_massfib.median[match[1]])

    first_cols = [
        'id_gal', 'ra', 'dec', 'z', 'mass', 'sfr', 'ssfr', 'mass_tot_mpajhu',
        'mass_fib_mpajhu', 'sfr_tot_mpajhu', 'sfr_fib_mpajhu',
        'ssfr_tot_mpajhu', 'ssfr_fib_mpajhu'
    ]

    data_fmt = []
    data_list = []
    for i_key, key in enumerate(first_cols):
        data_list.append(catalog[key])
        if key == 'id_gal':
            data_fmt.append('%i')
        else:
            data_fmt.append('%10.5f')

    later_cols = []
    for key in catalog.keys():
        if key not in first_cols:
            later_cols.append(key)

    for key in later_cols:
        data_list.append(catalog[key])
        if 'id' in key:
            data_fmt.append('%i')
        elif 'index' in key:
            data_fmt.append('%i')
        elif key == 'n_sersic':
            data_fmt.append('%i')
        elif key == 'stellmass':
            data_fmt.append('%1.5e')
        else:
            data_fmt.append('%10.5f')

    str_header = ', '.join(first_cols + later_cols)

    M_cut = Tinker_Masscut(Mrcut)
    mpajhu_tinker_file = ''.join([
        UT.dir_dat(), 'tinker2011catalogs/', 'GroupCat.Mr',
        str(Mrcut), '.Mass',
        str(M_cut), '.D360.MPAJHU.dat'
    ])
    np.savetxt(mpajhu_tinker_file, (np.vstack(np.array(data_list))).T,
               fmt=data_fmt,
               delimiter='\t',
               header=str_header)
    return None
Example #11
def Build_VAGCdr72_MPAJHU(Ascii=False):
    ''' Build VAGC dr72 with cross referenced MPAJHU stellar masses 
    and SSFRs.
    '''
    # import VAGC dr72bright34
    vagc_dr72 = VAGCdr72bright34_Catalog()
    print(len(vagc_dr72['ra']), 'VAGC dr72bright34 galaxies')

    # import MPA-JHU catalog
    mpajhu_gals = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'gal_info_dr7_v5_2.fit']))
    # SFR total
    mpajhu_sfrtot = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'gal_totsfr_dr7_v5_2.fits']))
    # SFR fiber
    mpajhu_sfrfib = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'gal_fibsfr_dr7_v5_2.fits']))
    # SSFR total
    mpajhu_ssfrtot = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'gal_totspecsfr_dr7_v5_2.fits']))
    # SSFR fiber
    mpajhu_ssfrfib = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'gal_fibspecsfr_dr7_v5_2.fits']))
    # stellar mass total
    mpajhu_masstot = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'totlgm_dr7_v5_2.fit']))
    # stellar mass fiber
    mpajhu_massfib = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'fiblgm_dr7_v5_2.fit']))

    catalog = {}
    catalog['ra'] = vagc_dr72['ra']
    catalog['dec'] = vagc_dr72['dec']
    catalog['z'] = vagc_dr72['z']
    for i_band, band in enumerate(['u', 'g', 'r', 'i', 'z']):
        catalog['M_' + band] = vagc_dr72['M_' + band]

    t_spherematch = time.time()
    match = spherematch(catalog['ra'], catalog['dec'], mpajhu_gals.ra,
                        mpajhu_gals.dec, 0.000833333)
    print('Spherematch with match length =', 0.000833333, 'deg (3 arcsec)')
    print('takes', time.time() - t_spherematch, 'seconds')
    print(1. - float(len(match[0])) / float(len(catalog['ra'])),
          'of the VAGC galaxies do not have matches')
    if len(match[0]) != len(np.unique(match[0])):
        raise ValueError("spherematch returned duplicate matches")

    # save the MPAJHU indices, just in case
    catalog['mpajhu_index'] = np.repeat(-999, len(catalog['ra']))
    catalog['mpajhu_index'][match[0]] = match[1]

    # append SFR, SSFR, and mass values to catalog
    for col in [
            'sfr_tot', 'sfr_fib', 'ssfr_tot', 'ssfr_fib', 'mass_tot',
            'mass_fib'
    ]:  # initiate arrays
        catalog[col] = np.repeat(-999., len(catalog['ra']))

    catalog['sfr_tot'][match[0]] = mpajhu_sfrtot.median[match[1]]
    catalog['sfr_fib'][match[0]] = mpajhu_sfrfib.median[match[1]]
    catalog['ssfr_tot'][match[0]] = mpajhu_ssfrtot.median[match[1]]
    catalog['ssfr_fib'][match[0]] = mpajhu_ssfrfib.median[match[1]]
    catalog['mass_tot'][match[0]] = mpajhu_masstot.median[match[1]]
    catalog['mass_fib'][match[0]] = mpajhu_massfib.median[match[1]]

    mpajhu_file = ''.join(
        [UT.dir_dat(), 'vagc/', 'VAGCdr72.MPAJHU.nocut.hdf5'])

    f = h5py.File(mpajhu_file, 'w')
    grp = f.create_group('data')
    for key in catalog.keys():
        grp.create_dataset(key, data=catalog[key])
    f.close()

    if Ascii:  # write to ASCII (for Jeremy)
        mpajhu_file = ''.join(
            [UT.dir_dat(), 'vagc/', 'VAGCdr72.MPAJHU.nocut.dat'])
        column_order = [
            'ra', 'dec', 'z', 'mass_tot', 'sfr_tot', 'ssfr_tot', 'mass_fib',
            'sfr_fib', 'ssfr_fib'
        ]
        data_list = []
        data_fmt = ['%10.5f' for i in range(len(column_order))]
        str_header = ''
        for col in column_order:
            data_list.append(catalog[col])
            if 'mass' in col:
                str_header += ' ' + col + ' (Msun),'
            elif 'sfr' in col:
                if 'ssfr' not in col:
                    str_header += ' ' + col + ' (Msun/yr),'
                else:
                    str_header += ' ' + col + ','
            else:
                str_header += ' ' + col + ','
        np.savetxt(mpajhu_file, (np.vstack(np.array(data_list))).T,
                   fmt=data_fmt,
                   delimiter='\t',
                   header=str_header)
    return None
Example #12
def Build_KauffmannParent():
    ''' Try to create the parent sample of Kauffmann et al. (2013)
    '''
    # import VAGC dr72bright34
    vagc_dr72 = VAGCdr72bright34_Catalog()

    # import MPA-JHU catalog
    mpajhu_gals = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'gal_info_dr7_v5_2.fit']))
    # SFR total
    mpajhu_sfrtot = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'gal_totsfr_dr7_v5_2.fits']))
    # SFR fiber
    mpajhu_sfrfib = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'gal_fibsfr_dr7_v5_2.fits']))
    # SSFR total
    mpajhu_ssfrtot = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'gal_totspecsfr_dr7_v5_2.fits']))
    # SSFR fiber
    mpajhu_ssfrfib = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'gal_fibspecsfr_dr7_v5_2.fits']))
    # stellar mass total
    mpajhu_masstot = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'totlgm_dr7_v5_2.fit']))
    # stellar mass fiber
    mpajhu_massfib = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'fiblgm_dr7_v5_2.fit']))

    catalog = {}
    catalog['ra'] = vagc_dr72['ra']
    catalog['dec'] = vagc_dr72['dec']
    catalog['z'] = vagc_dr72['z']
    for i_band, band in enumerate(['u', 'g', 'r', 'i', 'z']):
        catalog['M_' + band] = vagc_dr72['M_' + band]

    # pre-cut: restrict to the Kauffmann et al. (2013) redshift range
    cut_z = (catalog['z'] > 0.017) & (catalog['z'] < 0.03)
    pre_cuts = np.where(cut_z)  # stellar mass and absolute magnitude cuts are applied after matching
    for key in catalog.keys():
        catalog[key] = catalog[key][pre_cuts]

    t_spherematch = time.time()
    match = spherematch(catalog['ra'], catalog['dec'], mpajhu_gals.ra,
                        mpajhu_gals.dec, 0.000833333)
    print('Spherematch with match length =', 0.000833333, 'deg (3 arcsec)')
    print('takes', time.time() - t_spherematch, 'seconds')
    print(1. - float(len(match[0])) / float(len(catalog['ra'])),
          'of the VAGC galaxies do not have matches')
    if len(match[0]) != len(np.unique(match[0])):
        raise ValueError("spherematch returned duplicate matches")

    # save the MPAJHU indices, just in case
    catalog['mpajhu_index'] = np.repeat(-999, len(catalog['ra']))
    catalog['mpajhu_index'][match[0]] = match[1]

    # append SFR, SSFR, and mass values to catalog
    for col in [
            'sfr_tot_mpajhu', 'sfr_fib_mpajhu', 'ssfr_tot_mpajhu',
            'ssfr_fib_mpajhu', 'mass_tot_mpajhu', 'mass_fib_mpajhu'
    ]:  # initiate arrays
        catalog[col] = np.repeat(-999., len(catalog['ra']))

    catalog['sfr_tot_mpajhu'][match[0]] = mpajhu_sfrtot.median[match[1]]
    catalog['sfr_fib_mpajhu'][match[0]] = mpajhu_sfrfib.median[match[1]]
    catalog['ssfr_tot_mpajhu'][match[0]] = mpajhu_ssfrtot.median[match[1]]
    catalog['ssfr_fib_mpajhu'][match[0]] = mpajhu_ssfrfib.median[match[1]]
    catalog['mass_tot_mpajhu'][match[0]] = mpajhu_masstot.median[match[1]]
    catalog['mass_fib_mpajhu'][match[0]] = mpajhu_massfib.median[match[1]]

    # Kauffmann et al. (2013) cuts
    cut_stellarmass = (catalog['mass_tot_mpajhu'] > 9.25)
    cut_absmag = (catalog['M_r'] < -16.) & (catalog['M_r'] > -24.)
    cut_match = (catalog['mpajhu_index'] != -999)

    final_cuts = np.where(cut_stellarmass & cut_absmag & cut_match)
    for key in catalog.keys():
        catalog[key] = catalog[key][final_cuts]

    mpajhu_file = ''.join(
        [UT.dir_dat(), 'vagc/', 'VAGCdr72.Kauff2013cut.hdf5'])

    f = h5py.File(mpajhu_file, 'w')
    grp = f.create_group('data')
    for key in catalog.keys():
        grp.create_dataset(key, data=catalog[key])
    f.close()
    return None
Example #13
def Build_MPAJHU_TinkerCatalog(Mrcut=18):
    ''' Append MPA-JHU SSFR values to the Tinker et al. (2011) catalog.
    The main purpose is to try to reproduce the Kauffmann et al. (2013) results. 
    Galaxies are matched to each other through spherematch. 
    '''
    # import Tinker et al. (2011) catalog with specified Mr cut
    catalog = TinkerCatalog(Mrcut=Mrcut)

    # import MPA-JHU catalog
    mpajhu_gals = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'gal_info_dr7_v5_2.fit']))
    # SFR total
    mpajhu_sfrtot = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'gal_totsfr_dr7_v5_2.fits']))
    # SFR fiber
    mpajhu_sfrfib = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'gal_fibsfr_dr7_v5_2.fits']))
    # SSFR total
    mpajhu_ssfrtot = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'gal_totspecsfr_dr7_v5_2.fits']))
    # SSFR fiber
    mpajhu_ssfrfib = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'gal_fibspecsfr_dr7_v5_2.fits']))
    # stellar mass total
    mpajhu_masstot = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'totlgm_dr7_v5_2.fit']))
    # stellar mass fiber
    mpajhu_massfib = mrdfits(''.join(
        [UT.dir_dat(), 'mpa_jhu/', 'fiblgm_dr7_v5_2.fit']))

    t_spherematch = time.time()
    match = spherematch(catalog['ra'], catalog['dec'], mpajhu_gals.ra,
                        mpajhu_gals.dec, 0.000833333)
    print('Spherematch with match length =', 0.000833333, 'deg (3 arcsec)')
    print('takes', time.time() - t_spherematch, 'seconds')
    print(1. - float(len(match[0])) / float(len(catalog['ra'])),
          'of the VAGC galaxies do not have matches, likely due to fiber collisions')
    if len(match[0]) != len(np.unique(match[0])):
        raise ValueError("spherematch returned duplicate matches")

    # save the MPAJHU indices, just in case
    catalog['mpajhu_index'] = np.repeat(-999, len(catalog['ra']))
    catalog['mpajhu_index'][match[0]] = match[1]

    # append SFR, SSFR, and mass values to catalog
    for col in [
            'sfr_tot_mpajhu', 'sfr_fib_mpajhu', 'ssfr_tot_mpajhu',
            'ssfr_fib_mpajhu', 'mass_tot_mpajhu', 'mass_fib_mpajhu'
    ]:  # initiate arrays
        catalog[col] = np.repeat(-999., len(catalog['ra']))

    catalog['sfr_tot_mpajhu'][match[0]] = mpajhu_sfrtot.median[match[1]]
    catalog['sfr_fib_mpajhu'][match[0]] = mpajhu_sfrfib.median[match[1]]
    catalog['ssfr_tot_mpajhu'][match[0]] = mpajhu_ssfrtot.median[match[1]]
    catalog['ssfr_fib_mpajhu'][match[0]] = mpajhu_ssfrfib.median[match[1]]
    catalog['mass_tot_mpajhu'][match[0]] = mpajhu_masstot.median[match[1]]
    catalog['mass_fib_mpajhu'][match[0]] = mpajhu_massfib.median[match[1]]

    # trim galaxies without matches
    hasmatch = np.where(catalog['mpajhu_index'] != -999)
    for key in catalog.keys():
        key_val = catalog[key]
        catalog[key] = key_val[hasmatch]
    catalog['mpajhu_tinker_index'] = hasmatch[0]

    M_cut = Tinker_Masscut(Mrcut)
    mpajhu_tinker_file = ''.join([
        UT.dir_dat(), 'tinker2011catalogs/', 'GroupCat.Mr',
        str(Mrcut), '.Mass',
        str(M_cut), '.D360.MPAJHU.hdf5'
    ])

    f = h5py.File(mpajhu_tinker_file, 'w')
    grp = f.create_group('data')
    for key in catalog.keys():
        grp.create_dataset(key, data=catalog[key])

    f.close()
    return None
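Examples #10 through #13 all repeat the same match-and-fill pattern: spherematch with a 0.000833333 deg (3 arcsec) tolerance, arrays initialized to a -999. sentinel, and assignment through the matched index pairs. A hypothetical helper that factors out that pattern could look like the sketch below (the function name and argument layout are my own; spherematch is assumed to return matched index arrays for the two coordinate lists, as in the examples above):

import numpy as np

def append_matched_column(catalog, source_values, match, name, fill=-999.):
    ''' Hypothetical helper: copy source_values into catalog[name] for matched
    galaxies, leaving unmatched entries at the sentinel fill value.
    match is the (catalog indices, source indices) pair returned by spherematch.
    '''
    column = np.repeat(fill, len(catalog['ra']))
    column[match[0]] = source_values[match[1]]
    catalog[name] = column
    return catalog

# usage sketch, mirroring Example #13:
# match = spherematch(catalog['ra'], catalog['dec'], mpajhu_gals.ra,
#                     mpajhu_gals.dec, 0.000833333)  # 3 arcsec tolerance
# append_matched_column(catalog, mpajhu_ssfrtot.median, match, 'ssfr_tot_mpajhu')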