Code example #1
File: ast.py  Project: cs362sp16/cs562w16
import random
from string import ascii_uppercase

from astropy.table import Table, Column


def check_description_unsing_offical_method():
    t = Table()
    while True:
        Random_column_Unit = ''.join(
            random.choice(ascii_uppercase) for i in range(1))
        Random_column_Name = ''.join(
            random.choice(ascii_uppercase) for i in range(2))
        Random_description = ''.join(
            random.choice(ascii_uppercase) for i in range(12))
        random_int = random.randint(0, 9)
        random_int2 = random.randint(0, 9)
        random_int3 = random.randint(0, 9)
        t[Random_column_Name] = Column([random_int, random_int2, random_int3],
                                       unit=Random_column_Unit,
                                       description=Random_description)
        t[Random_column_Name].description = Random_description
        if Random_description == t[Random_column_Name].description:
            print(Random_description, '==', t[Random_column_Name].description)
        else:
            print(Random_description, '!=', t[Random_column_Name].description)

        assert Random_description == t[Random_column_Name].description

        print(t)

        print('Pass test for adding column: ', t[Random_column_Name])
    return
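The randomized loop above exercises one simple round trip: a description passed to the Column constructor can be read back, or reassigned, through the description attribute. A minimal, non-random sketch of that pattern (the column name, unit, and description text here are arbitrary placeholders), assuming only astropy is installed:

from astropy.table import Table, Column

t = Table()
t['AB'] = Column([1, 2, 3], unit='m', description='example column')
assert t['AB'].description == 'example column'        # set via the constructor
t['AB'].description = 'updated description'
assert t['AB'].description == 'updated description'   # reset via the attribute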
Code example #2
File: test_hdf5.py  Project: tereyaedwards/the-office
def test_preserve_serialized_compatibility_mode(tmpdir):
    test_file = str(tmpdir.join('test.hdf5'))

    t1 = Table()
    t1['a'] = Column(data=[1, 2, 3], unit="s")
    t1['a'].meta['a0'] = "A0"
    t1['a'].meta['a1'] = {"a1": [0, 1]}
    t1['a'].format = '7.3f'
    t1['a'].description = 'A column'
    t1.meta['b'] = 1
    t1.meta['c'] = {"c0": [0, 1]}

    with catch_warnings() as w:
        t1.write(test_file,
                 path='the_table',
                 serialize_meta=True,
                 overwrite=True,
                 compatibility_mode=True)

    assert str(w[0].message).startswith(
        "compatibility mode for writing is deprecated")

    t2 = Table.read(test_file, path='the_table')

    assert t1['a'].unit == t2['a'].unit
    assert t1['a'].format == t2['a'].format
    assert t1['a'].description == t2['a'].description
    assert t1['a'].meta == t2['a'].meta
    assert t1.meta == t2.meta
Code example #3
File: test_hdf5.py  Project: MaxNoe/astropy
def test_preserve_serialized_compatibility_mode(tmpdir):
    test_file = str(tmpdir.join('test.hdf5'))

    t1 = Table()
    t1['a'] = Column(data=[1, 2, 3], unit="s")
    t1['a'].meta['a0'] = "A0"
    t1['a'].meta['a1'] = {"a1": [0, 1]}
    t1['a'].format = '7.3f'
    t1['a'].description = 'A column'
    t1.meta['b'] = 1
    t1.meta['c'] = {"c0": [0, 1]}

    with catch_warnings() as w:
        t1.write(test_file, path='the_table', serialize_meta=True,
                 overwrite=True, compatibility_mode=True)

    assert str(w[0].message).startswith(
        "compatibility mode for writing is deprecated")

    t2 = Table.read(test_file, path='the_table')

    assert t1['a'].unit == t2['a'].unit
    assert t1['a'].format == t2['a'].format
    assert t1['a'].description == t2['a'].description
    assert t1['a'].meta == t2['a'].meta
    assert t1.meta == t2.meta
Code example #4
def test_preserve_serialized(tmpdir):
    test_file = str(tmpdir.join('test.hdf5'))

    t1 = Table()
    t1['a'] = Column(data=[1, 2, 3], unit="s")
    t1['a'].meta['a0'] = "A0"
    t1['a'].meta['a1'] = {"a1": [0, 1]}
    t1['a'].format = '7.3f'
    t1['a'].description = 'A column'
    t1.meta['b'] = 1
    t1.meta['c'] = {"c0": [0, 1]}

    t1.write(test_file, path='the_table', serialize_meta=True, overwrite=True)

    t2 = Table.read(test_file, path='the_table')

    assert t1['a'].unit == t2['a'].unit
    assert t1['a'].format == t2['a'].format
    assert t1['a'].description == t2['a'].description
    assert t1['a'].meta == t2['a'].meta
    assert t1.meta == t2.meta

    # Check that the meta table is fixed-width bytes (see #11299)
    h5 = h5py.File(test_file, 'r')
    meta_lines = h5[meta_path('the_table')]
    assert meta_lines.dtype.kind == 'S'
Code example #5
def test_metadata_very_large(tmpdir):
    """Test that very large datasets work"""

    test_file = tmpdir.join('test.parquet')

    t1 = Table()
    t1['a'] = Column(data=[1, 2, 3], unit="s")
    t1['a'].meta['a0'] = "A0"
    t1['a'].meta['a1'] = {"a1": [0, 1]}
    t1['a'].format = '7.3f'
    t1['a'].description = 'A column'
    t1.meta['b'] = 1
    t1.meta['c'] = {"c0": [0, 1]}
    t1.meta["meta_big"] = "0" * (2**16 + 1)
    t1.meta["meta_biggerstill"] = "0" * (2**18)

    t1.write(test_file, overwrite=True)

    t2 = Table.read(test_file)

    assert t1['a'].unit == t2['a'].unit
    assert t1['a'].format == t2['a'].format
    assert t1['a'].description == t2['a'].description
    assert t1['a'].meta == t2['a'].meta
    assert t1.meta == t2.meta
Code example #6
def test_preserve_serialized(tmpdir):
    test_file = str(tmpdir.join('test.hdf5'))

    t1 = Table()
    t1['a'] = Column(data=[1, 2, 3], unit="s")
    t1['a'].meta['a0'] = "A0"
    t1['a'].meta['a1'] = {"a1": [0, 1]}
    t1['a'].format = '7.3f'
    t1['a'].description = 'A column'
    t1.meta['b'] = 1
    t1.meta['c'] = {"c0": [0, 1]}

    t1.write(test_file, path='the_table', serialize_meta=True, overwrite=True)

    t2 = Table.read(test_file, path='the_table')

    assert t1['a'].unit == t2['a'].unit
    assert t1['a'].format == t2['a'].format
    assert t1['a'].description == t2['a'].description
    assert t1['a'].meta == t2['a'].meta
    assert t1.meta == t2.meta
Code example #7
File: test_hdf5.py  Project: MaxNoe/astropy
def test_preserve_serialized(tmpdir):
    test_file = str(tmpdir.join('test.hdf5'))

    t1 = Table()
    t1['a'] = Column(data=[1, 2, 3], unit="s")
    t1['a'].meta['a0'] = "A0"
    t1['a'].meta['a1'] = {"a1": [0, 1]}
    t1['a'].format = '7.3f'
    t1['a'].description = 'A column'
    t1.meta['b'] = 1
    t1.meta['c'] = {"c0": [0, 1]}

    t1.write(test_file, path='the_table', serialize_meta=True, overwrite=True)

    t2 = Table.read(test_file, path='the_table')

    assert t1['a'].unit == t2['a'].unit
    assert t1['a'].format == t2['a'].format
    assert t1['a'].description == t2['a'].description
    assert t1['a'].meta == t2['a'].meta
    assert t1.meta == t2.meta
Code example #8
File: ast.py  Project: agroce/cs562w16
import random
from string import ascii_uppercase

from astropy.table import Table, Column


def check_description_unsing_offical_method():
    t = Table()
    while True:
        Random_column_Unit = ''.join(
            random.choice(ascii_uppercase) for i in range(1))
        Random_column_Name = ''.join(
            random.choice(ascii_uppercase) for i in range(2))
        Random_description = ''.join(
            random.choice(ascii_uppercase) for i in range(12))
        random_int = random.randint(0, 9)
        random_int2 = random.randint(0, 9)
        random_int3 = random.randint(0, 9)
        t[Random_column_Name] = Column([random_int, random_int2, random_int3],
                                       unit=Random_column_Unit,
                                       description=Random_description)
        t[Random_column_Name].description = Random_description
        if Random_description == t[Random_column_Name].description:
            print(Random_description, '==', t[Random_column_Name].description)
        else:
            print(Random_description, '!=', t[Random_column_Name].description)

        assert Random_description == t[Random_column_Name].description

        print(t)

        print('Pass test for adding column: ', t[Random_column_Name])
    return
Code example #9
def test_preserve_serialized_old_meta_format(tmpdir):
    """Test the old meta format

    Only for some files created prior to v4.0, in compatibility mode.
    """
    test_file = get_pkg_data_filename('data/old_meta_example.hdf5')

    t1 = Table()
    t1['a'] = Column(data=[1, 2, 3], unit="s")
    t1['a'].meta['a0'] = "A0"
    t1['a'].meta['a1'] = {"a1": [0, 1]}
    t1['a'].format = '7.3f'
    t1['a'].description = 'A column'
    t1.meta['b'] = 1
    t1.meta['c'] = {"c0": [0, 1]}

    t2 = Table.read(test_file, path='the_table')

    assert t1['a'].unit == t2['a'].unit
    assert t1['a'].format == t2['a'].format
    assert t1['a'].description == t2['a'].description
    assert t1['a'].meta == t2['a'].meta
    assert t1.meta == t2.meta
Code example #10
def test_preserve_serialized(tmpdir):
    """Test that writing/reading preserves unit/format/description."""

    test_file = tmpdir.join('test.parquet')

    t1 = Table()
    t1['a'] = Column(data=[1, 2, 3], unit="s")
    t1['a'].meta['a0'] = "A0"
    t1['a'].meta['a1'] = {"a1": [0, 1]}
    t1['a'].format = '7.3f'
    t1['a'].description = 'A column'
    t1.meta['b'] = 1
    t1.meta['c'] = {"c0": [0, 1]}

    t1.write(test_file, overwrite=True)

    t2 = Table.read(test_file)

    assert t1['a'].unit == t2['a'].unit
    assert t1['a'].format == t2['a'].format
    assert t1['a'].description == t2['a'].description
    assert t1['a'].meta == t2['a'].meta
    assert t1.meta == t2.meta
Code example #11
File: test_hdf5.py  Project: MaxNoe/astropy
def test_metadata_very_large(tmpdir):
    """Test that very large datasets work, now!"""
    test_file = str(tmpdir.join('test.hdf5'))

    t1 = Table()
    t1['a'] = Column(data=[1, 2, 3], unit="s")
    t1['a'].meta['a0'] = "A0"
    t1['a'].meta['a1'] = {"a1": [0, 1]}
    t1['a'].format = '7.3f'
    t1['a'].description = 'A column'
    t1.meta['b'] = 1
    t1.meta['c'] = {"c0": [0, 1]}
    t1.meta["meta_big"] = "0" * (2 ** 16 + 1)
    t1.meta["meta_biggerstill"] = "0" * (2 ** 18)

    t1.write(test_file, path='the_table', serialize_meta=True, overwrite=True)

    t2 = Table.read(test_file, path='the_table')

    assert t1['a'].unit == t2['a'].unit
    assert t1['a'].format == t2['a'].format
    assert t1['a'].description == t2['a'].description
    assert t1['a'].meta == t2['a'].meta
    assert t1.meta == t2.meta
Code example #12
def add_ltemass(label='pcc_12',
                n13cub=None,
                i12cub=None,
                i13cub=None,
                n13cub_uc=None,
                distpc=4.8e4,
                co13toh2=5.0e6):
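    # Tally the LTE-derived masses (and, when the 12CO/13CO integrated-intensity
    # cubes are provided, the line fluxes) for each dendrogram structure and
    # append them as new columns to the <label>_physprop catalog, writing the
    # result to <label>_physprop_add.txt.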

    # Make the uncertainty input a list
    if not isinstance(n13cub_uc, list): n13cub_uc = [n13cub_uc]

    # Adopted parameters
    dist = distpc * u.pc

    # Get basic info from header
    hd = getheader(n13cub)
    deltav = np.abs(hd['cdelt3'] / 1000.)
    pixdeg = np.abs(hd['cdelt2'])
    pix2cm = (np.radians(pixdeg) * dist).to(u.cm)
    ppbeam = np.abs((hd['bmaj'] * hd['bmin']) / (hd['cdelt1'] * hd['cdelt2']) *
                    2 * np.pi / (8 * np.log(2)))
    osamp = np.sqrt(ppbeam)

    # Total the LTE masses (and optionally, 12CO and 13CO fluxes)
    d = Dendrogram.load_from(label + '_dendrogram.hdf5')
    cat = Table.read(label + '_physprop.txt', format='ascii.ecsv')
    srclist = cat['_idx'].tolist()
    for col in [
            'flux12', 'flux13', 'mlte', 'e_mlte', 'siglte', 'e_siglte',
            'e_mlte_alt'
    ]:
        newcol = Column(name=col, data=np.zeros(np.size(srclist)))

        if col == 'flux12':
            if i12cub is not None:
                data, ihd = getdata(i12cub, header=True)
                if 'RESTFREQ' in ihd.keys():
                    rfreq = ihd['RESTFREQ'] * u.Hz
                elif 'RESTFRQ' in ihd.keys():
                    rfreq = ihd['RESTFRQ'] * u.Hz
                newcol.description = '12CO flux within the structure'
            else:
                continue
        elif col == 'flux13':
            if i13cub is not None:
                data, ihd = getdata(i13cub, header=True)
                if 'RESTFREQ' in ihd.keys():
                    rfreq = ihd['RESTFREQ'] * u.Hz
                elif 'RESTFRQ' in ihd.keys():
                    rfreq = ihd['RESTFRQ'] * u.Hz
                newcol.description = '13CO flux within the structure'
            else:
                continue
        elif col == 'mlte':
            data = getdata(n13cub)
            newcol.description = 'LTE mass using H2/13CO=' + str(co13toh2)
        elif col == 'e_mlte':
            data = getdata(n13cub_uc[0])
            newcol.description = 'fractional unc in mlte'
        elif col == 'siglte':
            data = getdata(n13cub)
            newcol.description = 'LTE mass divided by area in pc2'
        elif col == 'e_siglte':
            data = getdata(n13cub_uc[0])
            newcol.description = 'fractional unc in siglte [same as e_lte]'
        elif col == 'e_mlte_alt':
            if len(n13cub_uc) > 1:
                data = getdata(n13cub_uc[1])
                newcol.description = 'fractional unc in mlte from alt approach'
            else:
                continue

        for i, c in enumerate(srclist):
            mask = d[c].get_mask()
            if not col.startswith('e_'):
                newcol[i] = np.nansum(data[np.where(mask)])
                # nansum returns zero if all are NaN, want NaN
                chknan = np.asarray(np.isnan(data[np.where(mask)]))
                if chknan.all():
                    newcol[i] = np.nan
            else:
                newcol[i] = np.sqrt(np.nansum(data[np.where(mask)]**2)) * osamp

        if col in ['flux12', 'flux13']:
            # Convert from K*pix*ch to Jy*km/s
            convfac = (1 * u.K).to(
                u.Jy / u.deg**2, equivalencies=u.brightness_temperature(rfreq))
            newcol *= deltav * convfac.value * (pixdeg)**2
            newcol.unit = 'Jy km / s'
        else:
            # Multiply by channel width in km/s and area in cm^2 to get molecule number
            newcol *= deltav * pix2cm.value**2
            # Convert from molecule number to solar masses including He
            newcol *= co13toh2 * 2 * 1.36 * const.m_p.value / const.M_sun.value
            if col == 'mlte':
                newcol.unit = 'solMass'
            elif col == 'siglte':
                newcol /= cat['area_pc2']
                newcol.unit = 'solMass/pc2'
            else:
                newcol /= cat['mlte']
                newcol.unit = ''

        cat.add_column(newcol)

    #cat.pprint(show_unit=True)
    cat.write(label + '_physprop_add.txt', format='ascii.ecsv', overwrite=True)

    return
Code example #13
def calc_phys_props(label='pcc_12',
                    cubefile=None,
                    dendrofile=None,
                    boot_iter=400,
                    efloor=0,
                    alphascale=1,
                    distpc=4.8e4,
                    copbcor=None,
                    conoise=None,
                    ancfile=None,
                    anclabel=None,
                    verbose=False,
                    clipping=False,
                    co13toh2=1e6,
                    n13cube=None,
                    n13errcube=None,
                    dendro_in=None):
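    # Compute physical properties (size, linewidth, luminous and virial masses,
    # surface densities, virial parameter) for every structure in the dendrogram
    # catalog, estimate fractional uncertainties by bootstrapping, and write the
    # results to <label>_physprop<clipstr>.txt in ECSV format.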

    if clipping:
        clipstr = "_clipped"
    else:
        clipstr = ""

    rmstorad = 1.91
    alphaco = 4.3 * u.solMass * u.s / (u.K * u.km * u.pc**2)  # Bolatto+ 13
    dist = distpc * u.pc
    as2 = 1 * u.arcsec**2
    asarea = (as2 * dist**2).to(u.pc**2,
                                equivalencies=u.dimensionless_angles())

    # ---- load the dendrogram and catalog
    if dendrofile is None:
        dendrofile = label + '_dendrogram.hdf5'

    if dendro_in is not None:
        d = dendro_in
    elif os.path.isfile(dendrofile):
        print('Loading pre-existing dendrogram')
        d = Dendrogram.load_from(dendrofile)
    cat = Table.read(label + '_full_catalog' + clipstr + '.txt',
                     format='ascii.ecsv')
    srclist = cat['_idx'].tolist()

    # ---- load the cube and extract the metadata
    cube, hd3 = getdata(cubefile, header=True)
    metadata = {}
    if hd3['BUNIT'].upper() == 'JY/BEAM':
        metadata['data_unit'] = u.Jy / u.beam
    elif hd3['BUNIT'].upper() == 'K':
        metadata['data_unit'] = u.K
    else:
        print("\nWarning: Unrecognized brightness unit")
    metadata['vaxis'] = 0
    if 'RESTFREQ' in hd3.keys():
        freq = hd3['RESTFREQ'] * u.Hz
    elif 'RESTFRQ' in hd3.keys():
        freq = hd3['RESTFRQ'] * u.Hz
    cdelt1 = abs(hd3['cdelt1']) * 3600. * u.arcsec
    cdelt2 = abs(hd3['cdelt2']) * 3600. * u.arcsec
    # this assumes vel cube!
    if hd3['ctype3'][0:4] == "FREQ":
        nu0 = hd3['restfrq']
        dnu = hd3['cdelt3']
        deltav = 2.99792458e5 * np.absolute(dnu) / nu0 * u.km / u.s
    else:
        deltav = abs(hd3['cdelt3']) / 1000. * u.km / u.s
    metadata['wavelength'] = freq.to(u.m, equivalencies=u.spectral())
    metadata['spatial_scale'] = cdelt2
    metadata['velocity_scale'] = deltav
    bmaj = hd3['bmaj'] * 3600. * u.arcsec  # FWHM
    bmin = hd3['bmin'] * 3600. * u.arcsec  # FWHM
    metadata['beam_major'] = bmaj
    metadata['beam_minor'] = bmin
    ppbeam = np.abs(
        (bmaj * bmin) / (cdelt1 * cdelt2) * 2 * np.pi / (8 * np.log(2)))
    print("\nPixels per beam: {:.2f}".format(ppbeam))
    # Assume every 2 channels have correlated noise
    indfac = np.sqrt(ppbeam * 2)

    # ---- read in ancillary files
    if copbcor is not None and conoise is not None:
        cube12, hd12 = getdata(copbcor, header=True)
        ecube12, ehd12 = getdata(conoise, header=True)
        if 'RESTFREQ' in hd12.keys():
            freq12 = hd12['RESTFREQ'] * u.Hz
        elif 'RESTFRQ' in hd12.keys():
            freq12 = hd12['RESTFRQ'] * u.Hz
        else:
            raise ValueError('Rest frequency missing from file ' + copbcor)
        if hd12['BUNIT'].upper() != 'K':
            raise ValueError('Non-Kelvin units not yet supported for copbcor')
        if ehd12['NAXIS'] == 2:
            tmpcube = np.broadcast_to(ecube12, np.shape(cube12))
            ecube12 = tmpcube
    if ancfile is not None:
        ancdata, anchd = getdata(ancfile, header=True)

    # elliptical function
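    # w_ell(x) = ln(x + sqrt(x^2 - 1)) / sqrt(x^2 - 1) = arccosh(x) / sqrt(x^2 - 1);
    # geometric factor used below for the elliptical virial mass, called with
    # x = major/minor axis ratio (so x >= 1).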
    def w_ell(ratio):
        return np.log(np.sqrt(ratio**2 - 1) + ratio) / np.sqrt(ratio**2 - 1)

    # ---- call the bootstrapping routine
    emaj, emin, epa, evrms, errms, eaxra, eflux, emvir, eemvir, ealpha, tb12, ancmean, ancrms = [
        np.zeros(len(srclist)) for _ in range(13)
    ]
    print("Calculating property errors... with %i iterations" % boot_iter)
    print("....................\r", end='')
    nsrc = len(srclist)
    nsrcd = nsrc // 20

    for j, clust in enumerate(srclist):
        if j % nsrcd == 0 and j > 0:
            print("+" * (j // nsrcd) + "." * ((nsrc - j) // nsrcd) + "\r",
                  end='')

        if verbose: print("   cl", j, "/", len(srclist))
        asgn = np.zeros(cube.shape)
        asgn[d[clust].get_mask(shape=asgn.shape)] = 1
        sindices = np.where(asgn == 1)
        svalues = cube[sindices]

        emmajs, emmins, emomvs, emom0s, pa = clustbootstrap(sindices,
                                                            svalues,
                                                            metadata,
                                                            boot_iter,
                                                            verbose=False)
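        # clustbootstrap returns bootstrap samples of the major/minor sigmas
        # (arcsec), the velocity second moments (m/s), the integrated fluxes,
        # and the position angles for this structure.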
        # bin_list=np.linspace(np.floor(min(emmajs)),np.ceil(max(emmajs)),50)
        # fig, axes = plt.subplots()
        # axes.hist(emmajs, bin_list, normed=0, histtype='bar')
        # plt.savefig('emmajs_histo.pdf', bbox_inches='tight')

        bootmaj = np.asarray(emmajs) * u.arcsec
        bootmajpc = (bootmaj * dist).to(u.pc,
                                        equivalencies=u.dimensionless_angles())
        bootmin = np.asarray(emmins) * u.arcsec
        bootpa = np.asarray(pa) * u.deg
        bootvrms = (np.asarray(emomvs) * u.m / u.s).to(u.km / u.s)
        bootrrms = (np.sqrt(np.asarray(emmajs) * np.asarray(emmins)) *
                    u.arcsec * dist).to(u.pc,
                                        equivalencies=u.dimensionless_angles())
        bootaxrat = bootmin / bootmaj
        bootflux = np.asarray(emom0s) * u.Jy  # RI: cube assumed to be Jy
        bootmvir = (5 * rmstorad * bootvrms**2 * bootrrms / const.G).to(
            u.solMass)
        # elliptical mvir:
        bootemvir = (5 * rmstorad * bootvrms**2 * bootmajpc /
                     w_ell(1. / bootaxrat) / const.G).to(u.solMass)
        bootmlum = alphaco * alphascale * deltav * asarea * (bootflux).to(
            u.K, equivalencies=u.brightness_temperature(as2, freq))
        bootalpha = bootmvir / bootmlum

        emaj[j] = indfac * mad_std(bootmaj) / np.median(bootmaj)
        emin[j] = indfac * mad_std(bootmin) / np.median(bootmin)
        epa[j] = indfac * mad_std(bootpa) / np.median(bootpa)
        evrms[j] = indfac * mad_std(bootvrms) / np.median(bootvrms)
        errms[j] = indfac * mad_std(bootrrms) / np.median(bootrrms)
        eaxra[j] = indfac * mad_std(bootaxrat) / np.median(bootaxrat)
        emvir[j] = indfac * mad_std(bootmvir) / np.median(bootmvir)
        eemvir[j] = indfac * mad_std(bootemvir) / np.median(bootemvir)
        ealpha[j] = indfac * mad_std(bootalpha) / np.median(bootalpha)

        if copbcor is not None and conoise is not None:
            tb12[j] = np.nansum(asgn * cube12)
            eflux[j] = indfac * np.sqrt(np.nansum(asgn * ecube12**2)) / tb12[j]
            #print(j, len(svalues), tb12[j], etb12[j])
        else:
            eflux[j] = indfac * mad_std(bootflux) / np.median(bootflux)
        if ancfile is not None:
            if anchd['NAXIS'] == 2:
                collapsedmask = np.amax(asgn, axis=0)
                collapsedmask[collapsedmask == 0] = np.nan
                ancmean[j] = np.nanmean(ancdata * collapsedmask)
                ancrms[j] = np.sqrt(
                    np.nanmean((ancdata * collapsedmask)**2) - ancmean[j]**2)
            else:
                ancmean[j] = np.nanmean(ancdata * asgn)
                ancrms[j] = np.sqrt(
                    np.nanmean((ancdata * asgn)**2) - ancmean[j]**2)

    print()
    # ---- report the median uncertainties
    print("The median fractional error in rad_pc is {:2.4f}".format(
        np.nanmedian(errms)))
    print("The median fractional error in vrms_k is {:2.4f}".format(
        np.nanmedian(evrms)))
    print("The median fractional error in mlumco is {:2.4f}".format(
        np.nanmedian(eflux)))

    # ---- apply a floor if requested
    if efloor > 0:
        print("Applying a minimum fractional error of {:2.3f}".format(efloor))
        errms[errms < efloor] = efloor
        evrms[evrms < efloor] = efloor
        eflux[eflux < efloor] = efloor

    # ---- calculate the physical properties
    rms_pc = (cat['radius'] * dist).to(u.pc,
                                       equivalencies=u.dimensionless_angles())
    maj_pc = (cat['major_sigma'] * dist).to(
        u.pc, equivalencies=u.dimensionless_angles())
    rad_pc = rmstorad * rms_pc
    v_rms = cat['v_rms'].to(u.km / u.s)
    axrat = cat['minor_sigma'] / cat['major_sigma']
    axrat.unit = ""
    ellarea = (cat['area_ellipse'] * dist**2).to(
        u.pc**2, equivalencies=u.dimensionless_angles())
    xctarea = (cat['area_exact'] * dist**2).to(
        u.pc**2, equivalencies=u.dimensionless_angles())
    if copbcor is not None and conoise is not None:
        # Convert from K*pix*ch to Jy*km/s
        #convfac = (1*u.K).to(u.Jy/u.arcsec**2, equivalencies=u.brightness_temperature(freq12))
        #flux12 = tb12 * deltav.value * convfac.value * cdelt2.value**2
        mlumco = alphaco * alphascale * tb12 * u.K * deltav * asarea * cdelt2.value**2
    else:
        # lumco = Luminosity in K km/s pc^2
        lumco = deltav * asarea * (cat['flux']).to(
            u.K, equivalencies=u.brightness_temperature(as2, freq))
        mlumco = alphaco * alphascale * lumco
    siglum = mlumco / xctarea
    mvir = (5 * rmstorad * v_rms**2 * rms_pc / const.G).to(
        u.solMass)  # Rosolowsky+ 08
    # Elliptical virial mass; renamed so it does not shadow the bootstrap
    # fractional-error array `emvir` filled above.
    mvir_ell = (5 * rmstorad * v_rms**2 * maj_pc / w_ell(1. / axrat) /
                const.G).to(u.solMass)  # Rosolowsky+ 08

    sigvir = mvir / xctarea
    sigevir = mvir_ell / xctarea
    alpha = mvir / mlumco

    # ---- make the physical properties table
    ptab = Table()
    ptab['_idx'] = Column(srclist)
    ptab['area_pc2'] = Column(xctarea,
                              description='projected area of structure')
    ptab['rad_pc'] = Column(rad_pc, description='equivalent radius in pc')
    ptab['e_rad_pc'] = Column(errms, description='frac error in radius')
    ptab['vrms_k'] = Column(v_rms, description='rms linewidth in km/s')
    ptab['e_vrms_k'] = Column(evrms, description='frac error in linewidth')
    ptab['axratio'] = Column(axrat,
                             unit='',
                             description='minor to major axis ratio')
    ptab['e_axratio'] = Column(eaxra, description='frac error in axis ratio')
    if copbcor is not None and conoise is not None:
        #ptab['flux12']    = Column(flux12, unit='Jy km / s', description='CO flux in structure')
        #ptab['e_flux12']  = Column(eflux, description='frac error in CO flux')
        ptab['mlumco'] = Column(mlumco,
                                description='CO-based mass with alphascale=' +
                                str(alphascale) + ' from ' +
                                os.path.basename(copbcor))
    else:
        ptab['mlumco'] = Column(
            mlumco,
            description='Mass from scaling luminosity with alphascale=' +
            str(alphascale))
    ptab['e_mlumco'] = Column(eflux, description='frac error in luminous mass')
    ptab['siglum'] = Column(siglum,
                            description='average surface density from mlumco')
    ptab['e_siglum'] = Column(eflux, description='same as e_mlumco')
    ptab['mvir'] = Column(mvir, description='virial mass')
    ptab['e_mvir'] = Column(emvir, description='frac error in virial mass')
    ptab['emvir'] = Column(mvir_ell, description='elliptical virial mass')
    ptab['e_emvir'] = Column(
        eemvir, description='frac error in elliptical virial mass')
    ptab['sigvir'] = Column(sigvir, description='virial surface density')
    ptab['e_sigvir'] = Column(emvir, description='same as e_mvir')
    ptab['sigevir'] = Column(sigevir,
                             description='ell. virial surface density')
    ptab['e_sigevir'] = Column(emvir, description='same as e_mvir')
    ptab['alpha'] = Column(alpha, unit='', description='virial parameter')
    ptab['e_alpha'] = Column(ealpha,
                             description='frac error in virial parameter')
    if ancfile is not None:
        if anclabel is None:
            anclabel = ancfile.replace('.', '_').split('_')[1]
        ptab[anclabel] = Column(ancmean, unit=anchd['BUNIT'])
        ancferr = indfac * ancrms / ancmean
        ptab['e_' + anclabel] = Column(ancferr)

    from ellfit import ellfit
    # add ellipse at half-max like in cprops
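    # For each structure, take the voxels brighter than half of its peak,
    # project their unique (x, y) pixels onto the sky plane, and fit an
    # ellipse to that footprint with ellfit.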
    halfmax_ell_maj = np.zeros(len(srclist), dtype=np.float64)
    halfmax_ell_min = np.zeros(len(srclist), dtype=np.float64)
    halfmax_ell_pa = np.zeros(len(srclist), dtype=np.float64)
    if clipping:
        tmax = cat['tmax-tmin']
    else:
        tmax = cat['tmax']
    for i, c in enumerate(srclist):
        if tmax[i] > 0:
            ind = d[c].indices()
            half_ind_z = np.where(cube[ind] > 0.5 * cube[ind].max())[0]
            z, y, x = ind
            # unique set of 2-d indices for the 3-d clump
            twod_id = x[half_ind_z] + y[half_ind_z] * (x[half_ind_z].max() + 1)

            # half_twod_ind = np.unique(twod_id[half_ind],return_index=True)[1]
            half_twod_ind = np.unique(twod_id, return_index=True)[1]
            half_twod_x = x[half_twod_ind]
            half_twod_y = y[half_twod_ind]

            halfmax_ell_maj[i], halfmax_ell_min[i], halfmax_ell_pa[i] = ellfit(
                half_twod_x, half_twod_y)

    ptab['halfmax_ell_maj'] = Column(halfmax_ell_maj,
                                     name='halfmax_ell_maj',
                                     unit='pix')
    ptab['halfmax_ell_min'] = Column(halfmax_ell_min,
                                     name='halfmax_ell_min',
                                     unit='pix')
    ptab['halfmax_ell_pa'] = Column(halfmax_ell_pa,
                                    name='halfmax_ell_pa',
                                    unit='rad')

    # go ahead and add lte mass here to have all this in one place
    if n13cube is not None:
        if os.path.exists(n13cube):
            print("adding MLTE from " + n13cube)
            srclist = ptab['_idx'].tolist()
            newcol = Column(name="mlte", data=np.zeros(np.size(srclist)))
            data = fits.getdata(n13cube)

            cubehdr = fits.getheader(n13cube)
            dx = cubehdr['cdelt2'] * 3600 / 206265 * dist.value  # pc
            nu0 = cubehdr['restfrq']
            if cubehdr['ctype3'] == 'VRAD' or cubehdr['ctype3'][0:3] == 'VEL':
                dv = cubehdr['cdelt3'] / 1000
            else:
                dnu = cubehdr['cdelt3']
                dv = 2.99792458e5 * np.absolute(dnu) / nu0

            newcol.description = 'LTE mass using H2/13CO=' + str(co13toh2)
            if os.path.exists(n13errcube):
                e_newcol = Column(name="e_mlte",
                                  data=np.zeros(np.size(srclist)))
                uncert2 = fits.getdata(n13errcube)**2
                e_newcol.description = 'LTE mass uncert.'

            for i, c in enumerate(srclist):
                mask = d[c].get_mask()
                if clipping:
                    cmin = np.nanmin(data[np.where(mask)])
                    newcol[i] = np.nansum(data[np.where(mask)] - cmin)
                else:
                    newcol[i] = np.nansum(data[np.where(mask)])
                # nansum returns zero if all are NaN, want NaN
                chknan = np.asarray(np.isnan(data[np.where(mask)]))
                if chknan.all():
                    newcol[i] = np.nan

                if os.path.exists(n13errcube):
                    e_newcol[i] = np.nansum(uncert2[np.where(mask)])
                    # nansum returns zero if all are NaN, want NaN
                    chknan = np.asarray(np.isnan(uncert2[np.where(mask)]))
                    if chknan.all():
                        e_newcol[i] = np.nan

            # Multiply by channel width in km/s and area in cm^2 to get molecule number
            newcol *= dv * (dx * 3.09e18)**2
            # Convert from molecule number to solar masses including He
            newcol *= co13toh2 * 2 * 1.36 * 1.66e-24 / 1.99e33
            newcol.unit = 'solMass'
            ptab['mlte'] = newcol

            if os.path.exists(n13errcube):
                e_newcol = np.sqrt(e_newcol)
                e_newcol *= dv * (dx * 3.09e18)**2
                # Convert from molecule number to solar masses including He
                e_newcol *= co13toh2 * 2 * 1.36 * 1.66e-24 / 1.99e33
                e_newcol.unit = 'solMass'
                ptab['e_mlte'] = e_newcol

    ptab.write(label + '_physprop' + clipstr + '.txt',
               format='ascii.ecsv',
               overwrite=True)