Example #1
def test_mock_covar():

    data_root = remote_data_file()
    kin = manga.MaNGAStellarKinematics.from_plateifu(8138, 12704, cube_path=data_root,
                                                     maps_path=data_root, covar=True)

    # Set the parameters close to the best-fitting parameters from a previous
    # run
    p0 = numpy.array([-0.2, -0.08, 166.3, 53.0, 25.6, 217.0, 2.82, 189.7, 16.2])

    disk = AxisymmetricDisk(rc=HyperbolicTangent(), dc=Exponential())
    v, s = disk.model(p0, x=kin.grid_x, y=kin.grid_y, sb=kin.grid_sb, beam=kin.beam_fft,
                      is_fft=True)
    vremap = kin.remap(kin.bin(v), mask=kin.vel_mask)
    sremap = kin.remap(kin.bin(s), mask=kin.sig_mask)

    # Fix the seed so that the result is deterministic
    # WARNING: Without this, there were instances where the deviate for the
    # dispersion would be entirely masked!  Need to understand how/why that can
    # happen.
    rng = numpy.random.default_rng(seed=909)
    mock_kin = disk.mock_observation(p0, kin=kin, add_err=True, rng=rng)
    mock_vremap = mock_kin.remap('vel')
    mock_sremap = mock_kin.remap(numpy.sqrt(mock_kin.sig_phys2), mask=kin.sig_mask)

    assert numpy.ma.std(mock_vremap-vremap) > 5, 'Velocity error changed'
    assert numpy.ma.std(mock_sremap-sremap) > 7, 'Dispersion error changed'
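
Note: these snippets are excerpted from the NIRVANA test suite and omit their imports. A minimal sketch of what they appear to assume is given below; the module paths are guesses based on the names used and may not match the actual layout of the kbwestfall/NIRVANA package. GlobalPar (Example #4) and the helpers used by the collation script in the last example (astropy.io.fits, numpy as np, os, pathlib, warnings, fileio, manga_paths, etc.) are not listed here.

# Module paths below are assumptions inferred from the class and function
# names used in these examples; check the NIRVANA source for the
# authoritative locations.
import numpy

from nirvana.data import manga, scatter, util
from nirvana.models.axisym import AxisymmetricDisk
from nirvana.models.oned import HyperbolicTangent, Exponential
from nirvana.models.beam import gauss2d_kernel, ConvolveFFTW
from nirvana.tests.util import remote_data_file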
Example #2
def test_lsq_psf():

    # Read the data to fit
    data_root = remote_data_file()
    kin = manga.MaNGAGasKinematics.from_plateifu(8138, 12704, cube_path=data_root,
                                                 maps_path=data_root)
    # Set the rotation curve
    rc = HyperbolicTangent(lb=numpy.array([0., 1e-3]), ub=numpy.array([500., kin.max_radius()]))
    # Set the disk velocity field
    disk = AxisymmetricDisk(rc=rc)
    # Fit it with a non-linear least-squares optimizer
    disk.lsq_fit(kin) #, verbose=2)

    assert numpy.all(numpy.absolute(disk.par[:2]) < 0.1), 'Center changed'
    assert 165. < disk.par[2] < 167., 'PA changed'
    assert 55. < disk.par[3] < 59., 'Inclination changed'
    assert 252. < disk.par[5] < 255., 'Projected rotation changed'
Example #3
def test_disk_derivative_bin():

    # Read the data to fit
    data_root = remote_data_file()
    kin = manga.MaNGAStellarKinematics.from_plateifu(8138, 12704, cube_path=data_root,
                                                     maps_path=data_root)

    disk = AxisymmetricDisk(rc=HyperbolicTangent(), dc=Exponential())
    
    # Ensure that center is offset from 0,0 because of derivative calculation when r==0.
    disk.par[:2] = 0.1
    # Use a slowly rising rotation curve.  More quickly rising rotation curves
    # show a greater difference between the finite-difference and direct
    # derivative calculations after the convolution.
    disk.par[-3] = 20.

    # Finite difference test steps
    #                 x0      y0      pa     inc    vsys   vinf   hv      sig0   hsig
    dp = numpy.array([0.0001, 0.0001, 0.001, 0.001, 0.001, 0.001, 0.0001, 0.001, 0.0001])

    # Include the beam-smearing
    # ConvolveFFTW may be unavailable (e.g., if the optional FFTW support is
    # missing); if so, fall back to the default convolution (cnvfftw=None)
    try:
        cnvfftw = ConvolveFFTW(kin.spatial_shape)
    except:
        cnvfftw = None
    v, sig, dv, dsig = disk.deriv_model(disk.par, x=kin.grid_x, y=kin.grid_y, sb=kin.grid_sb, 
                                        beam=kin.beam_fft, is_fft=True, cnvfftw=cnvfftw)
    # Now also include the binning
    bv, dbv = kin.deriv_bin(v, dv)
    bsig, dbsig = kin.deriv_bin(sig, dsig)

    vp = numpy.empty(v.shape+(disk.par.size,), dtype=float)
    sigp = numpy.empty(v.shape+(disk.par.size,), dtype=float)
    bvp = numpy.empty(bv.shape+(disk.par.size,), dtype=float)
    bsigp = numpy.empty(bv.shape+(disk.par.size,), dtype=float)
    p = disk.par.copy()
    for i in range(disk.par.size):
        _p = p.copy()
        _p[i] += dp[i]
        # These calls to `model` reuse the previously provided x, y, sb, beam,
        # and cnvfftw
        vp[...,i], sigp[...,i] = disk.model(_p)
        bvp[...,i] = kin.bin(vp[...,i])
        bsigp[...,i] = kin.bin(sigp[...,i])
    disk._set_par(p)

    fd_dbv = (bvp - bv[...,None])/dp[None,:]
    fd_dbsig = (bsigp - bsig[...,None])/dp[None,:]

    for i in range(disk.par.size):
        assert numpy.allclose(dbv[...,i], fd_dbv[...,i], rtol=0., atol=1e-4), \
                f'Finite difference produced different derivative for parameter {i+1}!'
        # The difference is relatively large (again) for the dispersion data
        assert numpy.allclose(dbsig[...,i], fd_dbsig[...,i], rtol=0., atol=3e-3), \
                f'Finite difference produced different sigma derivative for parameter {i+1}!'
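
The derivative tests here and below all use the same finite-difference pattern: evaluate the model at p and again with each parameter perturbed by dp[i], difference, and compare against the analytic Jacobian. A self-contained toy version of that check (illustration only, not NIRVANA code):

import numpy

def toy_model(p, x):
    # v(x) = p[0] * tanh(x / p[1]); same functional form as a
    # hyperbolic-tangent rotation curve, used purely for illustration
    return p[0] * numpy.tanh(x / p[1])

def toy_deriv(p, x):
    # Analytic derivatives dv/dp[0] and dv/dp[1], stacked along a trailing axis
    sech2 = 1. / numpy.cosh(x / p[1])**2
    return numpy.stack([numpy.tanh(x / p[1]), -p[0] * x * sech2 / p[1]**2], axis=-1)

x = numpy.linspace(0.1, 10., 100)
p = numpy.array([200., 2.])
dp = numpy.array([1e-4, 1e-4])

v = toy_model(p, x)
dv = toy_deriv(p, x)

# Forward finite differences, perturbing one parameter at a time
vp = numpy.empty(v.shape + (p.size,), dtype=float)
for i in range(p.size):
    _p = p.copy()
    _p[i] += dp[i]
    vp[..., i] = toy_model(_p, x)
fd_dv = (vp - v[..., None]) / dp[None, :]

assert numpy.allclose(dv, fd_dv, rtol=0., atol=1e-2), \
        'Analytic and finite-difference derivatives differ'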
Example #4
def test_kinpa():
    disk = AxisymmetricDisk()
    disk.par[:2] = 0.       # Ensure that the center is at 0,0
    disk.par[3] = 45.       # Set the kinematic position angle

    n = 51
    x = numpy.arange(n, dtype=float)[::-1] - n//2
    y = numpy.arange(n, dtype=float) - n//2
    x, y = numpy.meshgrid(x, y)

    vel = disk.model(disk.par, x=x, y=y)

    # Set the global parameters with a flipped position angle
    gp = GlobalPar(pa=-135., ell=0.5, q0=0.)
    pa = gp.guess_kinematic_pa(x, y, vel)

    # Make sure the correct position angle was recovered
    assert numpy.isclose(pa, 45.), 'Position angle should have been flipped'
Example #5
File: lsq.py  Project: kbwestfall/NIRVANA
def test_lsq_nopsf():

    # Read the data to fit
    data_root = remote_data_file()
    kin = manga.MaNGAGasKinematics.from_plateifu(8078,
                                                 12703,
                                                 cube_path=data_root,
                                                 maps_path=data_root)

    nrun = 100
    for i in range(nrun):
        print('{0}/{1}'.format(i + 1, nrun), end='\r')
        # Set the rotation curve
        rc = HyperbolicTangent()
        # Set the disk velocity field
        disk = AxisymmetricDisk(rc)
        # Fit it with a non-linear least-squares optimizer
        disk.lsq_fit(kin)
    print('{0}/{0}'.format(nrun))
Example #6
def test_lsq_with_covar():
    # NOTE: This only fits the velocity field....

    # Read the data to fit
    data_root = remote_data_file()
    kin = manga.MaNGAGasKinematics.from_plateifu(8138, 12704, cube_path=data_root,
                                                 maps_path=data_root, covar=True)

    print('Forcing covariance to be positive definite.')
    kin.vel_covar = util.impose_positive_definite(kin.vel_covar)

    # Set the rotation curve
    rc = HyperbolicTangent(lb=numpy.array([0., 1e-3]), ub=numpy.array([500., kin.max_radius()]))
    # Set the disk velocity field
    disk = AxisymmetricDisk(rc=rc) #, dc=dc)
    # Fit it with a non-linear least-squares optimizer
#    import time
#    t = time.perf_counter()
    disk.lsq_fit(kin, sb_wgt=True)
#    print(f'First fit (no covar): {time.perf_counter()-t} s')

    # Rejected based on error-weighted residuals, accounting for intrinsic scatter
    resid = kin.vel - kin.bin(disk.model())
    err = 1/numpy.sqrt(kin.vel_ivar)
    scat = scatter.IntrinsicScatter(resid, err=err, gpm=disk.vel_gpm)
    sig, rej, gpm = scat.iter_fit(fititer=5) #, verbose=2)
    # Check
    assert sig > 8., 'Different intrinsic scatter'
    assert numpy.sum(rej) == 19, 'Different number of pixels were rejected'

    # Refit with new mask, include scatter and covariance
    kin.vel_mask = numpy.logical_not(gpm)
    p0 = disk.par
#    t = time.perf_counter()
    disk.lsq_fit(kin, scatter=sig, sb_wgt=True, p0=p0, ignore_covar=False,
                 assume_posdef_covar=True) #, verbose=2)
#    print(f'Second fit (w/ covar): {time.perf_counter()-t} s')

    # Reject
    resid = kin.vel - kin.bin(disk.model())
    scat = scatter.IntrinsicScatter(resid, covar=kin.vel_covar, gpm=disk.vel_gpm,
                                    assume_posdef_covar=True)
    sig, rej, gpm = scat.iter_fit(fititer=5) #, verbose=2)
    # Check
    assert sig > 5., 'Different intrinsic scatter'
    assert numpy.sum(rej) == 7, 'Different number of pixels were rejected'
    # Model parameters
    assert numpy.all(numpy.absolute(disk.par[:2]) < 0.1), 'Center changed'
    assert 165. < disk.par[2] < 167., 'PA changed'
    assert 56. < disk.par[3] < 58., 'Inclination changed'
    assert 249. < disk.par[5] < 252., 'Projected rotation changed'
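
The rejection step above relies on scatter.IntrinsicScatter.iter_fit. The underlying idea is iterative sigma-clipping of error-weighted residuals, with an intrinsic-scatter term added in quadrature to the measurement error. A generic numpy-only sketch of that idea (not the NIRVANA implementation):

import numpy

def iter_reject(resid, err, nsigma=3., maxiter=10):
    # Total per-point error is sqrt(err**2 + sig**2); points whose residuals
    # exceed nsigma times that error are rejected and the intrinsic scatter,
    # sig, is re-estimated from the survivors until the mask stops changing.
    gpm = numpy.ones(resid.size, dtype=bool)
    sig = 0.
    for _ in range(maxiter):
        # Excess variance beyond the formal errors (floored at zero)
        sig = numpy.sqrt(max(0., numpy.var(resid[gpm]) - numpy.mean(err[gpm]**2)))
        new_gpm = numpy.absolute(resid) < nsigma * numpy.sqrt(err**2 + sig**2)
        if numpy.array_equal(new_gpm, gpm):
            break
        gpm = new_gpm
    return sig, numpy.logical_not(gpm), gpm

# Mock data: Gaussian residuals with intrinsic scatter plus a few outliers
rng = numpy.random.default_rng(42)
err = numpy.full(500, 5.)
resid = rng.normal(scale=numpy.sqrt(err**2 + 8.**2))
resid[:5] += 100.
sig, rej, gpm = iter_reject(resid, err)
print(f'scatter = {sig:.1f}, rejected {rej.sum()} points')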
Example #7
def test_fisher():

    data_root = remote_data_file()
    for use_covar in [False, True]:
        kin = manga.MaNGAStellarKinematics.from_plateifu(8138, 12704, cube_path=data_root,
                                                         maps_path=data_root, covar=use_covar)

        # Set the parameters close to the best-fitting parameters from a previous
        # run
        p0 = numpy.array([-0.2, -0.08, 166.3, 53.0, 25.6, 217.0, 2.82, 189.7, 16.2])

        # Get the Fisher Information Matrix
        disk = AxisymmetricDisk(rc=HyperbolicTangent(), dc=Exponential())
        fim = disk.fisher_matrix(p0, kin, sb_wgt=True)

        # Use it to compute the correlation matrix
        covar = util.cinv(fim)
        var = numpy.diag(covar)
        rho = covar / numpy.sqrt(var[:,None]*var[None,:])

        # Get the upper triangle of the correlation matrix (without the main
        # diagonal)
        indx = numpy.triu_indices(rho.shape[0], k=1)

        # Get the indices of the parameters with the 4 strongest correlation coefficients
        srt = numpy.argsort(numpy.absolute(rho[indx]))[::-1][:4]

        # Check the result.  The 4 strongest correlations should be between:
        #   (7,8) - The two sigma parameters
        #   (1,4) - The y coordinate and the systemic velocity
        #   (3,5) - The inclination and the asymptotic rotation speed
        #   (5,6) - The two rotation curve parameters
        # The center coordinates (0,1) are also correlated, but should not be
        # among the 4 strongest pairs checked below.
        for correlated_pair in zip(indx[0][srt], indx[1][srt]):
            assert correlated_pair in [(7,8), (1,4), (3,5), (5,6)], \
                'Unexpected pair with strong correlation'
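
For a Gaussian least-squares problem, the Fisher information matrix is J^T C^{-1} J, where J is the Jacobian of the model with respect to the parameters and C is the data covariance; its inverse approximates the parameter covariance from which the correlation matrix above is built. A toy illustration of that algebra (not how disk.fisher_matrix is implemented):

import numpy

rng = numpy.random.default_rng(0)

# Toy linear model y = A @ p with independent Gaussian errors
ndata, npar = 50, 3
A = rng.normal(size=(ndata, npar))      # Jacobian of the model
ivar = numpy.full(ndata, 1. / 0.1**2)   # inverse variance of each datum

# Fisher matrix for independent errors: F = J^T diag(ivar) J
fim = A.T @ (ivar[:,None] * A)

# Parameter covariance and correlation matrices
covar = numpy.linalg.inv(fim)
var = numpy.diag(covar)
rho = covar / numpy.sqrt(var[:,None]*var[None,:])

# Strongest off-diagonal correlation
indx = numpy.triu_indices(rho.shape[0], k=1)
strongest = numpy.argmax(numpy.absolute(rho[indx]))
print(indx[0][strongest], indx[1][strongest], rho[indx][strongest])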
Example #8
def test_mock_err():

    data_root = remote_data_file()
    kin = manga.MaNGAStellarKinematics.from_plateifu(8138, 12704, cube_path=data_root,
                                                     maps_path=data_root)

    # Set the parameters close to the best-fitting parameters from a previous
    # run
    p0 = numpy.array([-0.2, -0.08, 166.3, 53.0, 25.6, 217.0, 2.82, 189.7, 16.2])

    disk = AxisymmetricDisk(rc=HyperbolicTangent(), dc=Exponential())
    v, s = disk.model(p0, x=kin.grid_x, y=kin.grid_y, sb=kin.grid_sb, beam=kin.beam_fft,
                      is_fft=True)
    _, bv, bs = kin.bin_moments(kin.grid_sb, v, s)
    vremap = kin.remap(bv, mask=kin.vel_mask)
    sremap = kin.remap(bs, mask=kin.sig_mask)

    rng = numpy.random.default_rng(seed=909)
    mock_kin = disk.mock_observation(p0, kin=kin, add_err=True, rng=rng)
    mock_vremap = mock_kin.remap('vel')
    mock_sremap = mock_kin.remap(numpy.sqrt(mock_kin.sig_phys2), mask=kin.sig_mask)

    assert numpy.ma.std(mock_vremap-vremap) > 5, 'Velocity error changed'
    assert numpy.ma.std(mock_sremap-sremap) > 7, 'Dispersion error changed'
Example #9
def test_disk_fit_derivative():

    # Read the data to fit
    data_root = remote_data_file()
    kin = manga.MaNGAStellarKinematics.from_plateifu(8138, 12704, cube_path=data_root,
                                                     maps_path=data_root)

    disk = AxisymmetricDisk(rc=HyperbolicTangent(), dc=Exponential())

    # Set the parameters close to the best-fitting parameters from a previous
    # run
    p0 = numpy.array([-0.2, -0.08, 166.3, 53.0, 25.6, 217.0, 2.82, 189.7, 16.2])
    # Finite difference test steps
    #                 x0      y0      pa     inc    vsys   vinf   hv      sig0   hsig
    dp = numpy.array([0.0001, 0.0001, 0.001, 0.001, 0.001, 0.001, 0.0001, 0.001, 0.0001])

    # Run the fit preparation
    disk._fit_prep(kin, p0, None, None, True, True, True, None)
    # Get the method used to generate the figure-of-merit and the jacobian
    fom = disk._get_fom()
    jac = disk._get_jac()
    # Get the fom and the jacobian
    chi = fom(p0)
    dchi = jac(p0)

    # Brute force it
    chip = numpy.empty(dchi.shape, dtype=float)
    p = disk.par.copy()
    for i in range(disk.par.size):
        _p = p.copy()
        _p[i] += dp[i]
        chip[...,i] = fom(_p)
    disk._set_par(p)

    # Compare them
    fd_dchi = (chip - chi[...,None])/dp[None,:]
    for i in range(disk.par.size):
#        diff = numpy.absolute(dchi[...,i]-fd_dchi[...,i])
#        print(i, numpy.amax(diff), numpy.amin(diff))
#        continue
        assert numpy.allclose(dchi[...,i], fd_dchi[...,i], rtol=0., atol=1e-3), \
                f'Finite difference produced different derivative for parameter {i+1}!'
Example #10
def test_disk():
    disk = AxisymmetricDisk()
    disk.par[:2] = 0.       # Ensure that the center is at 0,0
    disk.par[-1] = 1.       # Put in a quickly rising RC

    n = 51
    x = numpy.arange(n, dtype=float)[::-1] - n//2
    y = numpy.arange(n, dtype=float) - n//2
    x, y = numpy.meshgrid(x, y)

    vel = disk.model(disk.par, x=x, y=y)
    beam = gauss2d_kernel(n, 3.)
    _vel = disk.model(disk.par, x=x, y=y, beam=beam)

    assert numpy.isclose(vel[n//2,n//2], _vel[n//2,n//2]), 'Smearing moved the center.'
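
gauss2d_kernel presumably returns a normalized 2D Gaussian image used to smear the model maps; the check above works because the (vsys-subtracted) velocity field of an axisymmetric disk is point-antisymmetric about the dynamical center, so convolution with a symmetric, centered kernel leaves the central value unchanged. A numpy/scipy stand-in demonstrating the same point (the kernel construction here is an assumption, not the NIRVANA function):

import numpy
from scipy.signal import fftconvolve

n = 51
x = numpy.arange(n, dtype=float)[::-1] - n//2
y = numpy.arange(n, dtype=float) - n//2
x, y = numpy.meshgrid(x, y)

# Toy stand-in for a line-of-sight velocity field that is point-antisymmetric
# about the center: v(-x,-y) = -v(x,y), with v = 0 at the center
r = numpy.sqrt(x**2 + y**2)
vel = numpy.where(r > 0, 100. * numpy.tanh(r) * x / (r + (r == 0)), 0.)

# Normalized, centered 2D Gaussian kernel (a guess at what gauss2d_kernel(n, 3.)
# produces, with sigma = 3 pixels)
sig = 3.
beam = numpy.exp(-0.5 * (x**2 + y**2) / sig**2)
beam /= beam.sum()

# Smearing with the symmetric kernel leaves the central value unchanged
_vel = fftconvolve(vel, beam, mode='same')
assert numpy.isclose(vel[n//2,n//2], _vel[n//2,n//2], atol=1e-8), \
        'Smearing moved the center.'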
Example #11
def test_disk_derivative():
    disk = AxisymmetricDisk(rc=HyperbolicTangent(), dc=Exponential())
    
    # Ensure that center is offset from 0,0 because of derivative calculation when r==0.
    disk.par[:2] = 0.1
    # Use a slowly rising rotation curve.  More quickly rising rotation curves
    # show a greater difference between the finite-difference and direct
    # derivative calculations after the convolution.
    disk.par[-3] = 20.

    # Finite difference test steps
    #                 x0      y0      pa     inc    vsys   vinf   hv      sig0   hsig
    dp = numpy.array([0.0001, 0.0001, 0.001, 0.001, 0.001, 0.001, 0.0001, 0.001, 0.0001])

    n = 101
    x = numpy.arange(n, dtype=float)[::-1] - n//2
    y = numpy.arange(n, dtype=float) - n//2
    x, y = numpy.meshgrid(x, y)

    v, sig, dv, dsig = disk.deriv_model(disk.par, x=x, y=y)
    vp = numpy.empty(v.shape+(disk.par.size,), dtype=float)
    sigp = numpy.empty(v.shape+(disk.par.size,), dtype=float)
    p = disk.par.copy()
    for i in range(disk.par.size):
        _p = p.copy()
        _p[i] += dp[i]
        # These calls to `model` reuse the previously provided x and y
        vp[...,i], sigp[...,i] = disk.model(_p)
    disk._set_par(p)

    fd_dv = (vp - v[...,None])/dp[None,:]
    fd_dsig = (sigp - sig[...,None])/dp[None,:]
    for i in range(disk.par.size):
        assert numpy.allclose(dv[...,i], fd_dv[...,i], rtol=0., atol=1e-4), \
                f'Finite difference produced different velocity derivative for parameter {i+1}!'
        # The precision is worse for dsig/dx0 and dsig/dy0 at x=y=0.0.  Not sure
        # why.  The larger atol is to account for this.
        assert numpy.allclose(dsig[...,i], fd_dsig[...,i], rtol=0., atol=3e-3), \
                f'Finite difference produced different sigma derivative for parameter {i+1}!'

    # Now include the beam-smearing
    beam = gauss2d_kernel(n, 3.)
    try:
        cnvfftw = ConvolveFFTW(beam.shape)
    except:
        cnvfftw = None
    v, sig, dv, dsig = disk.deriv_model(disk.par, x=x, y=y, beam=beam, cnvfftw=cnvfftw)
    vp = numpy.empty(v.shape+(disk.par.size,), dtype=float)
    sigp = numpy.empty(v.shape+(disk.par.size,), dtype=float)
    p = disk.par.copy()
    for i in range(disk.par.size):
        _p = p.copy()
        _p[i] += dp[i]
        # These calls to `model` reuse the previously provided x, y, beam, and
        # cnvfftw
        vp[...,i], sigp[...,i] = disk.model(_p)
    disk._set_par(p)

    fd_dv = (vp - v[...,None])/dp[None,:]
    fd_dsig = (sigp - sig[...,None])/dp[None,:]
    for i in range(disk.par.size):
        assert numpy.allclose(dv[...,i], fd_dv[...,i], rtol=0., atol=1e-4), \
                f'Finite difference produced different derivative for parameter {i+1}!'
        # Apparently the convolution smooths out the difference seen in the test above
        assert numpy.allclose(dsig[...,i], fd_dsig[...,i], rtol=0., atol=1e-4), \
                f'Finite difference produced different sigma derivative for parameter {i+1}!'
Example #12
def test_disk_derivative_nosig():
    disk = AxisymmetricDisk()
    
    # Ensure that center is offset from 0,0 because of derivative calculation when r==0.
    disk.par[:2] = 0.1
    # Use a slowly rising rotation curve.  More quickly rising rotation curves
    # show a greater difference between the finite-difference and direct
    # derivative calculations after the convolution.
    disk.par[-1] = 20.

    # Finite difference test steps
    #                 x0      y0      pa     inc    vsys   vinf   hv
    dp = numpy.array([0.0001, 0.0001, 0.001, 0.001, 0.001, 0.001, 0.0001])

    n = 101
    x = numpy.arange(n, dtype=float)[::-1] - n//2
    y = numpy.arange(n, dtype=float) - n//2
    x, y = numpy.meshgrid(x, y)

    v, dv = disk.deriv_model(disk.par, x=x, y=y)
    vp = numpy.empty(v.shape+(disk.par.size,), dtype=float)
    p = disk.par.copy()
    for i in range(disk.par.size):
        _p = p.copy()
        _p[i] += dp[i]
        # These calls to `model` reuse the previously provided x and y
        vp[...,i] = disk.model(_p)
    disk._set_par(p)

    fd_dv = (vp - v[...,None])/dp[None,:]
    for i in range(disk.par.size):
        assert numpy.allclose(dv[...,i], fd_dv[...,i], rtol=0., atol=1e-4), \
                f'Finite difference produced different derivative for parameter {i+1}!'

    # Now include the beam-smearing
    beam = gauss2d_kernel(n, 3.)
    try:
        cnvfftw = ConvolveFFTW(beam.shape)
    except:
        cnvfftw = None
    v, dv = disk.deriv_model(disk.par, x=x, y=y, beam=beam, cnvfftw=cnvfftw)
    vp = numpy.empty(v.shape+(disk.par.size,), dtype=float)
    p = disk.par.copy()
    for i in range(disk.par.size):
        _p = p.copy()
        _p[i] += dp[i]
        # These calls to `model` reuse the previously provided x, y, beam, and
        # cnvfftw
        vp[...,i] = disk.model(_p)
    disk._set_par(p)

    fd_dv = (vp - v[...,None])/dp[None,:]
    for i in range(disk.par.size):
        assert numpy.allclose(dv[...,i], fd_dv[...,i], rtol=0., atol=1e-4), \
                f'Finite difference produced different derivative for parameter {i+1}!'
Example #13
def main(args):

    nirvana_root = 'nirvana-manga-axisym'

    # Check input directory and attempt to find files to collate
    if not os.path.isdir(args.dir):
        raise NotADirectoryError(f'{args.dir} does not exist!')
    files = [
        str(p.resolve())
        for p in pathlib.Path(args.dir).rglob(f'{nirvana_root}*.fits.gz')
    ]
    if len(files) == 0:
        raise ValueError(
            f'No files found with the expected naming convention in {args.dir}!'
        )

    # Use the first file in the list to get the rc and dc class names, and check
    # that it is an AxisymmetricDisk model
    with fits.open(files[0]) as hdu:
        if hdu[0].header['MODELTYP'] != 'AxisymmetricDisk':
            raise ValueError(
                'First file does not set MODELTYP=AxisymmetricDisk!')
        rc_class = hdu[0].header['RCMODEL']
        dc_class = hdu[0].header['DCMODEL'] if 'DCMODEL' in hdu[0].header else None

    oroot = np.unique([os.path.dirname(os.path.dirname(f)) for f in files])
    if len(oroot) > 1:
        raise ValueError(
            'Currently cannot handle more than one root directory.')
    oroot = oroot[0]

    # Get the list of plates, ifus, and tracers
    if not args.full:
        pltifutrc = np.array([
            '-'.join(os.path.basename(f).split('.')[0].split('-')[-3:])
            for f in files
        ])
        if pltifutrc.size != np.unique(pltifutrc).size:
            raise ValueError(
                f'All plate-ifu-tracer must be unique for files found in {args.dir}!'
            )
        plateifu = np.unique(['-'.join(p.split('-')[:2]) for p in pltifutrc])
    else:
        plateifu = None

    # Check the output file and determine if it is expected to be compressed
    if os.path.isfile(args.ofile) and not args.overwrite:
        raise FileExistsError(f'{args.ofile} already exists!')
    if args.ofile.split('.')[-1] == 'gz':
        _ofile = args.ofile[:args.ofile.rfind('.')]
        compress = True
    else:
        _ofile = args.ofile
        compress = False

    # Attempt to find the DRPall and DAPall files
    if args.dapall is None:
        _dapall_path = manga_paths(0,
                                   0,
                                   dr=args.dr,
                                   analysis_path=args.analysis)[3]
        _dapall_file = manga_file_names(0, 0, dr=args.dr)[3]
        _dapall_file = os.path.join(_dapall_path, _dapall_file)
    else:
        _dapall_file = args.dapall
    if not os.path.isfile(_dapall_file):
        raise FileNotFoundError(f'{_dapall_file} does not exist!')

    # Check the input rotation curve and dispersion profile parameterization
    func1d_classes = all_subclasses(Func1D)
    func1d_class_names = [c.__name__ for c in func1d_classes]
    if rc_class not in func1d_class_names:
        raise ValueError(f'{rc_class} is not a known parameterization.')
    if dc_class is not None and dc_class not in func1d_class_names:
        raise ValueError(f'{dc_class} is not a known parameterization.')
    disk = AxisymmetricDisk(
        rc=func1d_classes[func1d_class_names.index(rc_class)](),
        dc=None if dc_class is None else
        func1d_classes[func1d_class_names.index(dc_class)]())

    # Get the data type for the output table
    _dtype = _fit_meta_dtype(disk.par_names(short=True))
    meta_keys = [d[0] for d in _dtype]
    _dtype += [('DRPALLINDX', int), ('DAPALLINDX', int),
               ('FINISHED', int), ('QUAL', int)]

    # Read the DAPall file
    with fits.open(_dapall_file) as hdu:
        dapall = hdu[args.daptype].data

    # Generate the list of observations to fit
    indx = np.where(dapall['DAPDONE'])[0] if args.full \
                else np.array([np.where(dapall['PLATEIFU'] == p)[0][0] for p in plateifu])

    gas_metadata = fileio.init_record_array(indx.size, _dtype)
    str_metadata = fileio.init_record_array(indx.size, _dtype)

    for i, j in enumerate(indx):
        print(f'Collating {i+1}/{indx.size}', end='\r')
        plate = dapall['PLATE'][j]
        ifu = dapall['IFUDESIGN'][j]

        gas_metadata['DRPALLINDX'][i] = dapall['DRPALLINDX'][j]
        gas_metadata['DAPALLINDX'][i] = j
        gas_file = os.path.join(oroot, str(plate),
                                f'{nirvana_root}-{plate}-{ifu}-Gas.fits.gz')
        if os.path.isfile(gas_file):
            with fits.open(gas_file) as hdu:
                # Confirm the output has the expected model parameterization
                if hdu[0].header['MODELTYP'] != 'AxisymmetricDisk' \
                        or hdu[0].header['RCMODEL'] != rc_class \
                        or dc_class is not None and hdu[0].header['DCMODEL'] != dc_class \
                        or dc_class is None and 'DCMODEL' in hdu[0].header:
                    warnings.warn(
                        f'{gas_file} is not an AxisymmetricDisk fit with the same model '
                        f'parameterization as determined by the template file, '
                        f'{files[0]}.  Skipping this file.')
                    # FINISHED will already be set to 0; set QUAL to 2 (define
                    # this in AxisymmetricDiskGlobalBitMask?)
                    gas_metadata['QUAL'][i] = 2
                else:
                    gas_metadata['FINISHED'][i] = 1
                    gas_metadata['QUAL'][i] = hdu[0].header['QUAL']
                    for k in meta_keys:
                        gas_metadata[k][i] = hdu['FITMETA'].data[k][0]
        else:
            # FINISHED will already be set to 0; set QUAL to 2 (define this in
            # AxisymmetricDiskGlobalBitMask?)
            gas_metadata['QUAL'][i] = 2
            # And save the identifier information
            gas_metadata['MANGAID'][i] = dapall['MANGAID'][j]
            gas_metadata['PLATE'][i] = plate
            gas_metadata['IFU'][i] = ifu

        str_metadata['DRPALLINDX'][i] = dapall['DRPALLINDX'][j]
        str_metadata['DAPALLINDX'][i] = j
        str_file = os.path.join(oroot, str(plate),
                                f'{nirvana_root}-{plate}-{ifu}-Stars.fits.gz')
        if os.path.isfile(str_file):
            with fits.open(str_file) as hdu:
                # Confirm the output has the expected model parameterization
                if hdu[0].header['MODELTYP'] != 'AxisymmetricDisk' \
                        or hdu[0].header['RCMODEL'] != rc_class \
                        or dc_class is not None and hdu[0].header['DCMODEL'] != dc_class \
                        or dc_class is None and 'DCMODEL' in hdu[0].header:
                    warnings.warn(
                        f'{str_file} is not an AxisymmetricDisk fit with the same model '
                        f'parameterization as determined by the template file, '
                        f'{files[0]}.  Skipping this file.')
                    # FINISHED will already be set to 0; set QUAL to 2 (define
                    # this in AxisymmetricDiskGlobalBitMask?)
                    str_metadata['QUAL'][i] = 2
                else:
                    str_metadata['FINISHED'][i] = 1
                    str_metadata['QUAL'][i] = hdu[0].header['QUAL']
                    for k in meta_keys:
                        str_metadata[k][i] = hdu['FITMETA'].data[k][0]
        else:
            # FINISHED will already be set to 0; set QUAL to 2 (define this in
            # AxisymmetricDiskGlobalBitMask?)
            str_metadata['QUAL'][i] = 2
            # And save the identifier information
            str_metadata['MANGAID'][i] = dapall['MANGAID'][j]
            str_metadata['PLATE'][i] = plate
            str_metadata['IFU'][i] = ifu

    print(f'Collating {indx.size}/{indx.size}')

    # TODO: Add more to the headers?
    hdr = fits.Header()
    hdr['MODELTYP'] = 'AxisymmetricDisk'
    hdr['RCMODEL'] = rc_class
    if dc_class is not None:
        hdr['DCMODEL'] = dc_class
    fits.HDUList([
        fits.PrimaryHDU(header=hdr),
        fits.BinTableHDU.from_columns([
            fits.Column(name=n,
                        format=fileio.rec_to_fits_type(gas_metadata[n]),
                        array=gas_metadata[n])
            for n in gas_metadata.dtype.names
        ],
                                      name='GAS',
                                      header=hdr),
        fits.BinTableHDU.from_columns([
            fits.Column(name=n,
                        format=fileio.rec_to_fits_type(str_metadata[n]),
                        array=str_metadata[n])
            for n in str_metadata.dtype.names
        ],
                                      name='STARS',
                                      header=hdr)
    ]).writeto(_ofile, overwrite=True, checksum=True)
    if compress:
        # Compress the file
        fileio.compress_file(_ofile, overwrite=True)
        # Get rid of the uncompressed file
        os.remove(_ofile)
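
The final step gzips the output file and deletes the uncompressed version. A plain-Python equivalent of that compress-then-remove step (an assumption about what fileio.compress_file does internally):

import gzip
import os
import shutil

def gzip_and_remove(path, overwrite=False):
    # Gzip `path` to `path + '.gz'` and delete the original file
    ofile = path + '.gz'
    if os.path.isfile(ofile) and not overwrite:
        raise FileExistsError(f'{ofile} already exists!')
    with open(path, 'rb') as f_in, gzip.open(ofile, 'wb') as f_out:
        shutil.copyfileobj(f_in, f_out)
    os.remove(path)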