Example #1
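The snippets in these examples are test functions from the MaNGA DAP test suite and are shown without their module-level imports. Below is a minimal sketch of the imports they rely on; the active lines cover only the standard-library and third-party names used directly, while the mangadap imports are left as a commented hint because the exact module paths are assumptions that may vary between mangadap versions.

# Minimal import sketch for the test snippets below (assumptions noted inline).
import os
from pathlib import Path

import numpy
import pytest
from astropy.io import fits

# The mangadap names used below (MaNGADataCube, MaNGARSS, MaNGAConfig, Covariance,
# TemplateLibrary, DRPComplete, available_reduction_assessments, ReductionAssessmentDef,
# remote_data_file, drp_test_version, ...) come from the mangadap package; their exact
# module paths are version-dependent and are not reproduced here.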
def test_calculate_covariance():

    assert script_help_okay(
        'dap_calculate_covariance'), 'Basic help call failed'

    ofile = 'test.fits'
    if os.path.isfile(ofile):
        # Clean-up a previous failure
        os.remove(ofile)
    # Defaults to central channel
    # TODO: Test contents?
    calculate_covariance.main(
        calculate_covariance.parse_args(
            ['7815', '3702', ofile, '-d',
             remote_data_file()]))
    assert os.path.isfile(ofile), 'Output file not written.'

    # Do a number of channels
    os.remove(ofile)
    calculate_covariance.main(
        calculate_covariance.parse_args(
            ['7815', '3702', ofile, '-n', '11', '-d',
             remote_data_file()]))
    assert os.path.isfile(ofile), 'Output file not written.'

    # Run a specific wavelength
    os.remove(ofile)
    calculate_covariance.main(
        calculate_covariance.parse_args(
            ['7815', '3702', ofile, '-w', '4500.', '-d',
             remote_data_file()]))
    assert os.path.isfile(ofile), 'Output file not written.'

    # Clean-up
    os.remove(ofile)
Example #2
def test_stats():
    rss = MaNGARSS.from_plateifu(7815, 3702, directory_path=remote_data_file())

    method = ReductionAssessmentDef()
    #    methods = available_reduction_assessments()
    #    i = numpy.where([m['key'] == 'SNRG' for m in methods])[0]
    #    assert len(i) == 1, 'Could not find correct reduction assessment definition.'

    cenwave = rss.central_wavelength(response_func=method.response)
    assert numpy.isclose(cenwave,
                         4686.2), 'Central wavelength calculation changed'

    sig, var, snr = rss.flux_stats(response_func=method.response)

    assert sig.shape == (rss.nspec,), 'Should be one measurement per spectrum.'
    assert isinstance(sig, numpy.ma.MaskedArray), 'Expected masked arrays'
    assert numpy.ma.amax(snr) > 15, 'S/N changed'
    assert numpy.ma.median(snr) < 3, 'S/N changed'

    # Try it with the linear rss
    rss = MaNGARSS.from_plateifu(7815,
                                 3702,
                                 directory_path=remote_data_file(),
                                 log=False)
    _sig, _var, _snr = rss.flux_stats(response_func=method.response)
    # TODO: Not sure why these are not closer.
    assert numpy.absolute(numpy.ma.median((sig-_sig)/_sig)) < 0.01, \
            'Signal should be the same to better than 1%.'
    assert numpy.absolute(numpy.ma.median((var-_var)/_var)) < 0.03, \
            'Variance should be the same to better than 3%.'
    assert numpy.absolute(numpy.ma.median((snr-_snr)/_snr)) < 0.02, \
            'S/N should be the same to better than 2%.'
Example #3
def test_stats():
    cube = MaNGADataCube.from_plateifu(7815,
                                       3702,
                                       directory_path=remote_data_file())

    # Create a fake bin map
    bin_indx = numpy.arange(cube.nspec // 4,
                            dtype=int).reshape(cube.spatial_shape[0] // 2,
                                               cube.spatial_shape[1] // 2)
    bin_indx = numpy.repeat(bin_indx, 2, axis=0)
    bin_indx = numpy.repeat(bin_indx, 2, axis=1)

    # Get the bin area
    bins, area = cube.binned_on_sky_area(bin_indx)

    assert numpy.array_equal(bins, numpy.arange(cube.nspec // 4)), 'Bad bin list'
    assert numpy.allclose(area, 1.), 'Bad area calculation'

    methods = available_reduction_assessments()
    i = numpy.where([m['key'] == 'SNRG' for m in methods])[0]
    assert len(i) == 1, 'Could not find correct reduction assessment definition.'

    cen_wave = cube.central_wavelength(
        response_func=methods[i[0]]['response_func'],
        flag=cube.do_not_use_flags())
    assert numpy.isclose(cen_wave, 4638.0), 'Central wavelength changed.'

    cen_wave = cube.central_wavelength(waverange=[4000, 8000],
                                       flag=cube.do_not_use_flags(),
                                       fluxwgt=True)
    assert numpy.isclose(cen_wave, 5895.7), 'Central wavelength changed.'

    cen_wave = cube.central_wavelength(waverange=[4000, 8000],
                                       flag=cube.do_not_use_flags(),
                                       per_pixel=False)
    assert numpy.isclose(cen_wave, 6044.9), 'Central wavelength changed.'

    sig, var, snr = cube.flux_stats(
        response_func=methods[i[0]]['response_func'])
    assert sig.shape == cube.spatial_shape, 'Should be shaped as a map.'
    assert isinstance(sig, numpy.ma.MaskedArray), 'Expected masked arrays'
    assert numpy.ma.amax(snr) > 60, 'S/N changed'

    # Try it with the linear cube
    cube = MaNGADataCube.from_plateifu(7815,
                                       3702,
                                       directory_path=remote_data_file(),
                                       log=False)
    _sig, _var, _snr = cube.flux_stats(
        response_func=methods[i[0]]['response_func'])
    # TODO: Not sure why these are not closer.
    assert numpy.absolute(numpy.ma.median((sig-_sig)/_sig)) < 0.01, \
            'Signal should be the same to better than 1%.'
    assert numpy.absolute(numpy.ma.median((var-_var)/_var)) < 0.03, \
            'Variance should be the same to better than 3%.'
    assert numpy.absolute(numpy.ma.median((snr-_snr)/_snr)) < 0.02, \
            'S/N should be the same to better than 2%.'
Example #4
def test_from_file():
    file = remote_data_file('manga-7815-3702-LOGCUBE.fits.gz')
    cfg = MaNGAConfig.from_file(file)

    assert isinstance(cfg.directory_path,
                      Path), 'Directory should be a Path instance'
    assert cfg.directory_path == Path(
        remote_data_file()).resolve(), 'Directory path changed'
    assert cfg.plate == 7815, 'Plate changed'
    assert cfg.log, 'Log binning should be true'
    assert cfg.mode == 'CUBE', 'Should be in CUBE mode'
    assert cfg.file_name == 'manga-7815-3702-LOGCUBE.fits.gz'
Example #5
def test_wcs():
    rss = MaNGARSS.from_plateifu(7815, 3702, directory_path=remote_data_file())
    # Unrestricted
    x, y = rss.mean_sky_coordinates()

    methods = available_reduction_assessments()
    i = numpy.where([m['key'] == 'SNRG' for m in methods])[0]
    assert len(i) == 1, 'Could not find correct reduction assessment definition.'
    # Weighted by the g-band
    _x, _y = rss.mean_sky_coordinates(response_func=methods[i[0]]['response_func'])
    assert numpy.ma.amax(x-_x) - numpy.ma.amin(x-_x) > 0, 'Should be different'
    assert numpy.ma.amax(y-_y) - numpy.ma.amin(y-_y) > 0, 'Should be different'

    # Find a cluster of dithers
    d = numpy.ma.sqrt(numpy.square(_x) + numpy.square(_y))
    srt = numpy.argsort(d)
    theta = numpy.arctan2(-_y, _x)
    indx = theta[srt[:10]] < -2
    assert numpy.sum(indx) == 5, 'Should find close fiber positions'
    indx = srt[:10][indx]
    bin_indx = numpy.full(rss.nspec, -1, dtype=int)
    bin_indx[indx] = 0

    bins, area = rss.binned_on_sky_area(bin_indx, response_func=methods[i[0]]['response_func'])
    assert numpy.array_equal(bins, [0]), 'Should only be one bin'
    try:
        import shapely
    except ImportError:
        # Could not import shapely for the proper calculation, so test the stupid calculation
        assert area[0] == numpy.sum(rss.area[bin_indx > -1]), \
                'Stupid calculation is just the sum of the fiber area'
    else:
        # Check the proper calculation
        assert area[0] < numpy.sum(rss.area[bin_indx > -1]), \
                'Area should be substantially smaller than the stupid calculation yields.'
Example #6
def test_read_lin():
    cube = MaNGADataCube.from_plateifu(7815,
                                       3702,
                                       directory_path=remote_data_file(),
                                       log=False)
    assert not cube.log, 'Wavelength sampling should be linear'
    assert numpy.isclose(numpy.std(numpy.diff(cube.wave)), 0.), \
                'Wavelength sampling should be linear'
Example #7
def test_sres_ext():
    file = remote_data_file(filename=MaNGARSS.build_file_name(7815, 3702, log=True))
    hdu = fits.open(file)
    assert MaNGARSS.spectral_resolution_extension(hdu) == 'LSFPRE', \
                'Bad spectral resolution extension selection'
    assert MaNGARSS.spectral_resolution_extension(hdu, ext='SPECRES') == 'SPECRES', \
                'Bad spectral resolution extension selection'
    assert MaNGARSS.spectral_resolution_extension(hdu, ext='junk') is None, \
                'Should return None for a bad extension name.'
Example #8
def test_sres_ext():
    cfg = MaNGAConfig(7815, 3702)
    file = remote_data_file(filename=cfg.file_name)
    hdu = fits.open(file)
    assert MaNGAConfig.spectral_resolution_extension(hdu) == 'LSFPRE', \
                'Bad spectral resolution extension selection'
    assert MaNGAConfig.spectral_resolution_extension(hdu, ext='SPECRES') == 'SPECRES', \
                'Bad spectral resolution extension selection'
    assert MaNGAConfig.spectral_resolution_extension(hdu, ext='junk') is None, \
                'Should return None for a bad extension name.'
Example #9
def test_copyto():
    cube = MaNGADataCube.from_plateifu(7815,
                                       3702,
                                       directory_path=remote_data_file())
    flux = cube.copy_to_array()
    assert not isinstance(flux,
                          numpy.ma.MaskedArray), 'Should output normal array'
    assert flux.shape[0] == cube.nspec, 'Should be flattened into a 2D array.'
    assert flux.shape[1] == cube.nwave, 'Should be flattened into a 2D array.'

    # Apply a wavelength mask
    waverange = [5000, 7000]
    flux = cube.copy_to_array(waverange=waverange)
    indx = (cube.wave > waverange[0]) & (cube.wave < waverange[1])
    assert flux.shape[1] == numpy.sum(indx), 'Wavelength range masking failed'

    # Find the spaxels with non-zero signal
    methods = available_reduction_assessments()
    i = numpy.where([m['key'] == 'SNRG' for m in methods])[0]
    assert len(i) == 1, 'Could not find correct reduction assessment definition.'
    sig, var, snr = cube.flux_stats(
        response_func=methods[i[0]]['response_func'])
    indx = ((sig > 0) & numpy.invert(numpy.ma.getmaskarray(sig))).data.ravel()
    ngood = numpy.sum(indx)

    # Select the spaxels with non-zero signal
    flux = cube.copy_to_array(waverange=waverange, select_bins=indx)
    assert flux.shape[0] == ngood, 'Bin selection failed'

    # Get the masked array
    flux = cube.copy_to_masked_array()
    assert isinstance(flux,
                      numpy.ma.MaskedArray), 'Should output a masked array'
    assert flux.shape[0] == cube.nspec, 'Should be flattened into a 2D array.'
    assert flux.shape[1] == cube.nwave, 'Should be flattened into a 2D array.'

    # Select the spaxels with non-zero signal
    flux = cube.copy_to_masked_array(select_bins=indx)
    assert flux.shape[0] == ngood, 'Bin selection failed'

    # Try to get the inverse variance
    i = cube.nspec // 2 + cube.spatial_shape[1] // 2
    ivar = cube.copy_to_masked_array(attr='ivar')
    assert ivar.shape == (cube.nspec, cube.nwave), 'Bad ivar shape'
    assert numpy.array_equal(
        cube.ivar[numpy.unravel_index(i, cube.spatial_shape)],
        ivar[i].data), 'Did not pull ivar data.'

    # Try to get the spectral resolution
    sres = cube.copy_to_masked_array(attr='sres')
    assert sres.shape == (cube.nspec, cube.nwave), 'Bad sres shape'
    assert numpy.array_equal(
        cube.sres[numpy.unravel_index(i, cube.spatial_shape)],
        sres[i].data), 'Did not pull sres data.'
Example #10
def test_match_resolution():
    cube = MaNGADataCube.from_plateifu(7815,
                                       3702,
                                       directory_path=remote_data_file())
    tpl = TemplateLibrary('MILESHC',
                          cube=cube,
                          match_resolution=True,
                          velscale_ratio=4,
                          hardcopy=False,
                          output_path=remote_data_file())

    # Resolution should be virtually identical in unmasked regions
    indx = tpl['MASK'].data == 0
    assert numpy.std(tpl.sres(tpl['WAVE'].data[indx[0]]) - tpl['SPECRES'].data[0,indx[0]]) < 0.1, \
                'Spectral resolution difference is above tolerance.'

    # Check the file that would have been written has the expected path
    assert cube.directory_path == tpl.directory_path, 'Cube and TPL paths should match.'
    assert tpl.file_name().startswith(
        cube.output_root), 'TPL file should start with the cube root'
Example #11
def test_rectification_shape():
    # Load the datacube and the row-stacked spectra
    cube = MaNGADataCube.from_plateifu(7815, 3702, directory_path=remote_data_file())
    cube.load_rss()

    # Get the rectification parameters
    pixelscale, rlim, sigma, recenter, width_buffer \
            = MaNGARSS._parse_rectification_parameters(None, None, None, None, None)
    # Get the cube dimensions
    cube.rss._cube_dimensions(pixelscale=pixelscale, recenter=recenter, width_buffer=width_buffer)
    # Make sure they match what the DRP produced
    assert cube.spatial_shape == (cube.rss.nx, cube.rss.ny), 'Mismatched cube spatial dimensions'
Example #12
def test_read():
    rss = MaNGARSS.from_plateifu(7815, 3702, directory_path=remote_data_file())

    assert rss.log, 'Should read the log-binned version by default.'
    assert len(rss.shape) == 2, 'Row-stacked spectra are 2D'
    assert rss.shape == (rss.nspec, rss.nwave), 'Shape mismatch'
    assert rss.sres is not None, 'Spectral resolution data was not constructed.'
    assert rss.sres_ext == 'LSFPRE', 'Should default to LSFPRE extension.'
    assert rss.xpos.shape == rss.shape, 'On-sky coordinates are wavelength-dependent'
    assert numpy.all(rss.area == numpy.pi), 'Area is pi square arcsec'
    assert rss.area.shape == (rss.nspec, ), 'Area is wavelength-independent'

    assert numpy.all(numpy.absolute(numpy.asarray(rss.pointing_offset())) < 0.01), \
                'Pointing offset for this observation should be small.'
Example #13
def test_read_correl():
    cube = MaNGADataCube.from_plateifu(7815,
                                       3702,
                                       directory_path=remote_data_file(),
                                       covar_ext='GCORREL')
    assert isinstance(cube.covar, Covariance), 'Incorrect type for covariance.'
    assert cube.covar.shape == (cube.nspec,
                                cube.nspec), 'Covariance has incorrect shape.'
    assert cube.covar.is_correlation, 'Covariance object should be in a correlation mode.'

    # Check that the variances are all unity (or close to it when it's defined)
    unique_var = numpy.unique(cube.covar.var)
    assert numpy.allclose(unique_var[unique_var > 0],
                          1.), 'Bad variance values'
Example #14
def test_match_resolution():
    cube = MaNGADataCube.from_plateifu(7815,
                                       3702,
                                       directory_path=remote_data_file())
    tpl = TemplateLibrary('MILESHC',
                          cube=cube,
                          match_resolution=True,
                          velscale_ratio=4,
                          hardcopy=False)

    # Resolution should be virtually identical in unmasked regions
    indx = tpl['MASK'].data == 0
    assert numpy.std(tpl.sres(tpl['WAVE'].data[indx[0]]) - tpl['SPECRES'].data[0,indx[0]]) < 0.1, \
                'Spectral resolution difference is above tolerance.'
Example #15
def test_write_cfg():
    ofile = 'test.ini'
    if os.path.isfile(ofile):
        # Remove existing files from failed tests
        os.remove(ofile)

    # Read the test DRPComplete file.
    # TODO: Temporarily override
    drp_test_version = 'v3_0_1'
    drpc = DRPComplete(drpver=drp_test_version,
                       directory_path=remote_data_file(),
                       readonly=True)

    # Write the base-level configuration file
    drpc.write_config(ofile, plate=7815, ifudesign=3702)

    # Read it and check the output
    cfg = DefaultConfig(ofile)
    assert cfg.getint('plate') == 7815, 'Plate number is wrong'
    assert cfg.getbool('log'), 'Should be selecting the logarithmically binned data'
    assert cfg.get('sres_ext') is None, 'Spectral resolution extension should be undefined'
    assert cfg.getfloat('z') == 2.9382300e-02, 'Bad redshift'

    # Try to write it again with overwrite set to False
    with pytest.raises(FileExistsError):
        drpc.write_config(ofile, plate=7815, ifudesign=3702, overwrite=False)

    # Set the spectral resolution flags
    drpc.write_config(ofile,
                      plate=7815,
                      ifudesign=3702,
                      overwrite=True,
                      sres_ext='SPECRES',
                      sres_fill=False)

    # Read it and check the output
    cfg = DefaultConfig(ofile)
    assert cfg.getint('plate') == 7815, 'Plate number is wrong'
    assert cfg.getbool('log'), 'Should be selecting the logarithmically binned data'
    assert cfg.get('sres_ext') == 'SPECRES', 'Spectral resolution extension incorrect'
    assert cfg.getfloat('z') == 2.9382300e-02, 'Bad redshift'

    # Clean-up
    os.remove(ofile)
Example #16
def test_read_drp():
    cfg = MaNGAConfig(7815, 3702)
    drpfile = remote_data_file(cfg.file_name)
    assert os.path.isfile(drpfile), 'Did not find file'

    with fits.open(drpfile) as hdu:
        covar = Covariance.from_fits(hdu, ivar_ext=None, covar_ext='GCORREL', impose_triu=True,
                                     correlation=True)
        var = numpy.ma.power(hdu['IVAR'].data[hdu['GCORREL'].header['BBINDEX']].T.ravel(),
                             -1).filled(0.0)

    covar = covar.apply_new_variance(var)
    covar.revert_correlation()

    assert numpy.array_equal(var, numpy.diag(covar.toarray())), 'New variance not applied'
Example #17
def test_copyto():
    rss = MaNGARSS.from_plateifu(7815, 3702, directory_path=remote_data_file())
    flux = rss.copy_to_array()
    assert not isinstance(flux,
                          numpy.ma.MaskedArray), 'Should output normal array'
    assert flux.shape == rss.shape, 'Both should be 2D arrays.'

    # Apply a wavelength mask
    waverange = [5000, 7000]
    flux = rss.copy_to_array(waverange=waverange)
    indx = (rss.wave > waverange[0]) & (rss.wave < waverange[1])
    assert flux.shape[1] == numpy.sum(indx), 'Wavelength range masking failed'

    # Find the spaxels with non-zero signal
    method = ReductionAssessmentDef()
    #    methods = available_reduction_assessments()
    #    i = numpy.where([m['key'] == 'SNRG' for m in methods])[0]
    #    assert len(i) == 1, 'Could not find correct reduction assessment definition.'
    sig, var, snr = rss.flux_stats(response_func=method.response)
    indx = ((sig > 0) & numpy.invert(numpy.ma.getmaskarray(sig))).data.ravel()
    ngood = numpy.sum(indx)

    # Select the spaxels with non-zero signal
    flux = rss.copy_to_array(waverange=waverange, select_bins=indx)
    assert flux.shape[0] == ngood, 'Bin selection failed'

    # Get the masked array
    flux = rss.copy_to_masked_array()
    assert isinstance(flux,
                      numpy.ma.MaskedArray), 'Should output a masked array'
    assert flux.shape == rss.shape, 'Both should be 2D arrays.'

    # Select the spaxels with non-zero signal
    flux = rss.copy_to_masked_array(select_bins=indx)
    assert flux.shape[0] == ngood, 'Bin selection failed'

    # Try to get the inverse variance
    i = rss.nspec // 2
    ivar = rss.copy_to_masked_array(attr='ivar')
    assert ivar.shape == rss.shape, 'Bad ivar shape'
    assert numpy.array_equal(rss.ivar[i],
                             ivar[i].data), 'Did not pull ivar data.'

    # Try to get the spectral resolution
    sres = rss.copy_to_masked_array(attr='sres')
    assert sres.shape == rss.shape, 'Bad sres shape'
    assert numpy.array_equal(rss.sres[i],
                             sres[i].data), 'Did not pull sres data.'
Example #18
def test_wcs():
    cube = MaNGADataCube.from_plateifu(7815,
                                       3702,
                                       directory_path=remote_data_file())
    x, y = cube.mean_sky_coordinates(offset=None)
    assert x[0, 0] > x[-1, 0], 'RA should increase from large to small indices'
    assert y[0, 0] < y[0, -1], 'DEC should increase from small to large indices'
    assert numpy.unravel_index(numpy.argmin( numpy.square(x - cube.prihdr['OBJRA'])
                                            + numpy.square(y - cube.prihdr['OBJDEC'])), x.shape) \
                == (21,21), 'Object should be at cube center.'
    x, y = cube.mean_sky_coordinates(center_coo=(x[0, 0], y[0, 0]))
    assert numpy.isclose(x[0, 0], 0.0) and numpy.isclose(y[0, 0], 0.0), 'Offset incorrect'
    x, y = cube.mean_sky_coordinates()
    assert abs(x[21, 21]) < 1e-2 and abs(y[21, 21]) < 1e-2, 'Offset incorrect'
Example #19
def test_write_dap_config_drpall():
    ofile = 'test.ini'
    if os.path.isfile(ofile):
        # Clean-up a previous failure
        os.remove(ofile)

    drpall = remote_data_file('drpall-{0}.fits'.format(drp_test_version))
    WriteDapConfig.main(WriteDapConfig.parse_args(['7815', '3702', ofile, '-a', drpall]))

    assert os.path.isfile(ofile), 'Output file not written.'

    cfg = DefaultConfig(ofile)
    assert cfg.getint('plate') == 7815, 'Wrong plate number'
    assert cfg.get('directory_path') is None, 'No directory path should be defined.'
    assert cfg.getfloat('z') == 0.0293823, 'Bad redshift'

    # Clean-up
    os.remove(ofile)
Example #20
def test_rectification_recovery():
    cube = MaNGADataCube.from_plateifu(7815,
                                       3702,
                                       directory_path=remote_data_file(),
                                       covar_ext='GCORREL')
    cube.load_rss()

    hdu = fits.open(cube.file_path())
    channel = hdu['GCORREL'].header['BBINDEX']

    gcorrel = numpy.zeros(eval(hdu['GCORREL'].header['COVSHAPE']), dtype=float)
    i = numpy.ravel_multi_index(
        (hdu['GCORREL'].data['INDXI_C1'], hdu['GCORREL'].data['INDXI_C2']),
        cube.spatial_shape)

    j = numpy.ravel_multi_index(
        (hdu['GCORREL'].data['INDXJ_C1'], hdu['GCORREL'].data['INDXJ_C2']),
        cube.spatial_shape)
    gcorrel[i, j] = hdu['GCORREL'].data['RHOIJ']
    gcorrel[j, i] = hdu['GCORREL'].data['RHOIJ']

    assert numpy.allclose(cube.covar.toarray(), gcorrel), 'Bad covariance read'

    flux, C = cube.rss.rectify_wavelength_plane(channel, return_covar=True)
    assert numpy.allclose(cube.flux[..., channel],
                          flux), 'Bad flux rectification'

    ivar = numpy.ma.power(C.variance().reshape(cube.spatial_shape),
                          -1).filled(0.0)
    assert numpy.allclose(cube.ivar[..., channel],
                          ivar), 'Bad inverse variance rectification'

    C.to_correlation()
    assert numpy.allclose(C.toarray(), gcorrel), 'Bad covariance calculation'

    sres = numpy.ma.divide(cube.rss.wave[channel],
                           cube.rss.instrumental_dispersion_plane(channel).ravel()) \
                / DAPConstants.sig2fwhm

    # WARNING: The computations done by the DRP and DAP are different
    # in detail, but (at least for this test cube) the results are
    # virtually identical except for notable outliers.
    assert numpy.ma.median(cube.sres[...,channel].ravel() - sres) < 0.1, \
            'Bad spectral resolution rectification'
Example #21
def test_read():
    cube = MaNGADataCube.from_plateifu(7815,
                                       3702,
                                       directory_path=remote_data_file())

    assert cube.log, 'Should read the log-binned version by default.'
    assert cube.wcs is not None, 'WCS should be defined.'
    assert cube.shape[:2] == cube.spatial_shape, 'Spatial shape should be first two axes.'
    assert cube.nspec == numpy.prod(cube.spatial_shape), \
            'Definition of number of spectra changed.'
    assert cube.sres is not None, 'Spectral resolution data was not constructed.'
    assert cube.sres_ext == 'LSFPRE', 'Should default to LSFPRE extension.'
    assert abs(cube.pixelscale -
               cube._get_pixelscale()) < 1e-6, 'Bad match in pixel scale.'
    # NOTE: This is worse than it should be because of how the WCS in MaNGA is defined.
    assert numpy.all(numpy.absolute(cube.wave - cube._get_wavelength_vector()) < 2e-4), \
            'Bad calculation of wavelength vector.'
    assert cube.covar is None, 'Covariance should not have been read'
Example #22
def test_covariance():
    cube = MaNGADataCube.from_plateifu(7815,
                                       3702,
                                       directory_path=remote_data_file())

    with pytest.raises(ValueError):
        # Have to load the RSS first
        cube.covariance_matrix(1000)

    # Load the RSS
    cube.load_rss()

    # Construct a covariance matrix
    C = cube.covariance_matrix(1000)
    assert C.shape == (1764, 1764), 'Bad covariance shape'

    # Make it a correlation matrix and check it
    C.to_correlation()

    # Check that the variances are all unity (or close to it when it's defined)
    unique_var = numpy.unique(numpy.diag(C.toarray()))
    assert numpy.allclose(unique_var[unique_var > 0],
                          1.), 'Bad correlation diagonal'

    # Try multiple channels
    C = cube.covariance_cube(channels=[1000, 2000])
    assert numpy.array_equal(C.input_indx, [1000, 2000]), 'Bad matrix indices'
    assert C.shape == (1764, 1764, 2), 'Bad covariance shape'

    # Try to convert multiple channels
    C.to_correlation()
    # And reverting it
    C.revert_correlation()

    # Try to generate an approximate correlation matrix, covariance
    # matrix, and covariance cube
    approxC = cube.approximate_correlation_matrix()
    approxC = cube.approximate_covariance_matrix(1000)
    approxC = cube.approximate_covariance_cube(channels=[1000, 2000])

    # Variance should be the same for direct and approximate calculations
    assert numpy.allclose(approxC.variance(),
                          C.variance()), 'Variances should be the same.'
Example #23
def main():

    overwrite = False
    if NETRC is None:
        usr, acc, passwd = None, None, None
    else:
        usr, acc, passwd = NETRC.authenticators(HOST)

    sas_dir = 'dr17' if NETRC is None else 'mangawork'
    sas_root = f'https://{HOST}/sas/{sas_dir}/manga/spectro'

    version = drp_test_version
    files = remote_data_files()
    plates = [f.split('-')[1] for f in files]

    local_root = remote_data_file()
    if not os.path.isdir(local_root):
        os.makedirs(local_root)

    # Get the spectral data
    for plate, f in zip(plates, files):
        url_root = f'{sas_root}/redux/{drp_test_version}/{plate}/stack/'
        try:
            download_file(url_root, usr, passwd, local_root, f, overwrite=overwrite)
        except Exception as e:
            print(str(e))
            continue

    # Get the DRPComplete file
    f = f'drpcomplete_{drp_test_version}.fits'
    url_root = f'{sas_root}/analysis/{drp_test_version}/{dap_test_version}/common/'
    try:
        download_file(url_root, usr, passwd, local_root, f, overwrite=overwrite)
    except Exception as e:
        print(str(e))

    # Get the DRPall file
    f = f'drpall-{drp_test_version}.fits'
    url_root = f'{sas_root}/redux/{drp_test_version}/'
    try:
        download_file(url_root, usr, passwd, local_root, f, overwrite=overwrite)
    except Exception as e:
        print(str(e))
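
The script above calls a download_file helper that is not included in this excerpt. As a rough illustration only, a minimal sketch matching the call signature used above (URL root, optional user/password, local directory, file name, overwrite flag) might look like the following; it is a hypothetical stand-in built on requests, not the helper's actual implementation.

# Hypothetical sketch of a download_file helper with the signature used above;
# the real helper in the source may differ.
import os
import requests

def download_file(url_root, usr, passwd, local_root, fname, overwrite=False):
    ofile = os.path.join(local_root, fname)
    if os.path.isfile(ofile) and not overwrite:
        # Existing file and overwrite not requested; nothing to do
        print(f'{ofile} exists; skipping.')
        return
    # Authenticate only if credentials were found (e.g., via ~/.netrc)
    auth = None if usr is None else (usr, passwd)
    r = requests.get(f'{url_root}{fname}', auth=auth, stream=True, timeout=300)
    r.raise_for_status()
    with open(ofile, 'wb') as f:
        for chunk in r.iter_content(chunk_size=1 << 20):
            f.write(chunk)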
Example #24
def test_write_dap_config_drpcomplete():
    ofile = 'test.ini'
    if os.path.isfile(ofile):
        # Clean-up a previous failure
        os.remove(ofile)

    # TODO: Temporarily override
    drp_test_version = 'v2_7_1'
    drpc = remote_data_file('drpcomplete_{0}.fits'.format(drp_test_version))
    write_dap_config.main(
        write_dap_config.parse_args(['7815', '3702', ofile, '-c', drpc]))

    assert os.path.isfile(ofile), 'Output file not written.'

    cfg = DefaultConfig(ofile)
    assert cfg.getint('plate') == 7815, 'Wrong plate number'
    assert cfg.get('directory_path') is None, 'No directory path should be defined.'
    assert cfg.getfloat('z') == 0.0293823, 'Bad redshift'

    # Clean-up
    os.remove(ofile)
Example #25
def test_covariance():
    rss = MaNGARSS.from_plateifu(7815, 3702, directory_path=remote_data_file())

    # Construct a covariance matrix
    C = rss.covariance_matrix(1000)
    assert C.shape == (1764, 1764), 'Bad covariance shape'

    # Make it a correlation matrix and check it
    C.to_correlation()

    # Check that the variances are all unity (or close to it when it's defined)
    unique_var = numpy.unique(numpy.diag(C.toarray()))
    assert numpy.allclose(unique_var[unique_var>0], 1.), 'Bad correlation diagonal'

    # Try multiple channels
    C = rss.covariance_cube(channels=[1000,2000])
    assert numpy.array_equal(C.input_indx, [1000,2000]), 'Bad matrix indices'
    assert C.shape == (1764, 1764, 2), 'Bad covariance shape'

    # Try to convert multiple channels
    C.to_correlation()
    # And reverting it
    C.revert_correlation()
Example #26
def test_load_rss():
    cube = MaNGADataCube.from_plateifu(7815,
                                       3702,
                                       directory_path=remote_data_file())
    cube.load_rss()