Code example #1
File: pipeline.py Project: crawfordsm/saltfppipe
def make_median(fnlist,outfile):
    """Creates a median image from a series of images.
    
    Inputs:
    fnlist -> List of strings pointing to fits images
    outfile -> Location of resultant median image
    
    """
    
    imagelist = []
    datalist = []
    for fn in fnlist:
        image = openfits(fn)
        imagelist.append(image)
        datalist.append(image[0].data)
    datalist = np.array(datalist)
    
    meddata = np.median(datalist,axis=0)
    
    medhdu = PrimaryHDU(meddata)
    # 'overwrite' replaces the long-deprecated 'clobber' keyword
    medhdu.writeto(outfile, overwrite=True)
    
    for image in imagelist:
        image.close()
    
    return
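A note on example #1: it assumes openfits is an alias for astropy's fits.open and np for NumPy. A self-contained sketch of the same median stack, for reference:

import numpy as np
from astropy.io import fits

def make_median_sketch(fnlist, outfile):
    # Stack the primary-HDU data of every input file along a new axis
    # and take the pixelwise median.
    datalist = np.array([fits.getdata(fn) for fn in fnlist])
    meddata = np.median(datalist, axis=0)
    fits.PrimaryHDU(meddata).writeto(outfile, overwrite=True)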
Code example #2
File: factory.py Project: mmorage/DRAGONS
    def createFromScratch(self, phu, extensions=None):
        """
        Creates an AstroData object from a collection of objects.
        """

        lst = HDUList()
        if phu is not None:
            if isinstance(phu, PrimaryHDU):
                lst.append(phu)
            elif isinstance(phu, Header):
                lst.append(PrimaryHDU(header=deepcopy(phu), data=DELAYED))
            elif isinstance(phu, (dict, list, tuple)):
                p = PrimaryHDU()
                p.header.update(phu)
                lst.append(p)
            else:
                raise ValueError(
                    "phu must be a PrimaryHDU or a valid header object")

        # TODO: Verify the contents of extensions...
        if extensions is not None:
            for ext in extensions:
                lst.append(ext)

        return self.getAstroData(lst)
Code example #3
    def hdu(self):
        if self.wcs is None:
            hdu = PrimaryHDU(self.value)
        else:
            hdu = PrimaryHDU(self.value, header=self.header)
        hdu.header['BUNIT'] = self.unit.to_string(format='fits')

        if 'beam' in self.meta:
            hdu.header.update(self.meta['beam'].to_header_keywords())

        return hdu
Code example #4
    def hdu(self):
        if self.wcs is None:
            hdu = PrimaryHDU(self.value)
        else:
            hdu = PrimaryHDU(self.value, header=self.wcs.to_header())
        hdu.header['BUNIT'] = self.unit.to_string(format='fits')

        if 'beam' in self.meta:
            hdu.header.update(self.meta['beam'].to_header_keywords())

        return hdu
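Examples #3 and #4 differ only in where the header comes from: #3 reuses a stored header, while #4 regenerates one from the WCS. For reference, WCS.to_header() converts an astropy.wcs.WCS into FITS cards that PrimaryHDU can carry; a minimal sketch:

import numpy as np
from astropy.io.fits import PrimaryHDU
from astropy.wcs import WCS

# Build a trivial 2-D celestial WCS and attach it to a primary HDU.
wcs = WCS(naxis=2)
wcs.wcs.ctype = ['RA---TAN', 'DEC--TAN']
wcs.wcs.crpix = [50.0, 50.0]
wcs.wcs.crval = [180.0, 0.0]
wcs.wcs.cdelt = [-0.0003, 0.0003]
hdu = PrimaryHDU(np.zeros((100, 100)), header=wcs.to_header())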
Code example #5
File: test_image_dask.py Project: Gabriel-p/astropy
def test_dtypes(dask_array_in_mem, tmp_path, dtype):

    filename = tmp_path / 'test.fits'

    array = dask_array_in_mem.astype(dtype)

    hdu = PrimaryHDU(data=array)
    hdu.writeto(filename)

    with fits.open(filename) as hdulist_new:
        assert isinstance(hdulist_new[0].data, np.ndarray)
        np.testing.assert_allclose(hdulist_new[0].data, array.compute())
Code example #6
File: test_image_dask.py Project: Gabriel-p/astropy
def test_save_primary_hdu(dask_array_in_mem, tmp_path):

    # Saving a Primary HDU directly

    filename = tmp_path / 'test.fits'

    hdu = PrimaryHDU(data=dask_array_in_mem)
    hdu.writeto(filename)

    with fits.open(filename) as hdulist_new:
        assert isinstance(hdulist_new[0].data, np.ndarray)
        np.testing.assert_allclose(hdulist_new[0].data,
                                   dask_array_in_mem.compute())
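Examples #5 and #6 rely on astropy's support for writing dask arrays directly (added in recent astropy releases, 4.1 as far as I know). The dask_array_in_mem fixture might look something like this sketch (shape and chunking are arbitrary):

import dask.array as da
from astropy.io.fits import PrimaryHDU

# astropy accepts a dask array as HDU data and streams it to disk chunk
# by chunk; .compute() is only needed for the in-memory comparison.
dask_array_in_mem = da.random.random((128, 128), chunks=(64, 64))
PrimaryHDU(data=dask_array_in_mem).writeto('dask_example.fits', overwrite=True)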
Code example #7
def read_or_generate_image(filename_base: str,
                           config: Config = Config.instance(),
                           recipe: Optional[Callable[[], Tuple[np.ndarray,
                                                               Table]]] = None,
                           force_generate=False):
    """
    For the 'recipe' either generate and write the image+catalogue or read existing output from disk
    :param directory: where to put/read data
    :param filename_base: what the files are called, minus extension
    :param recipe: function generating your image and catalogue
    :return: image, input_catalogue
    """

    if not exists(config.image_folder):
        mkdir(config.image_folder)
    image_name = join(config.image_folder, filename_base + '.fits')
    table_name = join(config.image_folder, filename_base + '.dat')
    lock = get_lock(abspath(image_name))
    with lock:
        if (exists(image_name) and exists(table_name)) and not force_generate:
            img = getdata_safer(image_name)
            table = Table.read(table_name, format='ascii.ecsv')
        else:
            with work_in(config.scopesim_working_dir):
                if not recipe:
                    recipe = predefined_images[filename_base]
                img, table = recipe()
            img = img.astype(np.float64, order='C', copy=False)
            PrimaryHDU(img).writeto(image_name, overwrite=True)
            table.write(table_name, format='ascii.ecsv', overwrite=True)

    return img, table
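get_lock, work_in, getdata_safer and predefined_images are project-local helpers. The locking pattern itself can be reproduced with the filelock package; a hypothetical stand-in, not the project's actual implementation:

from filelock import FileLock

def get_lock(path: str) -> FileLock:
    # One lock file per image guards against two processes generating
    # the same FITS file at the same time.
    return FileLock(path + '.lock')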
Code example #8
File: fits.py Project: Luke-Ludwig/DRAGONS
def ad_to_hdulist(ad):
    """Creates an HDUList from an AstroData object."""
    hdul = HDUList()
    hdul.append(PrimaryHDU(header=ad.phu, data=DELAYED))

    for ext in ad._nddata:
        meta = ext.meta
        header, ver = meta['header'], meta['ver']
        wcs = ext.wcs

        if isinstance(wcs, gWCS):
            # We don't have access to the AD tags so see if it's an image
            # Catch ValueError as any sort of failure
            try:
                wcs_dict = adwcs.gwcs_to_fits(ext, ad.phu)
            except (ValueError, NotImplementedError) as e:
                LOGGER.warning(e)
            else:
                # Must delete keywords if image WCS has been downscaled
                # from a higher number of dimensions
                for i in range(1, 5):
                    for kw in (f'CDELT{i}', f'CRVAL{i}', f'CUNIT{i}',
                               f'CTYPE{i}'):
                        if kw in header:
                            del header[kw]
                    for j in range(1, 5):
                        for kw in (f'CD{i}_{j}', f'PC{i}_{j}', f'CRPIX{j}'):
                            if kw in header:
                                del header[kw]
                header.update(wcs_dict)
                # Use "in" here as the dict entry may be (value, comment)
                if 'APPROXIMATE' not in wcs_dict.get('FITS-WCS', ''):
                    wcs = None  # There's no need to create a WCS extension

        hdul.append(new_imagehdu(ext.data, header, 'SCI'))
        if ext.uncertainty is not None:
            hdul.append(new_imagehdu(ext.uncertainty.array, header, 'VAR'))
        if ext.mask is not None:
            hdul.append(new_imagehdu(ext.mask, header, 'DQ'))

        if isinstance(wcs, gWCS):
            hdul.append(wcs_to_asdftablehdu(ext.wcs, extver=ver))

        for name, other in meta.get('other', {}).items():
            if isinstance(other, Table):
                hdul.append(table_to_bintablehdu(other))
            elif isinstance(other, np.ndarray):
                header = meta['other_header'].get(name, meta['header'])
                hdul.append(new_imagehdu(other, header, name=name))
            elif isinstance(other, NDDataObject):
                hdul.append(new_imagehdu(other.data, meta['header']))
            else:
                raise ValueError("I don't know how to write back an object "
                                 "of type {}".format(type(other)))

    if ad._tables is not None:
        for name, table in sorted(ad._tables.items()):
            hdul.append(table_to_bintablehdu(table, extname=name))

    return hdul
Code example #9
File: __init__.py Project: griffin-h/lcogtgemini
def tofits(filename, data, hdr=None, clobber=False):
    """simple pyfits wrapper to make saving fits files easier."""
    hdu = PrimaryHDU(data)
    if hdr is not None:
        hdu.header += hdr
    hdulist = HDUList([hdu])
    hdulist.writeto(filename, overwrite=clobber, output_verify='ignore')
Code example #10
def test_single_filename_fallback():
    hdu = PrimaryHDU()
    assert _get_source_identifiers_by_hdu(
        [hdu], "/path/to/test_file.fits") == ['test_file.fits']
    assert _get_source_identifiers_by_hdu(
        [hdu, hdu],
        "/path/to/test_file.fits") == ['test_file.fits', 'test_file.fits']
Code example #11
def align(hduls, name="SCI", reference=None):
    """
    Aligns the source astronomical image(s) to the reference astronomical image
    \b
    :param hduls: list of fitsfiles
    :return: list of fistfiles with <name> HDU aligned
    """

    hduls_list = list(hduls)
    sources = [hdul[name] for hdul in hduls_list]
    outputs = []

    if reference is None:
        reference = snr.snr(hduls_list, name)[name]
    # click.echo(reference.header["ORIGNAME"])
    # FIXME log ref name
    np_ref = to_np(
        reference,
        "Cannot align to unexpected type {}; expected numpy array or FITS HDU")

    for source in sources:
        np_src = to_np(
            source,
            "Cannot align unexpected type {}; expected numpy array or FITS HDU"
        )
        # possibly unnecessary, but unsure about scoping
        output = np.array([])

        output = astroalign.register(np_src, np_ref)[0]
        if isinstance(source, HDU_TYPES):
            output = PrimaryHDU(output, source.header)
        outputs.append(HDUList([output]))

    return (hdul for hdul in outputs)
Code example #12
    def test_HDU_input(self):

        hdu = PrimaryHDU(img, header=hdr)

        output = input_data(hdu)

        npt.assert_equal(img, output["data"])
        npt.assert_equal(hdr, output["header"])
Code example #13
File: test_kepler.py Project: ArtShp/DataScience
def fake_hdulist(extver=1, version=2, timesys="TDB", telescop="KEPLER"):
    new_header = fake_header(extver, version, timesys, telescop)
    return [
        HDUList(hdus=[
            PrimaryHDU(header=new_header),
            BinTableHDU(header=new_header, name="LIGHTCURVE")
        ])
    ]
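BinTableHDU(header=..., name="LIGHTCURVE") above yields an empty named table extension, which is enough for header-oriented tests. A populated one would typically be built from columns, e.g.:

import numpy as np
from astropy.io import fits

# A minimal LIGHTCURVE-like binary table with one double-precision column.
col = fits.Column(name='TIME', format='D', array=np.arange(5.0))
lightcurve = fits.BinTableHDU.from_columns([col], name='LIGHTCURVE')
hdul = fits.HDUList([fits.PrimaryHDU(), lightcurve])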
Code example #14
def plot_images(
        results: List[Tuple[np.ndarray, np.ndarray, Table, float]]) -> None:
    for i, (observed_image, residual_image, photometry_result,
            sigma) in enumerate(results):

        x_y_data = np.vstack(
            (photometry_result['x_fit'], photometry_result['y_fit'])).T

        if verbose:
            print(f"Got σ={sigma} for the PSF")
            print(
                f'found {len(x_y_data)} out of {stars_in_cluster} stars with photometry'
            )

        if output:
            PrimaryHDU(observed_image).writeto(
                f'{output_folder}/observed_{i:02d}.fits', overwrite=True)
            PrimaryHDU(residual_image).writeto(
                f'{output_folder}/residual_{i:02d}.fits', overwrite=True)

            write_ds9_regionfile(x_y_data,
                                 f'{output_folder}/centroids_{i:02d}.reg')

        # Visualization

        plt.figure(figsize=(10, 10))
        plt.imshow(observed_image, norm=LogNorm(), vmax=1E5)
        plt.scatter(x_y_data[:, 0],
                    x_y_data[:, 1],
                    marker="o",
                    lw=1,
                    color=None,
                    edgecolors='red')
        plt.title('observed')
        plt.colorbar()
        if output:
            plt.savefig(f'{output_folder}/observed_{i:02d}.png')

        plt.figure(figsize=(10, 10))
        plt.imshow(residual_image, norm=LogNorm(), vmax=1E5)
        plt.title('residual')
        plt.colorbar()
        if output:
            plt.savefig(f'{output_folder}/residual_{i:02d}.png')

        plt.close('all')
Code example #15
def export_visibility_to_fits(vis: Visibility, fits_file: str):

    hdu = HDUList([
        PrimaryHDU(),
        configuration_to_hdu(vis.configuration),
        visibility_to_hdu(vis)
    ])
    with open(fits_file, "w") as f:
        hdu.writeto(f, checksum=True)
Code example #16
File: test_input_types.py Project: samdf96/FilFinder
def test_HDU_input():

    hdu = PrimaryHDU(img, header=hdr)

    output = input_data(hdu)

    npt.assert_equal(img, output["data"].value)
    assert output['data'].unit == u.dimensionless_unscaled
    npt.assert_equal(hdr, output["header"])
Code example #17
def test_priority():
    hdu = PrimaryHDU()
    hdu.header['SOURCEID'] = 'Target 1 SOURCEID'
    hdu.header['OBJECT'] = 'Target 1 OBJECT'
    assert _get_source_identifiers_by_hdu([hdu]) == ['Target 1 SOURCEID']

    hdu2 = PrimaryHDU()
    hdu2.header['SOURCEID'] = 'Target 2 SOURCEID'
    hdu2.header['OBJECT'] = 'Target 2 OBJECT'
    assert _get_source_identifiers_by_hdu(
        [hdu, hdu2]) == ['Target 1 SOURCEID', 'Target 2 SOURCEID']
Code example #18
File: test_input_types.py Project: samdf96/FilFinder
def test_HDU_input_withbunit():

    hdr['BUNIT'] = 'K'
    hdu = PrimaryHDU(img, header=hdr)

    output = input_data(hdu)

    npt.assert_equal(img, output["data"].value)
    assert output['data'].unit == u.K
    npt.assert_equal(hdr, output["header"])
Code example #19
def northupeastleft(filename='', data=None, header=None):
    from astropy.io.fits import getdata, PrimaryHDU
    if filename:
        data, header = getdata(filename, header=True)
    if header['cd1_1'] > 0:
        data = np.fliplr(data)
        header['cd1_1'] *= -1
        print('flipping around x')
    if header['cd2_2'] < 0:
        data = np.flipud(data)
        header['cd2_2'] *= -1
        print('flipping around y')
    if filename:
        from os import remove
        remove(filename)
        out_fits = PrimaryHDU(data=data, header=header)
        out_fits.writeto(filename, overwrite=True, output_verify='fix')
    else:
        return data, header
Code example #20
def generate():
    # TODO CHANGEME
    gauss_15 = make_convolved_grid(15,
                                   kernel=Gaussian2DKernel(x_stddev=2,
                                                           x_size=size,
                                                           y_size=size))
    PrimaryHDU(gauss_15).writeto(f'output_files/grid_gauss_15.fits',
                                 overwrite=True)
    gauss_16 = make_convolved_grid(16,
                                   kernel=Gaussian2DKernel(x_stddev=2,
                                                           x_size=size,
                                                           y_size=size))
    PrimaryHDU(gauss_16).writeto(f'output_files/grid_gauss_16.fits',
                                 overwrite=True)

    airy_15 = make_convolved_grid(15,
                                  kernel=AiryDisk2DKernel(radius=2,
                                                          x_size=size,
                                                          y_size=size))
    PrimaryHDU(airy_15).writeto(f'output_files/grid_airy_15.fits',
                                overwrite=True)
    airy_16 = make_convolved_grid(16,
                                  kernel=AiryDisk2DKernel(radius=2,
                                                          x_size=size,
                                                          y_size=size))
    PrimaryHDU(airy_16).writeto(f'output_files/grid_airy_16.fits',
                                overwrite=True)

    airy_16_perturbed = \
        make_convolved_grid(16, kernel=AiryDisk2DKernel(radius=2, x_size=size, y_size=size), perturbation=4.)
    PrimaryHDU(airy_16_perturbed).writeto(
        f'output_files/grid_airy_16_perturbed.fits', overwrite=True)

    airy_15_large = make_convolved_grid(15,
                                        kernel=AiryDisk2DKernel(radius=10,
                                                                x_size=size,
                                                                y_size=size))
    PrimaryHDU(airy_15_large).writeto(f'output_files/grid_airy_15_large.fits',
                                      overwrite=True)

    import anisocado
    hdus = anisocado.misc.make_simcado_psf_file([(0, 14)], [2.15],
                                                pixelSize=0.004,
                                                N=size)
    image = hdus[2]
    kernel = np.squeeze(image.data)
    anisocado_15 = make_convolved_grid(15, kernel=kernel, perturbation=0)
    PrimaryHDU(anisocado_15).writeto(f'output_files/grid_anisocado_15.fits',
                                     overwrite=True)
    anisocado_16 = make_convolved_grid(16, kernel=kernel, perturbation=1.2)
    PrimaryHDU(anisocado_16).writeto(f'output_files/grid_anisocado_16.fits',
                                     overwrite=True)
Code example #21
    def _create_fits(self):
        hdul = HDUList(PrimaryHDU())
        h = hdul[0].header
        h.append(Card('name', self.name))
        self._cf_pre_hook(hdul)
        self._cf_add_setup_info(hdul)
        self._cf_post_setup_hook(hdul)
        self._cf_add_summary_statistics(hdul)
        self._cf_add_pipeline_steps(hdul)
        self._cf_post_hook(hdul)
        return hdul
Code example #22
def run_bsmem_using_image(datafile: str,
                          outputfile: str,
                          dim: int,
                          pixelsize: float,
                          imagehdu: fits.PrimaryHDU,
                          uvmax: float = None,
                          alpha: float = None) -> None:
    """Run bsmem using initial/prior image.

    Args:
      datafile:   Input OIFITS data filename.
      outputfile: Output FITS filename.
      dim:        Reconstructed image width (pixels).
      pixelsize:  Reconstructed image pixel size (mas).
      imagehdu:   FITS HDU containing initial/prior image.
      uvmax:      Maximum uv radius to select (waves).
      alpha:      Regularization hyperparameter.

    """
    tempimage = tempfile.NamedTemporaryFile(suffix='.fits',
                                            mode='wb',
                                            delete=False)
    imagehdu.writeto(tempimage.name, overwrite=True)
    tempimage.close()
    args = [
        BSMEM, '--noui',
        '--data=%s' % datafile, '--clobber',
        '--output=%s' % outputfile,
        '--dim=%d' % dim,
        '--pixelsize=%f' % pixelsize,
        '--sf=%s' % tempimage.name
    ]
    if uvmax is not None:
        args += ['--uvmax=%f' % uvmax]
    if alpha is not None:
        args += ['--autoalpha=3', '--alpha=%f' % alpha]
    else:
        args += ['--autoalpha=4']
    fullstdout = os.path.splitext(outputfile)[0] + '-out.txt'
    run_bsmem(args, fullstdout)
    os.remove(tempimage.name)
Code example #23
File: test_image_dask.py Project: Gabriel-p/astropy
def test_long_header(dask_array_in_mem, tmp_path):

    # Make sure things work correctly if there is a long header in the HDU.

    filename = tmp_path / 'test.fits'

    # NOTE: we deliberately set up a long header here rather than add the
    # keys one by one to hdu.header as adding the header in one go used to
    # cause issues, so this acts as a regression test.
    header = fits.Header()
    for index in range(2048):
        header[f'KEY{index:x}'] = 0.

    hdu = PrimaryHDU(data=dask_array_in_mem, header=header)
    hdu.writeto(filename)

    with fits.open(filename) as hdulist_new:
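        # 2053 = the 2048 keywords set above plus the structural cards that
        # astropy adds for a 2-D primary HDU (SIMPLE, BITPIX, NAXIS, NAXIS1,
        # NAXIS2) -- assuming the test array is two-dimensional.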
        assert len(hdulist_new[0].header) == 2053
        assert isinstance(hdulist_new[0].data, np.ndarray)
        np.testing.assert_allclose(hdulist_new[0].data,
                                   dask_array_in_mem.compute())
Code example #24
File: test_connect.py Project: kgc85/astropy
    def setup_class(self):
        self.data1 = np.array(list(
            zip([1, 2, 3, 4], ['a', 'b', 'c', 'd'], [2.3, 4.5, 6.7, 8.9])),
                              dtype=[('a', int), ('b', 'U1'), ('c', float)])
        self.data2 = np.array(list(
            zip([1.4, 2.3, 3.2, 4.7], [2.3, 4.5, 6.7, 8.9])),
                              dtype=[('p', float), ('q', float)])
        hdu1 = PrimaryHDU()
        hdu2 = BinTableHDU(self.data1, name='first')
        hdu3 = BinTableHDU(self.data2, name='second')

        self.hdus = HDUList([hdu1, hdu2, hdu3])
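The named extensions built here can be read back individually with Table.read; a sketch of a test method consistent with this setup (hypothetical, not part of the original class):

    def test_read_by_name(self):
        from astropy.table import Table
        # 'hdu' accepts an EXTNAME or an index into the HDUList.
        t = Table.read(self.hdus, hdu='first')
        assert t.colnames == ['a', 'b', 'c']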
Code example #25
File: trim.py Project: rpanman/desimodel
def trim_psf(indir, outdir, filename):
    assert os.path.abspath(indir) != os.path.abspath(outdir)
    infile = os.path.join(indir, filename)
    outfile = os.path.join(outdir, filename)

    fx = fits.open(infile)
    hdus = HDUList()

    #- HDU 0 XCOEFF - data unchanged but update keywords for fewer samples
    xcoeff = fx[0].data
    hdr = fx[0].header
    hdr['NWAVE'] = 3    #- down from 11
    hdr['CRPIX1'] = 23  #- 23=45//2+1, down from 113=225//2+1
    hdr['CRPIX2'] = 23
    hdr['CDELT1'] = 0.005   #- 5mm instead of 1mm
    hdr['CDELT2'] = 0.005   #- 5mm instead of 1mm
    hdr['PIXSIZE'] = 0.005   #- 5mm instead of 1mm

    hdus.append(PrimaryHDU(xcoeff, header=hdr))
    hdus.append(fx['YCOEFF'])

    #- subsample spots
    inspots = fx['SPOTS'].data
    spots = np.zeros((3,3,45,45))
    spots[0,0] = rebin_image(inspots[0,0], 5)
    spots[1,0] = rebin_image(inspots[5,0], 5)
    spots[2,0] = rebin_image(inspots[10,0], 5)
    spots[0,1] = rebin_image(inspots[0,5], 5)
    spots[1,1] = rebin_image(inspots[5,5], 5)
    spots[2,1] = rebin_image(inspots[10,5], 5)
    spots[0,2] = rebin_image(inspots[0,10], 5)
    spots[1,2] = rebin_image(inspots[5,10], 5)
    spots[2,2] = rebin_image(inspots[10,10], 5)
    hdus.append(ImageHDU(spots, header=fx['SPOTS'].header))

    #- subsample spots x,y locations
    dx = fx['SPOTX'].data
    hdus.append(ImageHDU(dx[::5, ::5], header=fx['SPOTX'].header))
    dy = fx['SPOTY'].data
    hdus.append(ImageHDU(dy[::5, ::5], header=fx['SPOTY'].header))

    #- Fiberpos unchanged
    hdus.append(fx['FIBERPOS'])

    #- Subsample SPOTPOS and SPOTWAVE
    d = fx['SPOTPOS'].data
    hdus.append(ImageHDU(d[::5], header=fx['SPOTPOS'].header))
    d = fx['SPOTWAVE'].data
    hdus.append(ImageHDU(d[::5], header=fx['SPOTWAVE'].header))

    hdus.writeto(outfile, overwrite=True)
    fx.close()
Code example #26
    def save_fits(self, folder: str) -> str:
        data = self.data.copy()

        assert data.dtype == np.float32 and data.max() <= 1.0 and data.min() >= 0.0, \
            f"{data.dtype} {data.max()} {data.min()}"

        hdu = PrimaryHDU(
            data=data,  # write the validated copy
            header=self.fits_header,
        )
        hdulist = HDUList([hdu])
        path = join(folder, f"{self.key}.fits")
        hdulist.writeto(path, overwrite=True)
        return path
Code example #27
File: fits.py Project: Luke-Ludwig/DRAGONS
def _prepare_hdulist(hdulist, default_extension='SCI', extname_parser=None):
    new_list = []
    highest_ver = 0
    recognized = set()

    if len(hdulist) > 1 or (len(hdulist) == 1 and hdulist[0].data is None):
        # MEF file
        # First get HDUs for which EXTVER is defined
        for n, hdu in enumerate(hdulist):
            if extname_parser:
                extname_parser(hdu)
            ver = hdu.header.get('EXTVER')
            if ver not in (-1, None) and hdu.name:
                highest_ver = max(highest_ver, ver)
            elif not isinstance(hdu, PrimaryHDU):
                continue

            new_list.append(hdu)
            recognized.add(hdu)

        # Then HDUs that miss EXTVER
        for hdu in hdulist:
            if hdu in recognized:
                continue
            elif isinstance(hdu, ImageHDU):
                highest_ver += 1
                if 'EXTNAME' not in hdu.header:
                    hdu.header['EXTNAME'] = (default_extension,
                                             'Added by AstroData')
                if hdu.header.get('EXTVER') in (-1, None):
                    hdu.header['EXTVER'] = (highest_ver, 'Added by AstroData')

            new_list.append(hdu)
            recognized.add(hdu)
    else:
        # Uh-oh, a single image FITS file
        new_list.append(PrimaryHDU(header=hdulist[0].header))
        image = ImageHDU(header=hdulist[0].header, data=hdulist[0].data)
        # Fudge due to apparent issues with assigning ImageHDU from data
        image._orig_bscale = hdulist[0]._orig_bscale
        image._orig_bzero = hdulist[0]._orig_bzero

        for keyw in ('SIMPLE', 'EXTEND'):
            if keyw in image.header:
                del image.header[keyw]
        image.header['EXTNAME'] = (default_extension, 'Added by AstroData')
        image.header['EXTVER'] = (1, 'Added by AstroData')
        new_list.append(image)

    return HDUList(sorted(new_list, key=fits_ext_comp_key))
Code example #28
def test_OBJECT():
    hdu = PrimaryHDU()
    hdu.header['OBJECT'] = 'Target 1 OBJECT'
    assert _get_source_identifiers_by_hdu([hdu]) == ['Target 1 OBJECT']

    hdu2 = PrimaryHDU()
    hdu2.header['OBJECT'] = 'Target 2 OBJECT'
    assert _get_source_identifiers_by_hdu(
        [hdu, hdu2]) == ['Target 1 OBJECT', 'Target 2 OBJECT']
Code example #29
def test_SOURCEID():
    hdu = PrimaryHDU()
    hdu.header['SOURCEID'] = 'Target 1 SOURCEID'
    assert _get_source_identifiers_by_hdu([hdu]) == ['Target 1 SOURCEID']

    hdu2 = PrimaryHDU()
    hdu2.header['SOURCEID'] = 'Target 2 SOURCEID'
    assert _get_source_identifiers_by_hdu(
        [hdu, hdu2]) == ['Target 1 SOURCEID', 'Target 2 SOURCEID']
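Taken together, examples #10, #17, #28 and #29 pin down the lookup order: SOURCEID wins over OBJECT, and the file name is the last resort. A hypothetical implementation that satisfies these tests (the real function may differ):

import os.path

def _get_source_identifiers_by_hdu(hdus, filename=None):
    def identifier(hdu):
        for key in ('SOURCEID', 'OBJECT'):  # SOURCEID takes priority
            if key in hdu.header:
                return hdu.header[key]
        return os.path.basename(filename)   # fall back to the file name
    return [identifier(hdu) for hdu in hdus]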
Code example #30
def pv_wedge(cube, center, length, min_theta, max_theta, ntheta=90, width=1):
    '''
    Create a PV slice from a wedge.
    '''

    y0, x0 = center

    thetas = np.linspace(min_theta, max_theta, ntheta)

    pv_slices = []

    for i, theta in enumerate(thetas):

        start_pt = (y0 - (length / 2.) * np.sin(theta),
                    x0 - (length / 2.) * np.cos(theta))

        end_pt = (y0 + (length / 2.) * np.sin(theta),
                  x0 + (length / 2.) * np.cos(theta))

        path = Path([start_pt, end_pt], width=width)

        if i == 0:
            pv_slice = extract_pv_slice(cube, path)
            pv_path_slice, header = pv_slice.data, pv_slice.header
        else:
            pv_path_slice = extract_pv_slice(cube, path).data

        pv_slices.append(pv_path_slice)

        # Track the smallest shape
        if i == 0:
            path_length = pv_path_slice.shape[1]
        else:
            new_path_length = pv_path_slice.shape[1]
            if new_path_length < path_length:
                path_length = new_path_length

    header["NAXIS1"] = path_length

    # Now loop through and average together
    avg_pvslice = np.zeros((cube.shape[0], path_length), dtype='float')

    for pvslice in pv_slices:
        avg_pvslice += pvslice[:, :path_length]

    avg_pvslice /= float(ntheta)

    return PrimaryHDU(avg_pvslice, header=header)
Code example #31
File: test_image_dask.py Project: Gabriel-p/astropy
def test_file_handle(mode, dask_array_in_mem, tmp_path):

    filename = tmp_path / 'test.fits'
    hdu1 = PrimaryHDU(data=dask_array_in_mem)
    hdu2 = ImageHDU(data=np.arange(10))
    hdulist = fits.HDUList([hdu1, hdu2])

    with filename.open(mode=mode) as fp:
        hdulist.writeto(fp)

    with fits.open(filename) as hdulist_new:
        assert isinstance(hdulist_new[0].data, np.ndarray)
        np.testing.assert_allclose(hdulist_new[0].data,
                                   dask_array_in_mem.compute())
        assert isinstance(hdulist_new[1].data, np.ndarray)
        np.testing.assert_allclose(hdulist_new[1].data, np.arange(10))
Code example #32
    def writeFits(self, filename):
        """Write to FITS file

        This API is intended for use by the LSST data butler, which handles
        translating the desired identity into a filename.

        Parameters
        ----------
        filename : `str`
            Filename of FITS file.
        """
        from astropy.io.fits import HDUList, PrimaryHDU
        fits = HDUList()
        fits.append(PrimaryHDU())
        self._writeImpl(fits)
        with open(filename, "wb") as fd:
            fits.writeto(fd)