import os

import FITS_tools
from astropy.io import fits
from astropy import log
from sdpy import plait

import make_apex_cubes  # local pipeline module providing datasets_2014 and build_cube_2014

# NOTE: `outdir` is expected to be defined at module level elsewhere in this script.


def reduce_all_cubes_for_map(mapname, lowhigh='high', **kwargs):
    for dataset, maps in make_apex_cubes.datasets_2014.items():
        if mapname in maps:
            date = dataset[-10:]
            both_directions = True

            # Build the longitude-scan ("lscans") cube for this dataset
            try:
                make_apex_cubes.build_cube_2014(mapname,
                                                lowhigh=lowhigh,
                                                posang=[50, 70],
                                                datasets=[dataset],
                                                extra_suffix='_cal{0}_lscans'.format(date),
                                                **kwargs)
            except IndexError:
                both_directions = False

            # Build the latitude-scan ("bscans") cube for this dataset
            try:
                make_apex_cubes.build_cube_2014(mapname,
                                                lowhigh=lowhigh,
                                                posang=[140, 160],
                                                datasets=[dataset],
                                                extra_suffix='_cal{0}_bscans'.format(date),
                                                **kwargs)
            except IndexError:
                both_directions = False

            if both_directions:
                fileb = os.path.join(outdir,
                                     'APEX_H2CO_2014_{1}_{2}_cal{0}_bscans.fits'.format(date, mapname, lowhigh))
                filel = os.path.join(outdir,
                                     'APEX_H2CO_2014_{1}_{2}_cal{0}_lscans.fits'.format(date, mapname, lowhigh))
                cubeb = fits.getdata(fileb)
                cubel = fits.getdata(filel)

                if cubeb.shape != cubel.shape:
                    # Regrid both cubes onto their overlap region
                    header = FITS_tools.fits_overlap(fileb, filel)
                    hdb = fits.getheader(fileb)
                    # Add back 3rd dimension... HACK
                    for key in hdb:
                        if key[0] == 'C' and key.strip()[-1] == '3':
                            header[key] = hdb[key]
                    FITS_tools.regrid_fits_cube(fileb, outheader=header,
                                                outfilename=fileb, clobber=True)
                    FITS_tools.regrid_fits_cube(filel, outheader=header,
                                                outfilename=filel, clobber=True)
                    cubeb = fits.getdata(fileb)
                    cubel = fits.getdata(filel)
                    if cubeb.shape != cubel.shape:
                        log.fatal("Cube shapes don't match: {0}, {1}"
                                  .format(cubeb.shape, cubel.shape))
                        raise ValueError("Cube shapes don't match after regridding.")

                # Destripe by plaiting the two scan directions; also keep a naive average
                cube_comb = plait.plait_cube([cubeb, cubel], angles=[0, 90], scale=5)
                cube_comb_naive = (cubeb + cubel) / 2.

                header = fits.getheader(fileb)
                outfn = os.path.join(outdir,
                                     '{1}_{2}_cal{0}_plait.fits'.format(date, mapname, lowhigh))
                fits.PrimaryHDU(data=cube_comb, header=header).writeto(outfn, clobber=True)
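# A minimal sketch of the destriping step in isolation (not from the original
# scripts; the filenames here are placeholders). It mirrors the plait.plait_cube
# call above, assuming the two scan-direction cubes are already on a common grid:
# plaiting weights each map in the Fourier domain so that the spatial frequencies
# contaminated by its scan pattern are taken from the other map.
from astropy.io import fits
from sdpy import plait

cube_l = fits.getdata('example_lscans.fits')   # hypothetical longitude-scan cube
cube_b = fits.getdata('example_bscans.fits')   # hypothetical latitude-scan cube
destriped = plait.plait_cube([cube_b, cube_l], angles=[0, 90], scale=5)
fits.PrimaryHDU(data=destriped,
                header=fits.getheader('example_bscans.fits')
                ).writeto('example_plait.fits', clobber=True)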
import FITS_tools
from astropy.io import fits
import paths

neiir = FITS_tools.regrid_cube_hdu(fits.open(paths.dpath('w51.neii.fits'))[0],
                                   outheader=fits.Header.fromtextfile(paths.dpath('w51.neii.square.hdr')))
neiir.writeto(paths.dpath('w51.neii.square.fits'))

sivr = FITS_tools.regrid_cube_hdu(fits.open(paths.dpath('w51.siv.fits'))[0],
                                  outheader=fits.Header.fromtextfile(paths.dpath('w51.siv.square.hdr')))
sivr.writeto(paths.dpath('w51.siv.square.fits'))
import FITS_tools as ft  # the function below refers to FITS_tools as `ft`
from astropy.io import fits


def match_regrid(filename1, filename2, return_type='hdu', reappend_dim=True,
                 remove_hist=True, save_output=False, save_name='new_img'):
    '''
    Input two FITS filenames. The output will be the projection of file 1
    onto file 2.
    '''

    fits1 = fits.open(filename1)
    fits2 = fits.open(filename2)

    hdr1 = fits1[0].header.copy()
    hdr2 = fits2[0].header.copy()

    # new_wcs = WCS(hdr2)
    # new_wcs = drop_axis(new_wcs, 3)
    # hdr2 = new_wcs.to_header()

    hdr2["CUNIT4"] = 'km/s '
    hdr2["CRVAL4"] = -48.1391
    hdr2["CTYPE4"] = 'VELO-LSR'
    hdr2["CDELT4"] = -1.288141

    # hdr2["CDELT1"] = hdr2["CDELT1"] * (4096/256)
    # hdr2["CDELT2"] = hdr2["CDELT2"] * (4096/256)
    # hdr2["CDELT4"] = hdr2["CDELT4"] * (205/40)
    # hdr2["NAXIS1"] = 4096
    # hdr2["NAXIS2"] = 4096
    # hdr2["NAXIS4"] = 40

    # Alternative: build hdr2 from a hard-coded header string:
    # hdr2 = "SIMPLE = T /Standard FITS BITPIX = -32 /Floating point (32 bit) NAXIS = 3 NAXIS1 = 256 NAXIS2 = 256 NAXIS3 = 205 EXTEND = T BSCALE = 1.000000000000E+00 BZERO = 0.000000000000E+00 BMAJ = 1.755010949241E-03 BMIN = 1.572879685296E-03 BPA = 8.867265319824E+01 BTYPE = 'Intensity' OBJECT = 'M33P1 ' BUNIT = 'JY/BEAM ' /Brightness (pixel) unit EQUINOX = 2.000000000000E+03 RADESYS = 'FK5 ' LONPOLE = 1.800000000000E+02 LATPOLE = 3.065994166667E+01 PC01_01 = 1.000000000000E+00 PC02_01 = 0.000000000000E+00 PC03_01 = 0.000000000000E+00 PC01_02 = 0.000000000000E+00 PC02_02 = 1.000000000000E+00 PC03_02 = 0.000000000000E+00 PC01_03 = 0.000000000000E+00 PC02_03 = 0.000000000000E+00 PC03_03 = 1.000000000000E+00 CTYPE1 = 'RA---SIN' CRVAL1 = 2.346210000000E+01 CDELT1 = -4.166666666667E-04 CRPIX1 = 2.049000000000E+03 CUNIT1 = 'deg ' CTYPE2 = 'DEC--SIN' CRVAL2 = 3.065994166667E+01 CDELT2 = 4.166666666667E-04 CRPIX2 = 2.049000000000E+03 CUNIT2 = 'deg ' CTYPE3 = 'VELO-LSR' CRVAL3 = -48.1391 CDELT3 = 6.103165421963E+03 CRPIX3 = 1.000000000000E+00 CUNIT3 = 'km/s ' PV2_1 = 0.000000000000E+00 PV2_2 = 0.000000000000E+00 RESTFRQ = 1.420405751770E+09 /Rest Frequency (Hz) SPECSYS = 'BARYCENT' /Spectral reference frame ALTRVAL = -4.942706867471E+04 /Alternate frequency reference value ALTRPIX = 1.000000000000E+00 /Alternate frequency reference pixel VELREF = 258 /1 LSR, 2 HEL, 3 OBS, +256 Radio COMMENT casacore non-standard usage: 4 LSD, 5 GEO, 6 SOU, 7 GAL TELESCOP= 'VLA ' OBSERVER= 'unavailable' DATE-OBS= '1997-09-19T04:59:50.000002' TIMESYS = 'TAI ' OBSRA = 2.346210000000E+01 OBSDEC = 3.065994166667E+01 OBSGEO-X= -1.601185365000E+06 OBSGEO-Y= -5.041977547000E+06 OBSGEO-Z= 3.554875870000E+06 DATE = '2014-12-17T20:03:37.733514' /Date FITS file was written ORIGIN = 'CASA 4.2.2 (prerelease r30986)' END "
    # hdr2 = fits.header.Header.fromstring(hdr2)

    if remove_hist:
        # Remove the huge CASA history
        del hdr2["HISTORY"]

    shape1 = fits1[0].data.shape
    shape2 = fits2[0].data.shape[:-1]

    fits2.close()

    # We need to alter the header to make them compatible
    # if len(shape1) < len(shape2):
    #     hdr2["NAXIS"] = len(shape1)
    #     del_keys = ["NAXIS", "CTYPE", "CDELT", "CRPIX", "CUNIT", "CRVAL"]
    #     extra_axes = \
    #         [posn + 1 for posn, val in enumerate(shape2[::-1]) if val == 1]
    #     if reappend_dim:
    #         deleted_keys = {}
    #     for ax in extra_axes:
    #         for del_key in del_keys:
    #             if reappend_dim:
    #                 deleted_keys[del_key+str(ax)] = hdr2[del_key+str(ax)]
    #             del hdr2[del_key+str(ax)]

    # Do the matching
    if len(shape1) == 2:
        regrid_img = ft.hcongrid.hcongrid(fits1[0].data, fits1[0].header, hdr2)
    else:
        regrid_img = ft.regrid_cube(fits1[0].data, fits1[0].header, hdr2,
                                    specaxes=(3, 3))
    # regrid_img = reproject(fits1[0], hdr2, shape_out=(205, 256, 256))[0]

    # Now hack the header back together!
    # if reappend_dim and len(shape1) < len(shape2):
    #     for key in deleted_keys:
    #         hdr2[key] = deleted_keys[key]
    #     hdr2["NAXIS"] = len(shape2)
    #     for _ in range(len(extra_axes)):
    #         regrid_img = regrid_img[np.newaxis]

    # Finally, we want to take out the important portions of fits1 header
    # hdr2["TELESCOPE"] = hdr1["TELESCOPE"]
    # hdr2["DATE-OBS"] = hdr1["DATE-OBS"]
    # hdr2["DATAMAX"] = hdr1["DATAMAX"]
    # hdr2["DATAMIN"] = hdr1["DATAMIN"]
    # hdr2["OBSERVER"] = hdr1["OBSERVER"]
    # hdr2["OBJECT"] = hdr1["OBJECT"]
    # hdr2["ORIGIN"] = hdr1["ORIGIN"]
    # hdr2["BMAJ"] = hdr1["BMAJ"]
    # hdr2["BMIN"] = hdr1["BMIN"]
    # hdr2["BPA"] = hdr1["BPA"]

    if save_output:
        hdu = fits.PrimaryHDU(regrid_img, header=hdr2)
        hdu.writeto(save_name + ".fits")
    else:
        return fits.PrimaryHDU(regrid_img, header=hdr2)
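# Hypothetical usage sketch (these filenames are placeholders, not from the
# original script): project cube1 onto cube2's grid and write out the result.
regridded = match_regrid('cube1.fits', 'cube2.fits', save_output=False)
regridded.writeto('cube1_on_cube2_grid.fits')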
import os

import numpy as np
import FITS_tools
import pyspeckit
from spectral_cube import SpectralCube

# `cubefilenames` (the list of input "31-30" cube filenames) is assumed to be
# defined elsewhere in this script.

if not os.path.isdir('../data/spectra/'):
    os.mkdir('../data/spectra/')
if not os.path.isdir('../data/spectra/apex/'):
    os.mkdir('../data/spectra/apex/')

for cubefilename in cubefilenames:
    cube31 = SpectralCube.read(cubefilename)

    cube32filename = cubefilename.replace("31-30", "32-31")
    if os.path.isfile(cube32filename):
        cube32 = SpectralCube.read(cube32filename)
    else:
        print("{0} does not exist, skipping".format(cube32filename))
        continue

    if cube31.shape != cube32.shape:
        # Regrid the 32-31 cube onto the 31-30 grid before averaging
        cube32data = FITS_tools.regrid_cube_hdu(cube32.hdu, cube31.header).data
        if np.all(np.isnan(cube32data)):
            print("{0} and {1} do not overlap".format(cubefilename,
                                                      cube32filename))
            continue
    else:
        cube32data = cube32.filled_data[:]

    meancubedata = (cube31.filled_data[:] + cube32data) / 2.

    hdu = cube31.hdu
    hdu.data = meancubedata.value
    outcubename = cubefilename.replace("31-30", "averaged").replace(".lmv", ".fits")
    hdu.writeto(outcubename, clobber=True)

    # Smooth spectrally by a factor of 5 with downsampling, then rescale the
    # spectral pixel size in the header accordingly
    meancubedata = pyspeckit.cubes.spectral_smooth(meancubedata.value, 5,
                                                   downsample=True)
    hdu.data = meancubedata
    hdu.header['CDELT3'] = hdu.header.get('CDELT3') * float(5)
             #'LimaBean_H213CO22_cube_sub_smoothtoCband.fits',
             'LimaBean_H2C18O22_cube_sub.fits',
             #'LimaBean_H2C18O22_cube_sub_smoothtoCband.fits',
             ]

# TODO: Where can these be got?
ccontfile = datapath + 'GCCBand_lb.2.fits'

# First do with Casey Law's images as the background
if os.path.exists(ccontfile):
    for fn in fivecubes:
        reproj_contfile = datapath + "GCCBand_reproj.fits"
        header = FITS_tools.strip_headers.flatten_header(fits.getheader(datapath + fivecubes[0]))
        ccont = FITS_tools.project_to_header(ccontfile, header) * u.Jy

        cbfreq = 4.829 * u.GHz
        gbbeam_5ghz = 1.22 * ((cbfreq.to(u.m, u.spectral()) / (100*u.m)) * u.rad).decompose()
        fwhm = np.sqrt(8*np.log(2))
        bmaj = 4.2413E-02*u.deg
        bmin = 4.2413E-02*u.deg
        ktojy5ghz = (1*u.K).to(u.Jy,
                               u.brightness_temperature((2*np.pi*(bmaj*bmin/fwhm**2)),
                                                        cbfreq))
        cont_K = (ccont/ktojy5ghz).value

        outfile = fits.PrimaryHDU(data=cont_K, header=header)
        outfile.writeto(reproj_contfile, clobber=True)

        continuum = cont_K
        suffix = '_claw.fits'
import glob
import itertools
import os
from collections import defaultdict

from astropy.io import fits
from astropy import log
import FITS_tools
import scipy.stats
from sdpy import plait

import paths
from reduce_map_all import reduce_all_cubes_for_map

for lowhigh in ('low', 'high'):
    # Reduce both scan directions with unity calibration factors
    reduce_all_cubes_for_map('MAP_006', lowhigh=lowhigh,
                             calibration_factors=defaultdict(lambda: 1))

    files = glob.glob(os.path.join(paths.april2014path, '*MAP_006*scans.fits'))

    # Determine a common target header from the pairwise overlaps
    for fn1, fn2 in itertools.combinations(files, 2):
        header = FITS_tools.fits_overlap(fn1, fn2)
        hd = fits.getheader(fn1)
        # Add back 3rd dimension... HACK
        for key in hd:
            if key[0] == 'C' and key.strip()[-1] == '3':
                header[key] = hd[key]

    # Regrid every scan cube onto that common header in place
    for fn in files:
        FITS_tools.regrid_fits_cube(fn, outheader=header, outfilename=fn,
                                    clobber=True)

    cube530b = fits.getdata(os.path.join(paths.april2014path,
                                         'APEX_H2CO_2014_MAP_006_{0}_cal2014-05-30_bscans.fits'.format(lowhigh)))
    cube530l = fits.getdata(os.path.join(paths.april2014path,
                                         'APEX_H2CO_2014_MAP_006_{0}_cal2014-05-30_lscans.fits'.format(lowhigh)))
    cube730b = fits.getdata(os.path.join(paths.april2014path,
                                         'APEX_H2CO_2014_MAP_006_{0}_cal2014-07-31_bscans.fits'.format(lowhigh)))
    cube730l = fits.getdata(os.path.join(paths.april2014path,
                                         'APEX_H2CO_2014_MAP_006_{0}_cal2014-07-30_lscans.fits'.format(lowhigh)))