Example #1
def reproject_images(template_header,
                     input_dir,
                     reprojected_dir,
                     imtype,
                     whole=False,
                     exact=True,
                     img_list=None):

    input_table = os.path.join(input_dir, imtype + '_input.tbl')
    montage.mImgtbl(input_dir, input_table, corners=True, img_list=img_list)

    # Create reprojection directory, reproject, and get image metadata
    stats_table = os.path.join(reprojected_dir,
                               imtype + '_mProjExec_stats.log')
    montage.mProjExec(input_table,
                      template_header,
                      reprojected_dir,
                      stats_table,
                      raw_dir=input_dir,
                      whole=whole,
                      exact=exact)

    reprojected_table = os.path.join(reprojected_dir,
                                     imtype + '_reprojected.tbl')
    montage.mImgtbl(reprojected_dir, reprojected_table, corners=True)
def reproject_images(template_header, int_images, rrhr_images, flag_images, input_dir, reprojected_dir, whole=True, exact=True):

    # MASK IMAGES
    for i in range(len(int_images)):
        image_infile = int_images[i]
        wt_infile = rrhr_images[i]
        flg_infile = flag_images[i]

        image_outfile = os.path.join(input_dir, os.path.basename(image_infile).replace('.fits', '_masked.fits'))
        wt_outfile = os.path.join(input_dir, os.path.basename(wt_infile).replace('.fits', '_masked.fits'))

        #mask_galex_edges(image_infile, flg_infile, outfile=image_outfile)
        #mask_galex_edges(wt_infile, flg_infile, outfile=wt_outfile)
        mask_galex(image_infile, wt_infile, flg_infile, out_intfile=image_outfile, out_wtfile=wt_outfile)


    # REPROJECT IMAGES
    input_table = os.path.join(input_dir, 'input.tbl')
    montage.mImgtbl(input_dir, input_table, corners=True)

    # Create reprojection directory, reproject, and get image metadata
    #whole = True #if background_match else False
    stats_table = os.path.join(reprojected_dir, 'mProjExec_stats.log')

    montage.mProjExec(input_table, template_header, reprojected_dir, stats_table, raw_dir=input_dir, whole=whole, exact=exact)

    reprojected_table = os.path.join(reprojected_dir, 'reprojected.tbl')
    montage.mImgtbl(reprojected_dir, reprojected_table, corners=True)
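
A hedged usage sketch for the masked-reprojection variant above (not from the original source): the GALEX-style file patterns, directory names, and 'mosaic_template.hdr' are illustrative placeholders.

if __name__ == '__main__':
    import glob

    # Placeholder file lists; the *-int/*-rrhr/*-flags patterns are assumptions
    int_images = sorted(glob.glob('galex/raw/*-int.fits'))
    rrhr_images = sorted(glob.glob('galex/raw/*-rrhr.fits'))
    flag_images = sorted(glob.glob('galex/raw/*-flags.fits'))

    reproject_images('mosaic_template.hdr', int_images, rrhr_images, flag_images,
                     input_dir='galex/raw', reprojected_dir='galex/reprojected')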
Example #3
def create_table(in_dir, dir_type=None):
    if dir_type is None:
        reprojected_table = os.path.join(in_dir, 'reprojected.tbl')
    else:
        reprojected_table = os.path.join(in_dir, dir_type + '_reprojected.tbl')
    montage.mImgtbl(in_dir, reprojected_table, corners=True)
    return reprojected_table
def create_tables(in_dir, img_list=None, table_type=None):
    if table_type is None:
        tbl = 'reprojected.tbl'
    else:
        tbl = table_type + '_reprojected.tbl'
    reprojected_table = os.path.join(in_dir, tbl)
    montage.mImgtbl(in_dir, reprojected_table, corners=True, img_list=img_list)
    return reprojected_table
Example #5
def reproject_images(template_header,
                     input_dir,
                     reproj_dir,
                     imtype,
                     whole=True,
                     exact=True,
                     corners=True,
                     img_list=None):
    """
    Reproject input images to a new WCS as given by a template header

    Parameters
    ----------
    template_header : ascii file
        ASCII file containing the WCS to which you want to reproject. This is what Montage requires.
    input_dir : str
        Path to directory containing input data
    reproj_dir : str
        Path to new directory for storing reprojected data
    imtype : str
        The type of image you are reprojecting; one of [int, rrhr]
    whole : bool, optional
        Montage argument: Force reprojection of whole images, even if they exceed the area of the FITS 
        header template (Default: True)
    exact : bool, optional
        Montage argument: Flag indicating output image should exactly match the FITS header template, 
        and not crop off blank pixels (Default: True)
    corners : bool, optional
        Montage argument: Adds 8 columns for the RA and Dec of the image corners to the output metadata table 
        (Default: True)
    img_list : list of strs, optional 
        Montage argument: only process files with names specified in table img_list, ignoring any other files
        in the directory. (Default: None)
    """

    # get image metadata from input images
    input_table = os.path.join(input_dir, imtype + '_input.tbl')
    montage.mImgtbl(input_dir, input_table, corners=corners, img_list=img_list)

    # reproject images
    stats_table = os.path.join(reproj_dir, imtype + '_mProjExec_stats.log')
    montage.mProjExec(input_table,
                      template_header,
                      reproj_dir,
                      stats_table,
                      raw_dir=input_dir,
                      whole=whole,
                      exact=exact)

    # get new image metadata with new header information
    reprojected_table = os.path.join(reproj_dir, imtype + '_reprojected.tbl')
    montage.mImgtbl(reproj_dir, reprojected_table, corners=corners)
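
A minimal usage sketch for the documented helper above, assuming montage_wrapper is importable under the name `montage` used in the snippet; the header file and directory names below are illustrative placeholders.

if __name__ == '__main__':
    import os
    import montage_wrapper as montage  # assumed import behind the `montage.*` calls above

    template_header = 'mosaic_template.hdr'   # assumed WCS template, e.g. from montage.mHdr
    input_dir = 'uv_data/int'                 # assumed directory of raw 'int' images
    reproj_dir = 'uv_data/int_reprojected'    # assumed output directory

    if not os.path.exists(reproj_dir):
        os.makedirs(reproj_dir)
    reproject_images(template_header, input_dir, reproj_dir, imtype='int')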
def create_tables(weights_dir, weighted_dir):
    return_tables = []
    in_dir = weights_dir
    reprojected_table = os.path.join(in_dir, 'weights_reprojected.tbl')
    montage.mImgtbl(in_dir, reprojected_table, corners=True)
    return_tables.append(reprojected_table)

    in_dir = weighted_dir
    reprojected_table = os.path.join(in_dir, 'int_reprojected.tbl')
    montage.mImgtbl(in_dir, reprojected_table, corners=True)
    return_tables.append(reprojected_table)

    return return_tables
Example #7
def create_table(in_dir, dir_type=None):
    """
    Create a metadata table using Montage for all the files in a given directory

    Parameters
    ----------
    in_dir : str
        Path to directory containing the files
    dir_type : str, optional
        type of file you are creating a table for, e.g., 'int', 'rrhr', 'wt' (Default: None)

    Returns
    -------
    reprojected_table : str
        Path to the table containing the metadata
    """
    if dir_type is None:
        reprojected_table = os.path.join(in_dir, 'reprojected.tbl')
    else:
        reprojected_table = os.path.join(in_dir, dir_type + '_reprojected.tbl')
    montage.mImgtbl(in_dir, reprojected_table, corners=True)
    return reprojected_table
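
A hedged usage sketch for create_table; the directory layout and dir_type values below are assumptions for illustration, mirroring the 'int'/'rrhr'/'wt' convention mentioned in the docstring.

int_table = create_table('reprojected/int', dir_type='int')   # hypothetical directory of reprojected 'int' images
wt_table = create_table('reprojected/wt', dir_type='wt')      # hypothetical directory of reprojected weight images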
                    help="Check that each image has data of the source")

cmd_args = parser.parse_args()
pattern = os.path.join(cmd_args.dirs, "*-arcdata.json")

# Everything that is specific to a given user@machine is confined to
# the temporary directory.
tempdir = tempfile.mkdtemp()
#
# First, construct updated tables of all the FITS files available
#
tables = [os.path.join(tempdir, t) for t in ["small-all.tbl", "large-all.tbl"]]
for table, path in zip(tables, misc_utils.fits_dirs()):
    # This will rewrite the tables every time
    # FIXME should think of how to make it more efficient
    montage_wrapper.mImgtbl(path, table, recursive=True)

#
# Second, clean up and merge the tables
#
newtable_lines = []
for table in tables:
    table_lines = open(table).readlines()
    if not newtable_lines:
        # Copy header from first table
        newtable_lines = table_lines[0:3]
    for line in table_lines[3:]:
        fields = line.split()
        nhdu, fitsname = fields[-2:]
        # Check if there is any let or hindrance
        if reasons_to_skip(fitsname): continue
Example #9
def Run(ra,
        dec,
        width,
        name=None,
        out_dir=None,
        temp_dir=None,
        replace=False,
        flux=True,
        thumbnails=False,
        gzip=True,
        montage_path=None,
        swarp_path=None):
    """
    Function to generate standardised cutouts of Herschel observations.

    Arguments
        ra: {float, sequence of float}
                A sequence of right ascension values, in decimal degrees, of the targets to be processed. Alternatively,
                if you're only interested in one target, a single RA value can be given here.
        dec: {float, sequence of float}
                A sequence of declination values, in decimal degrees, of the targets to be processed. Alternatively, if
                you're only interested in one target, a single Dec value can be given here.
        width: {float, sequence of float}
                A sequence giving the desired width of the cutout square for each target, in decimal degrees.
                Alternatively, if you're only interested in one target, a single width value can be given here.

    Keyword arguments
        name: {str, sequence of str}, optional
                A sequence giving the name of each target; if you're only interested in one target, a
                single name can be given here. If not provided, a name is constructed automatically from the target
                coordinates, according to the IAU catalogue convention.
        out_dir: str, optional
                A string giving the path to the directory where the output FITS files will be placed. If not provided,
                files will simply be written to the current working directory.
        temp_dir: str, optional
                A string giving the path to be used as a temporary working directory by Herschel_Button. If not provided,
                a temporary directory will be created inside the output directory.
        replace: bool, optional
                If False, Herschel_Button will search the output directory for any pre-existing output FITS files from
                previous runs of the function, and will not bother re-creating these maps (making it easy to resume
                processing a large number of targets after an interruption). If True, Herschel_Button will produce maps for
                all input targets, regardless of whether maps for these targets already exist in the output directory.
        flux: bool, optional
                If True, output maps will be in flux density units of Jy/pix. If False, output maps will be in surface
                brightness units of MJy/sr.
        thumbnails: bool, optional
                If True, JPG thumbnail images of the generated maps will also be produced and placed in out_dir.
        gzip: bool, optional
                If True, the output FITS maps will be compressed with gzip.
        montage_path: str, optional
                Path to directory that contains the Montage commands (mProject, etc); useful if this directory is not in $PATH
        swarp_path: str, optional
                Path to directory that contains the SWarp command; useful if this directory is not in $PATH
    """

    # Handle Montage and SWarp paths, if kwargs provided
    if montage_path != None:
        os.environ['PATH'] += ':' + montage_path
    if swarp_path != None:
        os.environ['PATH'] += ':' + swarp_path
    import montage_wrapper

    # Make sure input values are in list format, and sort out variable names for rest of function
    if not hasattr(ra, '__iter__'):
        ra = [ra]
    ra_list = np.array(ra)
    del (ra)
    if not hasattr(dec, '__iter__'):
        dec = [dec]
    dec_list = np.array(dec)
    del (dec)

    # Check that the ra and dec lists all have the same length
    if np.std([float(len(ra_list)), float(len(dec_list))]) > 0:
        raise Exception(
            'Input sequences of ra and dec all need to be the same length')

    # If single width provided, but multiple coordinates, create width array of same value repeated required number of times
    if not hasattr(width, '__iter__'):
        if len(ra_list) > 1:
            width_list = [width] * len(ra_list)

        # Else, if only one RA and one width given, stick width value into list, too
        elif len(ra_list) == 1:
            width_list = [width]

    # Else, a sequence of widths was provided; use it directly
    else:
        width_list = width
    width_list = np.array(width_list)
    del (width)

    # If no names provided, use coordinates to generate standardised names as per IAU catalogue convention
    if not hasattr(name, '__iter__'):
        if (name == None):
            name = []
            for i in range(len(ra_list)):
                coord = astropy.coordinates.SkyCoord(
                    str(ra_list[i]) + 'd ' + str(dec_list[i]) + 'd')
                name_coord = re.sub('[hmsdms. ]', ' ',
                                    coord.to_string('hmsdms'))
                name_coord = name_coord.split(' ')
                name_coord[3] = name_coord[3][:min(2, len(name_coord[3]))]
                name_coord[8] = name_coord[8][:min(2, len(name_coord[8]))]
                name_coord = 'J' + ''.join(name_coord)
                name.append(name_coord)  # use the IAU-style name constructed above

        # Stick the generated name(s), or a single provided name, into an array
        name_list = np.array(name) if isinstance(name, list) else np.array([name])

    # If a sequence of names is provided, make sure it's in array format (and stop single names becoming zero-dim array)
    else:
        name_list = np.array(copy.deepcopy(name))
        if name_list.shape == ():
            name_list = np.array([name_list.tolist()])
    del (name)

    # Do final check that all input sequences are the right length
    if np.std([
            float(ra_list.size),
            float(dec_list.size),
            float(width_list.size),
            float(name_list.size)
    ]) > 0:
        raise Exception(
            'Input sequences of ra, dec, width, and name all need to be the same length'
        )

    # If no output directory specified, set to current working directory
    if out_dir == None:
        out_dir = os.getcwd()

    # Check that output directory exists
    if not os.path.exists(out_dir):
        raise Exception('Specified output directory does not exist')

    # Create temporary directory
    if temp_dir == None:
        temp_dir = os.path.join(out_dir, 'Temp')

    # Check that temp directory exists, if it does, warn user that contents may be overwritten
    if os.path.exists(temp_dir):
        print(
            'Specified temporary directory already exists; note that any existing contents may be overwritten'
        )

    # Else, if temp directory doesn't already exist, create it
    else:
        os.mkdir(temp_dir)

    # State band information
    bands_dict = {
        '70': {
            'band': '70',
            'instrument': 'PACS',
            'wavelength': '70um',
            'filter': 'PHOTBLUE',
            'pix_size': 2,
            'hdr_inst_card_kwrd': 'CAMERA',
            'hdr_inst_card_entry': 'PHOTBLUE',
            'hdr_blueband_kwrd': 'blue1',
            'hdr_err_ext_name': 'stDev'
        },
        '100': {
            'band': '100',
            'instrument': 'PACS',
            'wavelength': '100um',
            'filter': 'PHOTGREEN',
            'pix_size': 3,
            'hdr_inst_card_kwrd': 'CAMERA',
            'hdr_inst_card_entry': 'PHOTBLUE',
            'hdr_blueband_kwrd': 'blue2',
            'hdr_err_ext_name': 'stDev'
        },
        '160': {
            'band': '160',
            'instrument': 'PACS',
            'wavelength': '160um',
            'filter': 'PHOTRED',
            'pix_size': 4,
            'hdr_inst_card_kwrd': 'CAMERA',
            'hdr_inst_card_entry': 'PHOTRED',
            'hdr_blueband_kwrd': False,
            'hdr_err_ext_name': 'stDev'
        },
        '250': {
            'band': '250',
            'instrument': 'SPIRE',
            'wavelength': '250um',
            'filter': 'PSW',
            'pix_size': 6,
            'hdr_inst_card_kwrd': 'DETECTOR',
            'hdr_inst_card_entry': 'PSW',
            'hdr_blueband_kwrd': False,
            'hdr_err_ext_name': 'error'
        },
        '350': {
            'band': '350',
            'instrument': 'SPIRE',
            'wavelength': '350um',
            'filter': 'PMW',
            'pix_size': 8,
            'hdr_inst_card_kwrd': 'DETECTOR',
            'hdr_inst_card_entry': 'PMW',
            'hdr_blueband_kwrd': False,
            'hdr_err_ext_name': 'error'
        },
        '500': {
            'band': '500',
            'instrument': 'SPIRE',
            'wavelength': '500um',
            'filter': 'PLW',
            'pix_size': 12,
            'hdr_inst_card_kwrd': 'DETECTOR',
            'hdr_inst_card_entry': 'PLW',
            'hdr_blueband_kwrd': False,
            'hdr_err_ext_name': 'error'
        }
    }

    # State map mode prefixes we care about
    req_obs_modes = [
        'SpirePhotoLargeScan', 'SpirePhotoSmallScan', 'PacsPhoto',
        'SpirePacsParallel'
    ]

    # Record time taken
    time_list = [time.time()]

    # Loop over each target
    for i in np.random.permutation(range(name_list.shape[0])):
        name = name_list[i].replace(' ', '_')
        ra = ra_list[i]
        dec = dec_list[i]
        width = width_list[i]

        # If we're not repeating already-processed targets, check if this target has already been completed
        if not replace:
            bands_done = 0
            for band in bands_dict.keys():
                if os.path.exists(
                        os.path.join(
                            out_dir, name + '_Herschel_' +
                            bands_dict[band]['wavelength'] + '.fits.gz')):
                    bands_done += 1

                # Also check for null files, indicating data is not available for a given band
                elif os.path.exists(
                        os.path.join(
                            out_dir, '.' + name + '_Herschel_' +
                            bands_dict[band]['wavelength'] + '.null')):
                    bands_done += 1

            # If this source has already been processed in all bands, skip it
            if bands_done == len(bands_dict.keys()):
                print(
                    'Herschel data for ' + name +
                    ' already processed (if available); continuing to next target'
                )
                time_list.append(time.time())
                continue
        print('Processing Herschel data for target ' + name)

        # Create field processing directories (deleting any prior)
        gal_dir = os.path.join(temp_dir, str(name)) + '/'
        if os.path.exists(gal_dir):
            ChrisFuncs.RemoveCrawl(gal_dir)
        if not os.path.exists(os.path.join(gal_dir, 'Raw')):
            os.makedirs(os.path.join(gal_dir, 'Raw'))
        os.chdir(os.path.join(gal_dir, 'Raw'))

        # Create band-specific directories
        for band in bands_dict.keys():
            if not os.path.exists(os.path.join(gal_dir, 'Raw', band)):
                os.makedirs(os.path.join(gal_dir, 'Raw', band))

        # Perform query, with error handling
        print('Querying HSA')
        query_success = False
        query_fail_count = 0
        while query_success == False:
            if query_fail_count >= 10:
                raise Exception(
                    'HSA query failing consistently; maybe HSA is down, or something else has gone wrong'
                )
            try:
                query_url = 'http://archives.esac.esa.int/hsa/aio/jsp/siap.jsp?POS=' + str(
                    ra) + ',' + str(dec) + '&SIZE=' + str(
                        width) + '&INTERSECT=OVERLAPS'
                query_filename = os.path.join(temp_dir, name,
                                              str(name) + '.vot')
                if os.path.exists(query_filename):
                    os.remove(query_filename)
                urllib.request.urlretrieve(query_url, query_filename)
                query_success = True
            except:
                print('HSA query failed; reattempting')
                query_fail_count += 1
                time.sleep(60)
        if not os.path.exists(query_filename):
            query_success = False

        # Read query result VOTable
        query_output = astropy.io.votable.parse_single_table(query_filename)
        query_table = query_output.array

        # Check if query returned any results; if not, create null files (one per band), and continue to next target
        if len(query_table) == 0:
            print('No Herschel coverage for ' + name +
                  '; continuing to next target')
            for band in bands_dict.keys():
                os.system('touch ' +
                          os.path.join(temp_dir, '.' + name + '_Herschel_' +
                                       band + '.null'))
            continue

        # Record which urls correspond to data in the desired modes (dealing with awkwardness for if there is only 1 entry, or silly massive files)
        hsa_urls = []
        if query_table.size == 1:
            if query_table['OBS_MODE'] in req_obs_modes:
                hsa_urls.append(query_table['DATA_ACCESS'])
        else:
            for j in range(0, query_table.size):
                if query_table['OBS_MODE'][j].decode('utf-8') in req_obs_modes:
                    hsa_urls.append(
                        query_table['DATA_LINK'][j].decode('utf-8'))

        # Download and extract files (the parallel pool call below is commented out, so downloads currently run serially)
        os.chdir(os.path.join(gal_dir, 'Raw'))
        dl_pool = mp.Pool(processes=20)
        for j in range(0, len(hsa_urls)):
            data_url = hsa_urls[j]
            data_filename = os.path.join(gal_dir, 'Raw',
                                         name + '_' + str(j) + '_HSA.fits')
            #dl_pool.apply_async( Herschel_Download, args=(data_url, data_filename,) )
            Herschel_Download(data_url, data_filename)
        dl_pool.close()
        dl_pool.join()

        # Loop over bands, and downloaded files (skipping folders), for sorting files into separate folders
        for band in bands_dict.keys():
            prev_hdr_filenames = []
            for listfile in os.listdir(os.path.join(gal_dir, 'Raw')):
                if '.tmp' in listfile:
                    os.remove(os.path.join(gal_dir, 'Raw', listfile))
                    continue
                if '.fits' not in listfile:
                    continue

                # Determine what band this is
                try:
                    list_hdr = astropy.io.fits.getheader(os.path.join(
                        gal_dir, 'Raw', listfile),
                                                         ext=0)
                except:
                    pdb.set_trace()
                if list_hdr['INSTRUME'] == bands_dict[band]['instrument']:
                    if list_hdr[bands_dict[band]
                                ['hdr_inst_card_kwrd']] == bands_dict[band][
                                    'hdr_inst_card_entry']:

                        # Handle the fact that 70um and 100um are hard to tell apart in headers
                        if bands_dict[band]['hdr_blueband_kwrd'] != False:
                            if bands_dict[band][
                                    'hdr_blueband_kwrd'] not in list_hdr[
                                        'BLUEBAND']:
                                continue

                        # Skip dud PACS calibration(?) maps
                        if list_hdr['OBSERVER'][-4:].lower() == 'pacs':
                            os.remove(os.path.join(gal_dir, 'Raw', listfile))
                            continue

                        # Check that we haven't already grabbed a duplicate of this map; if not, move it to band-specific directory
                        if 'FILENAME' in list_hdr.keys():
                            if list_hdr['FILENAME'] in prev_hdr_filenames:
                                os.remove(
                                    os.path.join(gal_dir, 'Raw', listfile))
                                continue
                            else:
                                prev_hdr_filenames.append(list_hdr['FILENAME'])
                        shutil.copy2(os.path.join(gal_dir, 'Raw', listfile),
                                     os.path.join(gal_dir, 'Raw', band))
                        os.remove(os.path.join(gal_dir, 'Raw', listfile))

        # Loop over PACS bands and files to delete dud PACS calibration(?) maps
        for band in bands_dict.keys():
            if bands_dict[band]['instrument'] == 'PACS':
                for listfile in os.listdir(os.path.join(gal_dir, 'Raw', band)):
                    if astropy.io.fits.getheader(
                            os.path.join(gal_dir, 'Raw', band, listfile),
                            ext=0)['OBSERVER'][-4:].lower() == 'pacs':
                        os.remove(os.path.join(gal_dir, 'Raw', band, listfile))

        # Loop over each band's files, to save image map to separate FITS files
        for band in bands_dict.keys():
            for listfile in os.listdir(os.path.join(gal_dir, 'Raw', band)):
                print('Extracting components from ' + band + ' um map ' +
                      listfile)
                if '.tmp' in listfile:
                    pdb.set_trace()

                # Check map has error and coverage data; open if so, skip forward if not
                with astropy.io.fits.open(
                        os.path.join(gal_dir, 'Raw', band,
                                     listfile)) as listfile_hdulist:
                    if len(listfile_hdulist) < 4:
                        print('Some FITS extensions missing from ' + band +
                              ' um map ' + listfile + '; skipping')
                        continue
                img_map, img_header = astropy.io.fits.getdata(os.path.join(
                    gal_dir, 'Raw', band, listfile),
                                                              header=True,
                                                              extname='image')

                # Record which image pixels are zeros, and convert to NaNs
                where_zero = np.where(img_map == 0)
                img_map[where_zero] = np.NaN
                astropy.io.fits.writeto(os.path.join(
                    gal_dir, 'Raw', band,
                    listfile.replace('.fits', '_Img.fits')),
                                        img_map,
                                        header=img_header)

                # Now save coverage and error maps to separate files, with zeros similarly converted to NaNs
                cov_map, cov_header = astropy.io.fits.getdata(
                    os.path.join(gal_dir, 'Raw', band, listfile),
                    header=True,
                    extname='coverage')
                cov_map[where_zero] = np.NaN
                astropy.io.fits.writeto(os.path.join(
                    gal_dir, 'Raw', band,
                    listfile.replace('.fits', '_Cov.fits')),
                                        cov_map,
                                        header=cov_header)
                err_map, err_header = astropy.io.fits.getdata(
                    os.path.join(gal_dir, 'Raw', band, listfile),
                    header=True,
                    extname=bands_dict[band]['hdr_err_ext_name'])
                err_map[where_zero] = np.NaN
                astropy.io.fits.writeto(os.path.join(
                    gal_dir, 'Raw', band,
                    listfile.replace('.fits', '_Error.fits')),
                                        err_map,
                                        header=err_header)

        # Loop over each band for coaddition
        for band in bands_dict.keys():
            if not os.path.exists(os.path.join(gal_dir, 'Raw', band)):
                continue
            if len(os.path.join(gal_dir, 'Raw', band)) == 0:
                continue
            print('Commencing processing of ' + name + '_Herschel_' + band)

            # Create processing directories
            os.chdir(os.path.join(gal_dir, 'Raw', band))
            os.mkdir(os.path.join(gal_dir, 'Raw', band, 'Img_Maps'))
            os.mkdir(os.path.join(gal_dir, 'Raw', band, 'Cov_Maps'))
            os.mkdir(os.path.join(gal_dir, 'Raw', band, 'Err_Maps'))
            os.mkdir(os.path.join(gal_dir, 'Raw', band, 'Exp_Maps'))
            os.mkdir(os.path.join(gal_dir, 'Raw', band, 'Wgt_Temp'))
            os.mkdir(os.path.join(gal_dir, 'Raw', band, 'Pff_Temp'))
            os.mkdir(os.path.join(gal_dir, 'Raw', band, 'Backsub_Temp'))
            os.mkdir(os.path.join(gal_dir, 'Raw', band, 'SWarp_Temp'))

            # Create Montage FITS header
            location_string = str(ra) + ' ' + str(dec)
            pix_size = bands_dict[band]['pix_size']
            montage_wrapper.mHdr(location_string,
                                 width,
                                 os.path.join(gal_dir, 'Raw', band,
                                              str(name) + '.hdr'),
                                 pix_size=pix_size)

            # Use Montage wrapper to reproject all FITS files to common projection, skipping if none actually overlap
            print('Performing reprojections for ' + name + '_Herschel_' +
                  band + ' maps')
            proj_fail = 0
            target_files = [
                target_file
                for target_file in os.listdir(os.path.join(gal_dir, 'Raw', band))
                if '.fits' in target_file
            ]
            for target_file in target_files:
                try:
                    montage_wrapper.reproject(
                        os.path.join(
                            os.path.join(gal_dir, 'Raw', band, target_file)),
                        os.path.join(
                            os.path.join(gal_dir, 'Raw', band, target_file)),
                        header=os.path.join(gal_dir, 'Raw', band,
                                            str(name) + '.hdr'),
                        exact_size=True)
                except:
                    os.remove(
                        os.path.join(
                            os.path.join(gal_dir, 'Raw', band, target_file)))
                    proj_fail += 1
            if proj_fail == len(target_files):
                print('No Herschel coverage for ' + name + ' at ' + band)
                os.system('touch ' + os.path.join(
                    temp_dir, '.' + name + '_Herschel_' + band + '.null'))
                continue

            # Move reprojected maps to relevant locations
            for listfile in os.listdir(os.path.join(gal_dir, 'Raw', band)):
                if '_Img.fits' in os.path.join(gal_dir, 'Raw', band, listfile):
                    shutil.move(os.path.join(gal_dir, 'Raw', band, listfile),
                                os.path.join(gal_dir, 'Raw', band, 'Img_Maps'))
                elif '_Cov.fits' in os.path.join(gal_dir, 'Raw', band,
                                                 listfile):
                    shutil.move(os.path.join(gal_dir, 'Raw', band, listfile),
                                os.path.join(gal_dir, 'Raw', band, 'Cov_Maps'))
                elif '_Error.fits' in os.path.join(gal_dir, 'Raw', band,
                                                   listfile):
                    shutil.move(os.path.join(gal_dir, 'Raw', band, listfile),
                                os.path.join(gal_dir, 'Raw', band, 'Err_Maps'))

            # If only one image file, proceed straight to co-adding; otherwise, commence background-matching
            mosaic_count = 0
            for listfile in os.listdir(
                    os.path.join(gal_dir, 'Raw', band, 'Img_Maps')):
                if '_Img.fits' in listfile:
                    mosaic_count += 1
            if mosaic_count == 1:
                for listfile in os.listdir(
                        os.path.join(gal_dir, 'Raw', band, 'Img_Maps')):
                    if '.fits' in listfile:
                        shutil.move(
                            os.path.join(gal_dir, 'Raw', band, 'Img_Maps',
                                         listfile),
                            os.path.join(gal_dir, 'Raw', band, 'SWarp_Temp'))
                mBgExec_uberfail = False
            if mosaic_count > 1:

                # Use Montage wrapper to determine appropriate corrections for background matching
                print('Determining background corrections for ' + name +
                      '_Herschel_' + band + ' maps')
                os.chdir(os.path.join(gal_dir, 'Raw', band, 'Img_Maps'))
                montage_wrapper.mImgtbl(
                    os.path.join(gal_dir, 'Raw', band, 'Img_Maps'),
                    os.path.join(gal_dir, 'Raw', band, 'Img_Maps',
                                 band + '_Image_Metadata_Table.dat'),
                    corners=True)
                montage_wrapper.mOverlaps(
                    os.path.join(gal_dir, 'Raw', band, 'Img_Maps',
                                 band + '_Image_Metadata_Table.dat'),
                    os.path.join(gal_dir, 'Raw', band, 'Img_Maps',
                                 band + '_Image_Diffs_Table.dat'))
                montage_wrapper.mDiffExec(
                    os.path.join(gal_dir, 'Raw', band, 'Img_Maps',
                                 band + '_Image_Diffs_Table.dat'),
                    os.path.join(gal_dir, 'Raw', band,
                                 str(name) + '.hdr'),
                    os.path.join(gal_dir, 'Raw', band, 'Pff_Temp'),
                    no_area=True,
                    proj_dir=os.path.join(gal_dir, 'Raw', band, 'Img_Maps'))
                montage_wrapper.mFitExec(
                    os.path.join(gal_dir, 'Raw', band, 'Img_Maps',
                                 band + '_Image_Diffs_Table.dat'),
                    os.path.join(gal_dir, 'Raw', band, 'Img_Maps',
                                 band + '_Image_Fitting_Table.dat'),
                    os.path.join(gal_dir, 'Raw', band, 'Pff_Temp'))
                montage_wrapper.mBgModel(
                    os.path.join(gal_dir, 'Raw', band, 'Img_Maps',
                                 band + '_Image_Metadata_Table.dat'),
                    os.path.join(gal_dir, 'Raw', band, 'Img_Maps',
                                 band + '_Image_Fitting_Table.dat'),
                    os.path.join(gal_dir, 'Raw', band, 'Img_Maps',
                                 band + '_Image_Corrections_Table.dat'),
                    level_only=True,
                    n_iter=16384)

                # Apply background corrections using Montage subprocess, with timeout handling
                print('Applying background corrections to ' + name +
                      '_Herschel_' + band + ' maps')
                mBgExec_fail_count = 0
                mBgExec_success = False
                mBgExec_uberfail = False
                while mBgExec_success == False:

                    # Attempt background-matching
                    mBgExec_sp = subprocess.Popen([
                        'mBgExec', '-n', '-p',
                        os.path.join(gal_dir, 'Raw', band, 'Img_Maps'),
                        os.path.join(gal_dir, 'Raw', band, 'Img_Maps',
                                     band + '_Image_Metadata_Table.dat'),
                        os.path.join(gal_dir, 'Raw', band, 'Img_Maps',
                                     band + '_Image_Corrections_Table.dat'),
                        os.path.join(gal_dir, 'Raw', band, 'SWarp_Temp')
                    ],
                                                  preexec_fn=os.setsid,
                                                  stdout=subprocess.PIPE)
                    mBgExec_fail = False
                    seconds = 0
                    minutes_max = 45
                    while mBgExec_fail == False:
                        time.sleep(1)
                        mBgExec_stdout = mBgExec_sp.stdout.readline().decode()
                        if mBgExec_sp.poll() == None:
                            seconds += 1
                        if 'Table has no data records' in mBgExec_stdout:
                            mBgExec_fail = True
                            mBgExec_fail_count += 1
                            break
                        if seconds >= (60 * minutes_max):
                            mBgExec_fail = True
                            mBgExec_fail_count += 1
                            break
                        if mBgExec_sp.poll() != None:
                            mBgExec_success = True
                            break

                    # Handle timeouts and other failures
                    if mBgExec_fail_count > 1:
                        print('Background matching with Montage has failed ' +
                              str(mBgExec_fail_count) +
                              ' time(s); reattempting')
                    if mBgExec_fail == True and mBgExec_success == False and mBgExec_fail_count >= 5:
                        mBgExec_uberfail = True
                        print(
                            'Background matching with Montage has failed 5 times; proceeding directly to co-additon'
                        )
                        try:
                            os.killpg(os.getpgid(mBgExec_sp.pid), 15)
                        except:
                            print('Background matching subprocess appears to have imploded; no task to kill')
                        break
            if mBgExec_uberfail:
                raise Exception(
                    'Background matching with Montage has failed utterly')
                """for listfile in os.listdir(os.path.join(gal_dir,'Raw',band,'Img_Maps')):
                    if '_HSA_Img.fits' in listfile:
                        shutil.move(listfile, os.path.join(gal_dir,'Raw',band,'SWarp_Temp'))"""

            # Create weight maps, and copy to SWarp directory
            for listfile in os.listdir(
                    os.path.join(gal_dir, 'Raw', band, 'Cov_Maps')):
                if '.fits' in listfile:
                    shutil.copy2(
                        os.path.join(gal_dir, 'Raw', band, 'Cov_Maps',
                                     listfile),
                        os.path.join(gal_dir, 'Raw', band, 'SWarp_Temp'))
                    wgt_image, wgt_header = astropy.io.fits.getdata(
                        os.path.join(gal_dir, 'Raw', band, 'Cov_Maps',
                                     listfile),
                        header=True)
                    wgt_image = wgt_image**0.5
                    astropy.io.fits.writeto(os.path.join(
                        gal_dir, 'Raw', band, 'SWarp_Temp',
                        listfile.replace('_Cov.fits', '_Wgt.fits')),
                                            wgt_image,
                                            header=wgt_header)

            # Sort out daft filename differences between image maps and error maps
            for listfile in os.listdir(
                    os.path.join(gal_dir, 'Raw', band, 'SWarp_Temp')):
                os.rename(
                    os.path.join(gal_dir, 'Raw', band, 'SWarp_Temp', listfile),
                    os.path.join(gal_dir, 'Raw', band, 'SWarp_Temp',
                                 listfile.replace('_Img.fits', '.fits')))

            # Perform least-squares plane fitting to match image levels
            ChrisFuncs.Coadd.LevelFITS(os.path.join(gal_dir, 'Raw', band,
                                                    'SWarp_Temp'),
                                       'Img.fits',
                                       convfile_dir=False)

            # Use SWarp to co-add images weighted by their coverage maps
            print('Co-adding ' + name + '_Herschel_' + band + ' maps')
            os.chdir(os.path.join(gal_dir, 'Raw', band, 'SWarp_Temp'))
            os.system(
                'swarp *HSA.fits -IMAGEOUT_NAME ' + name + '_Herschel_' +
                band +
                '_SWarp.fits -WEIGHT_SUFFIX _Wgt.fits -WEIGHT_TYPE MAP_WEIGHT -COMBINE_TYPE WEIGHTED -COMBINE_BUFSIZE 2048 -GAIN_KEYWORD DIESPIZERDIE -RESCALE_WEIGHTS N -SUBTRACT_BACK N -RESAMPLE N -VMEM_MAX 4095 -MEM_MAX 4096 -NTHREADS 4 -VERBOSE_TYPE QUIET'
            )
            Herschel_SWarp_NaN(name + '_Herschel_' + band + '_SWarp.fits')

            # Check that the final map provides actual coverage of the point in question
            coadd_image, coadd_header = astropy.io.fits.getdata(os.path.join(
                gal_dir, 'Raw', band, 'SWarp_Temp',
                name + '_Herschel_' + band + '_SWarp.fits'),
                                                                header=True)
            coadd_wcs = astropy.wcs.WCS(coadd_header)
            coords_xy = np.round(
                coadd_wcs.all_world2pix(np.array([[ra, dec]]), 0)).astype(int)
            coord_i, coord_j = coords_xy[0, 1], coords_xy[0, 0]
            if np.isnan(
                    np.nanmax(coadd_image[coord_i - 2:coord_i + 2 + 1,
                                          coord_j - 2:coord_j + 2 + 1])):
                print('No Herschel coverage for ' + name + ' at ' + band)
                os.system('touch ' + os.path.join(
                    temp_dir, '.' + name + '_Herschel_' + band + '.null'))
                continue

            # Re-project finalised image map using Montage
            montage_wrapper.reproject(
                os.path.join(gal_dir, 'Raw', band, 'SWarp_Temp',
                             name + '_Herschel_' + band + '_SWarp.fits'),
                os.path.join(gal_dir, name + '_Herschel_' + band + '.fits'),
                header=os.path.join(gal_dir, 'Raw', band,
                                    str(name) + '.hdr'),
                exact_size=True)

            # Compress finalised FITS file
            os.chdir(gal_dir)
            if gzip:
                os.system('gzip ' +
                          os.path.join(gal_dir, name + '_Herschel_' + band +
                                       '.fits'))
            print('Completed processing ' + name + '_Herschel_' + band +
                  ' image map')

            # Turn error maps into exposure time maps
            for listfile in os.listdir(
                    os.path.join(gal_dir, 'Raw', band, 'Err_Maps')):
                if '_Error.fits' in listfile:
                    err_image, err_header = astropy.io.fits.getdata(
                        os.path.join(gal_dir, 'Raw', band, 'Err_Maps',
                                     listfile),
                        header=True)
                    err_image = err_image**-2.0
                    astropy.io.fits.writeto(os.path.join(
                        gal_dir, 'Raw', band, 'Exp_Maps',
                        listfile.replace('_Error.fits', '_Exp.fits')),
                                            err_image,
                                            header=err_header)

            # Use Montage to add exposure time images
            print('Processing ' + name + '_Herschel_' + band +
                  ' uncertainty map')
            target_files = [
                dir_file
                for dir_file in os.listdir(os.path.join(gal_dir, 'Raw', band, 'Exp_Maps'))
                if '_Exp.fits' in dir_file
            ]
            for i in range(0, len(target_files)):
                exp_image, exp_header = astropy.io.fits.getdata(os.path.join(
                    gal_dir, 'Raw', band, 'Exp_Maps', target_files[i]),
                                                                header=True)
                if i == 0:
                    add_image = np.zeros(
                        [exp_image.shape[0], exp_image.shape[1]])
                    add_header = exp_header.copy()
                exp_good = np.where(np.isnan(exp_image) == False)
                add_image[exp_good] += exp_image[exp_good]
            add_hdu = astropy.io.fits.PrimaryHDU(data=add_image,
                                                 header=add_header)
            add_hdulist = astropy.io.fits.HDUList([add_hdu])
            astropy.io.fits.writeto(os.path.join(
                gal_dir, 'Raw', band, 'Exp_Maps',
                name + '_Herschel_' + band + '_Exp_Add.fits'),
                                    add_image,
                                    header=add_header,
                                    clobber=True)

            # Re-project final exposure map using Montage
            montage_wrapper.reproject(
                os.path.join(gal_dir, 'Raw', band, 'Exp_Maps',
                             name + '_Herschel_' + band + '_Exp_Add.fits'),
                os.path.join(gal_dir, 'Raw', band, 'Exp_Maps',
                             name + '_Herschel_' + band + '_Exp.fits'),
                header=os.path.join(gal_dir, 'Raw', band,
                                    str(name) + '.hdr'),
                exact_size=True)

            # Convert final exposure time map into error map
            err_image, err_header = astropy.io.fits.getdata(os.path.join(
                gal_dir, 'Raw', band, 'Exp_Maps',
                name + '_Herschel_' + band + '_Exp.fits'),
                                                            header=True)
            err_image[np.where(err_image < 0)] = np.NaN
            err_image = err_image**-0.5
            err_image[np.where(err_image == np.inf)] = np.NaN
            astropy.io.fits.writeto(os.path.join(
                gal_dir, name + '_Herschel_' + band + '_Error.fits'),
                                    err_image,
                                    header=err_header,
                                    clobber=True)

            # Compress finalised exposure time map
            os.chdir(out_dir)
            if gzip:
                os.system('gzip ' + os.path.join(
                    gal_dir, name + '_Herschel_' + band + '_Error.fits'))
            print('Completed processing ' + name + '_Herschel_' + band +
                  ' uncertainty map')

        # In parallel, generate final standardised maps for each band
        pool = mp.Pool(processes=9)
        for key in bands_dict.keys():
            band_dict = bands_dict[key]
            #pool.apply_async( Herschel_Generator, args=(name, ra, dec, temp_dir, out_dir, band_dict, flux, thumbnails, gzip=gzip,) )
            Herschel_Generator(name,
                               ra,
                               dec,
                               temp_dir,
                               out_dir,
                               band_dict,
                               flux,
                               thumbnails,
                               gzip=gzip)
        pool.close()
        pool.join()

        # Clean memory, and return timings (if more than one target being processed)
        gc.collect()
        time_list.append(time.time())
        time_est = ChrisFuncs.TimeEst(time_list, len(name_list))
        if len(name_list) > 1:
            print(
                'Estimated time until Herschel data completed for all targets: '
                + time_est)

        # Tidy up (best as we can)
        gc.collect()
        try:
            shutil.rmtree(temp_dir)
        except:
            ChrisFuncs.RemoveCrawl(temp_dir)
            print(
                'Unable to fully tidy up temporary directory; probably due to NFS locks on network drive'
            )

    # Report completion
    print('Total time elapsed: ' + str((time.time() - time_list[0]) / 3600.0) +
          ' hours')
    print('All available Herschel imagery acquired for all targets')
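
A hedged usage sketch for Run (not part of the original module): the coordinates, target name, and output directory are placeholders, and the Montage and SWarp binaries are assumed to already be on $PATH.

if __name__ == '__main__':
    out_dir = 'Herschel_Cutouts'              # placeholder output directory; Run requires it to exist
    if not os.path.exists(out_dir):
        os.mkdir(out_dir)
    Run(150.4708, 2.2219,                     # placeholder RA and Dec, in decimal degrees
        0.25,                                 # placeholder cutout width, in degrees
        name='Example_Target',
        out_dir=out_dir,
        flux=True,
        thumbnails=False,
        gzip=True)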
    def mosaic_band(self,band,ra,dec,margin,radius,pgc):#,clean=True):
        '''
        Input: source info parameters (ra, dec, margin, radius, pgc)
        Create a mosaic FITS file for the specified band.
        Return: string filename of the resulting mosaic
        '''
        print ("------------------mosaic_band----------------------")
        DEBUG = True
        output = open("rc3_galaxies_outside_SDSS_footprint.txt",'a') # 'a' for append
        unclean = open("rc3_galaxies_unclean","a")
        # filename = "{},{}".format(str(ra),str(dec))
        filename = str(ra)+str(dec)
        #print (margin/radius)
        if (DEBUG) : print ("Querying data that lies inside margin")
        #result = sqlcl.query( "SELECT distinct run,camcol,field FROM PhotoObj WHERE  ra between {0}-{1} and  {0}+{1}and dec between {2}-{3} and  {2}+{3}".format(str(ra),str(margin),str(dec),str(margin))).readlines()
        result = sqlcl.query( "SELECT distinct run,camcol,field FROM PhotoObj WHERE  ra between "+str(ra)+"-"+str(margin)+" and " +str(ra)+"+"+str(margin)+"and dec between "+str(dec)+"-"+str(margin)+" and "+ str(dec)+"+"+str(margin)).readlines()
        clean_result = sqlcl.query( "SELECT distinct run,camcol,field FROM PhotoObj WHERE  CLEAN =1 and ra between "+str(ra)+"-"+str(margin)+" and " +str(ra)+"+"+str(margin)+"and dec between "+str(dec)+"-"+str(margin)+" and "+ str(dec)+"+"+str(margin)).readlines()
        # clean_result = sqlcl.query( "SELECT distinct run,camcol,field FROM PhotoObj WHERE  CLEAN =1 and ra between {0}-{1} and  {0}+{1}and dec between {2}-{3} and  {2}+{3}".format(str(ra),str(margin),str(dec),str(margin))) .readlines()
        clean = True
        print (result)
        print (clean_result)
        if (result[0][5:]=="<html>"):
            print("strange error from SQL server")
            return -1
        if (result[1]=='error_message\n' or clean_result[1]=='error_message\n'):
            # Case where we did more than 60 queries in 1 minute
            time.sleep(60)
            # Results are messed up, need to re-query
            result = sqlcl.query( "SELECT distinct run,camcol,field FROM PhotoObj WHERE  ra between "+str(ra)+"-"+str(margin)+" and " +str(ra)+"+"+str(margin)+"and dec between "+str(dec)+"-"+str(margin)+" and "+ str(dec)+"+"+str(margin)).readlines()
            clean_result = sqlcl.query( "SELECT distinct run,camcol,field FROM PhotoObj WHERE  CLEAN =1 and ra between "+str(ra)+"-"+str(margin)+" and " +str(ra)+"+"+str(margin)+"and dec between "+str(dec)+"-"+str(margin)+" and "+ str(dec)+"+"+str(margin)).readlines()
        if (len(result)!=len(clean_result) and band=='u'):
            # Only print this once, in the u band. If it is unclean in the u band (e.g. cosmic ray, bright star, etc.) then it must be unclean in the other bands too.
            print ("Data contain unclean images")
            clean=False
            unclean.write(str(ra)+"     "+str(dec)+"     "+str(radius)+"     "+pgc+"\n")
            # unclean.write("{}     {}     {}     {} \n".format(str(ra),str(dec),str(radius),pgc))    
        data = []
        count = 0
        for i in result:
            if count > 1:
                fields = i.split(',')
                fields[2] = fields[2][:-1]
                data.append(fields)
            count += 1
        print (data)
        if (len(data)==0 and band=='r'): # you will only encounter a non-footprint galaxy in the init run, because after that we just take the footprint galaxies already mosaicked (init) from the r-band FITS
            if (DEBUG): print ('The given ra, dec of this galaxy does not lie in the SDSS footprint. Onto the next galaxy!')
            # output.write("{}     {}     {}     {} \n".format(str(ra),str(dec),str(radius),pgc))
            output.write(str(ra)+"     "+str(dec)+"     "+str(radius)+"     "+pgc+"\n")
            #sys.exit()
            return -1 #special value reserved for not in SDSS footprint galaxies
        else :
            if (DEBUG): 
                print ( "Complete Query. These data lies within margin: ")
                print (data)
        # os.mkdir(filename)
        # os.chdir(filename)
        #if (os.path.exists(band)):
	    #os.system("rm -r "+band)
        os.mkdir(band)
        os.chdir(band)
        os.mkdir ("raw")
        os.mkdir ("projected")
        os.chdir("raw")
        if (DEBUG): print ("Retrieving data from SDSS SAS server for "+ band +"band")
        for i in data :  
            out = "frame-"+str(band)+"-"+str(i[0]).zfill(6)+"-"+str(i[1])+"-"+str(i[2]).zfill(4)
            os.system("wget http://mirror.sdss3.org/sas/dr10/boss/photoObj/frames/301/"+str(i[0])+"/"+str(i[1])+"/"+out+".fits.bz2")
            os.system("bunzip2 "+out+".fits.bz2")
        os.chdir("../")
        if (DEBUG) : print("Creating mosaic for "+band+" band.")
        outfile_r="SDSS_"+band+"_"+str(ra)+"_"+str(dec)+"r.fits"
        outfile="SDSS_"+band+"_"+str(ra)+"_"+str(dec)+".fits"
        if (len(data)==1):
            # With header info, the length of the processed result list is 1 if there is only 1 field lying in the margin; simply do mSubimage without mosaicking
            # This patch should not be necessary, but the program is apparently not mosaicking for the case where there is only one field.
            print ("Only one field in region of interest")
            os.chdir("raw")
            montage.mSubimage(out+".fits",outfile,ra,dec,2*margin) # mSubImage takes xsize which should be twice the margin (margin measures center to edge of image)
            #os.chdir("../..")
            hdulist = pyfits.open(outfile)
            shutil.move(outfile,"../..")
            os.chdir("../..")
        else:
            montage.mImgtbl("raw","images.tbl")
            montage.mHdr(str(ra)+" "+str(dec),margin,out+".hdr")
            if (DEBUG): print ("Reprojecting images")
            os.chdir("raw")
            montage.mProjExec("../images.tbl","../"+out+".hdr","../projected", "../stats.tbl") 
            os.chdir("..")
            montage.mImgtbl("projected","pimages.tbl")
            os.chdir("projected")
            montage.mAdd("../pimages.tbl","../"+out+".hdr","SDSS_"+out+".fits")
            # outfile_r="SDSS_{}_{}_{}r.fits".format(band,str(ra),str(dec))
            #outfile_r="SDSS_"+band+"_"+str(ra)+"_"+str(dec)+"r.fits"
            montage.mSubimage("SDSS_"+out+".fits",outfile_r,ra,dec,2*margin) # mSubImage takes xsize which should be twice the margin (margin measures center to edge of image)
            shutil.move(outfile_r,os.getcwd()[:-11] )#if change to :-11 then move out of u,g,r,i,z directory, may be more convenient for mJPEG
            if (DEBUG) : print ("Completed Mosaic for " + band)
            os.chdir("../..")
            hdulist = pyfits.open(outfile_r)
        hdulist[0].header['RA']=ra
        hdulist[0].header['DEC']=dec
        hdulist[0].header['RADIUS']=radius
        hdulist[0].header['PGC']=pgc
        hdulist[0].header['NED']=("http://ned.ipac.caltech.edu/cgi-bin/objsearch?objname="+ str(hdulist[0].header['PGC'])+"&extend=no&hconst=73&omegam=0.27&omegav=0.73&corr_z=1&out_csys=Equatorial&out_equinox=J2000.0&obj_sort=RA+or+Longitude&of=pre_text&zv_breaker=30000.0&list_limit=5&img_stamp=YES")
        hdulist[0].header['CLEAN']=clean
        hdulist[0].header['MARGIN']=margin
        
        #if (os.path.exists(outfile)):
            #os.system("rm "+ outfile)
        hdulist.writeto(outfile)
        if (os.path.exists(outfile_r)):
            os.system("rm "+outfile_r)
        #print("Deleting")
        os.system("rm -r "+band+"/")
        print ("Completed Mosaic")
        return outfile 
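
A hedged call sketch for mosaic_band, kept as a comment because the owning class and its sqlcl/pyfits/montage setup are not shown here; the coordinates, margin, radius, and PGC identifier are placeholders.

# Assuming `builder` is an instance of the class that defines mosaic_band:
# r_fits = builder.mosaic_band('r', ra=180.0, dec=0.0,
#                              margin=0.125, radius=0.1, pgc='PGC0000000')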
for ele in bands:
    band =ele
    os.mkdir(band)
    os.chdir(band)
    os.mkdir ("raw")
    os.mkdir ("projected")
    os.chdir("raw")
    if (DEBUG): print ("Retrieving data from SDSS SAS server for "+ band +"band")
    for i in data :  
        out = "frame-"+str(band)+"-"+str(i[0]).zfill(6)+"-"+str(i[1])+"-"+str(i[2]).zfill(4)
        os.system("wget http://data.sdss3.org/sas/dr10/boss/photoObj/frames/301/"+str(i[0])+"/"+ str(i[1]) +"/"+out+".fits.bz2")
        os.system("bunzip2 "+out+".fits.bz2")
    # print (os.getcwd())
    os.chdir("../")
    if (DEBUG) : print("Creating mosaic for " +" "+ band + " band.")
    montage.mImgtbl("raw","images.tbl")
    montage.mHdr(str(ra)+" "+str(dec),radius,out+".hdr")
    if (DEBUG): print ("Reprojecting images")
    # Sometimes the files cannot be found, which results in an empty images.tbl
    # The data files need to be inside raw/ AND unzipped so that Montage detects them as FITS files
    os.chdir("raw")
    montage.mProjExec("../images.tbl","../"+out+".hdr","../projected", "../stats.tbl") 
    os.chdir("..")
    montage.mImgtbl("projected","pimages.tbl")
    # mAdd co-adds the reprojected images using the FITS header template and the mImgtbl list.
    os.chdir("projected")
    montage.mAdd("../pimages.tbl","../"+out+".hdr","SDSS_"+out+".fits")
    montage.mSubimage("SDSS_"+out+".fits","SDSS_"+ele+"_"+str(trunc(ra))+"_"+str(trunc(dec))+".fits",ra,dec,2*margin) # mSubImage takes xsize which should be twice the margin (margin measures center to edge of image)
    shutil.move("SDSS_"+ele+"_"+str(trunc(ra))+"_"+str(trunc(dec))+".fits",os.getcwd()[:-11] )#if change to :-11 then move out of u,g,r,i,z directory, may be more convenient for mJPEG
    if (DEBUG) : print ("Completed Mosaic for " + band)
    os.chdir("../..")
Example #12
			hdr_in.close()
			hdr_out.close()
			os.mkdir(bands)
			os.mkdir(bands+'/raw')
			coor = ascii.read(list_dir+'list_'+bands+'.txt')
				
			for icoor in range(len(coor)):
				if (l_0+1.5 > coor['l'][icoor]/10.0 and l_0-1.5 < coor['l'][icoor]/10.0):
					shutil.copy(data_dir+coor['Name'][icoor], 
						bands+'/raw/'+coor['Name'][icoor])
			
			os.chdir(bands)
			os.mkdir('projected')
			os.mkdir('diffdir')
			os.mkdir('corrdir')
			mt.mImgtbl('raw','rimages.tbl')
			mt.mProjExec('rimages.tbl', '../'+bands+'.hdr', 'projected', 'stats.tbl',
				raw_dir='raw')
			mt.mImgtbl('projected', 'pimages.tbl')
			len_dir = len(os.listdir('projected'))
			if len_dir < 3 :
				mt.mAdd('pimages.tbl', '../'+bands+'.hdr', 
					'../'+sour_name+'_'+bands+'.fits', 
					img_dir='projected')
			else:
				mt.mOverlaps('pimages.tbl', 'diffs.tbl')
				mt.mDiffExec('diffs.tbl',  '../'+bands+'.hdr', 'diffdir', 
					proj_dir = 'projected')
				mt.mFitExec('diffs.tbl', 'fits.tbl', 'diffdir')
				if ((len(os.listdir("diffdir")) > 1 and 
					os.path.getsize("diffdir/"+os.listdir("diffdir")[1]) < 10000) or 
Example #13
def mosaic(input_files, mosaic_file, work_dir, ext=0, background_match=False,
           cdelt=None, density=False, equinox=None, header=None,
           level_only=False, north_aligned=False, postprocess=None,
           preprocess=None, system=None, weights_file=None):
    """Make a mosaic.

    High-level wrapper around several Montage operations similar to
    `montage_wrapper.mosaic`. The main differences are 1) added support for
    preprocessing the input images before reprojection and postprocessing
    the final image after mosaicking, 2) options for using images in total
    flux units instead of flux density (as assumed by Montage), 3) more of
    the `montage_wrapper.mMakeHdr` keywords available for header creation,
    and 4) the `whole` keyword for `montage_wrapper.mProjExec` is
    automatically set to True when `background_match` is True. The latter
    is important since background matching behaves unreliably otherwise.

    Parameters
    ----------
    input_files : list or string
        List of paths to the input images. This may also be the path to a
        directory containing all input images, in which case `input_files`
        will automatically be set to a list of all files in the directory
        ending with ".fits".
    mosaic_file : str
        Path to the output mosaic file. The final mosaic always has the
        same units as the `input_files` images.
    work_dir : str
        Path to the working directory for all intermediate files produced
        by Montage. The directory has the following structure::

          work_dir/
            input/
              Contains either symlinks to `input_files` or new files
              depending on the `preprocess` and `density` keywords.
              Assuming the `density` keyword has been set correctly, these
              images will always be in flux density units.
            reprojected/
              The reprojected images.
            differences/
              Difference calculations for background matching (only if
              `background_match` is True).
            corrected/
              Background-matched images (only if `background_match` is
              True).
            output/
              The intermediate mosaic used to produce the final mosaic
              file, depending on the `density` and `postprocess` keywords.

    background_match : bool, optional
        If True, match the background levels of the reprojected images
        before mosaicking. Automatically sets ``whole = True`` in
        `montage_wrapper.mProjExec`. Default is False.
    cdelt : float, optional
        See `header` and `montage_wrapper.mMakeHdr`. Default is None.
    density : bool, optional
        If True, the input images are in flux density units (i.e., signal
        per unit pixel area). If False (default), the input images are
        assumed to be in units of total flux, and are automatically scaled
        to flux density before reprojection.
    equinox : str, optional
        See `header` and `montage_wrapper.mMakeHdr`. Default is None.
    header : str, optional
        Path to the template header file describing the output mosaic.
        Default is None, in which case a template header is created
        automatically using `montage_wrapper.mMakeHdr` and the `cdelt`,
        `equinox`, `north_aligned`, and `system` keyword arguments.
    level_only : bool, optional
        See `montage_wrapper.mBgModel`. Ignored if `background_match` is
        False. Default is False.
    north_aligned : bool, optional
        See `header` and `montage_wrapper.mMakeHdr`. Default is False.
    postprocess, preprocess : function, optional
        Functions for processing the raw input images before the input
        density images are created (`preprocess`) and after the final
        mosaic is created (`postprocess`). The function arguments should be
        the image data array and the image header
        (`astropy.io.fits.Header`), and the return values should be the
        same. Default is None.
    system : str, optional
        See `header` and `montage_wrapper.mMakeHdr`. Default is None.
    weights_file : str, optional
        Path to output pixel weights file. Pixel weights are derived from
        the final mosaic area file. Weights are normalized to 1, and
        represent coverage of the mosaic area by the input images. Unlike
        Montage area files, regions where the input images overlap are not
        considered. Default is None.

    Returns
    -------
    None

    """
    # Get list of files if input_files is a directory name
    if isinstance(input_files, basestring):
        dirname = os.path.dirname(input_files)
        input_files = [os.path.join(dirname, basename)
                       for basename in os.listdir(dirname)
                       if os.path.splitext(basename)[1] == '.fits']

    # Create working directory
    try:
        os.makedirs(work_dir)
    except OSError:
        shutil.rmtree(work_dir)
        os.makedirs(work_dir)


    # Create input directory, populate it, and get image metadata
    input_dir = os.path.join(work_dir, 'input')
    os.mkdir(input_dir)

    if preprocess or not density or ext>0:
        # Create new input files
        for input_file in input_files:
            data, hdr = astropy.io.fits.getdata(input_file, header=True,
                                                ext=ext)
            if preprocess:
                data, hdr = preprocess(data, hdr)

            if not density:
                # Convert total flux into flux density
                dx, dy = wcs.calc_pixscale(hdr, ref='crpix').arcsec
                pixarea = dx * dy  # arcsec2
                data /= pixarea

            # Write
            basename = os.path.basename(input_file)
            basename = '_density'.join(os.path.splitext(basename))
            new_input_file = os.path.join(input_dir, basename)
            hdu = astropy.io.fits.PrimaryHDU(data=data, header=hdr)
            hdu.writeto(new_input_file, output_verify='ignore')

    else:
        # Symlink existing files
        for input_file in input_files:
            basename = os.path.basename(input_file)
            new_input_file = os.path.join(input_dir, basename)
            os.symlink(input_file, new_input_file)

    input_table = os.path.join(input_dir, 'input.tbl')
    montage.mImgtbl(input_dir, input_table, corners=True)

    # Template header
    if header is None:
        template_header = os.path.join(work_dir, 'template.hdr')
        montage.mMakeHdr(input_table, template_header, cdelt=cdelt,
                         equinox=equinox, north_aligned=north_aligned,
                         system=system)
    else:
        template_header = header

    # Create reprojection directory, reproject, and get image metadata
    proj_dir = os.path.join(work_dir, 'reprojected')
    os.makedirs(proj_dir)
    whole = True if background_match else False
    stats_table = os.path.join(proj_dir, 'mProjExec_stats.log')

    montage.mProjExec(input_table, template_header, proj_dir, stats_table,
                      raw_dir=input_dir, whole=whole)

    reprojected_table = os.path.join(proj_dir, 'reprojected.tbl')
    montage.mImgtbl(proj_dir, reprojected_table, corners=True)

    # Background matching
    if background_match:
        diff_dir = os.path.join(work_dir, 'differences')
        os.makedirs(diff_dir)

        # Find overlaps
        diffs_table = os.path.join(diff_dir, 'differences.tbl')
        montage.mOverlaps(reprojected_table, diffs_table)

        # Calculate differences between overlapping images
        montage.mDiffExec(diffs_table, template_header, diff_dir,
                          proj_dir=proj_dir)

        # Find best-fit plane coefficients
        fits_table = os.path.join(diff_dir, 'fits.tbl')
        montage.mFitExec(diffs_table, fits_table, diff_dir)

        # Calculate corrections
        corr_dir = os.path.join(work_dir, 'corrected')
        os.makedirs(corr_dir)
        corrections_table = os.path.join(corr_dir, 'corrections.tbl')
        montage.mBgModel(reprojected_table, fits_table, corrections_table,
                         level_only=level_only)

        # Apply corrections
        montage.mBgExec(reprojected_table, corrections_table, corr_dir,
                        proj_dir=proj_dir)

        img_dir = corr_dir

    else:
        img_dir = proj_dir


    # Make mosaic
    output_dir = os.path.join(work_dir, 'output')
    os.makedirs(output_dir)

    out_image = os.path.join(output_dir, 'mosaic.fits')
    montage.mAdd(reprojected_table, template_header, out_image,
                 img_dir=img_dir, exact=True)


    # Pixel areas and weights
    if weights_file or not density:
        area_file = '_area'.join(os.path.splitext(out_image))
        area, hdr = astropy.io.fits.getdata(area_file, header=True)  # steradians
        area *= (180/np.pi*3600)**2  # arcsec2
        dx, dy = wcs.calc_pixscale(hdr, ref='crpix').arcsec
        pixarea = dx * dy  # arcsec2
        area = np.clip(area, 0, pixarea)  # Don't care about overlaps
        if weights_file:
            weights = area / pixarea  # Normalize to 1
            hdu = astropy.io.fits.PrimaryHDU(weights, header=hdr)
            try:
                hdu.writeto(weights_file)
            except IOError:
                os.remove(weights_file)
                hdu.writeto(weights_file)


    # Write final mosaic
    dirname = os.path.dirname(mosaic_file)
    try:
        os.makedirs(dirname)
    except OSError:
        pass

    if postprocess or not density:
        # Create new file
        data, hdr = astropy.io.fits.getdata(out_image, header=True)

        if not density:
            # Convert flux density into total flux
            data *= pixarea

        if postprocess:
            data, hdr = postprocess(data, hdr)

        # Write
        hdu = astropy.io.fits.PrimaryHDU(data, header=hdr)
        try:
            hdu.writeto(mosaic_file)
        except IOError:
            os.remove(mosaic_file)
            hdu.writeto(mosaic_file)

    else:
        # Move existing file
        os.rename(out_image, mosaic_file)

    return
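As a usage illustration only (none of these names come from the original source), the wrapper above could be driven like this, with a hypothetical preprocess hook and tile directory:

import glob

def clip_negatives(data, hdr):
    # Example preprocess hook: zero out negative pixels before reprojection.
    data[data < 0] = 0.0
    return data, hdr

input_files = glob.glob('tiles/*.fits')   # hypothetical input tiles
mosaic(input_files, 'ngc0628_mosaic.fits', 'work_ngc0628',
       background_match=True,             # also forces whole=True in mProjExec
       preprocess=clip_negatives,
       weights_file='ngc0628_weights.fits')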
Example #14
    def mosaic_band(self,band,ra,dec,margin,radius,pgc,survey,remove_bkgrd=False):
        '''
        Input: source info parameters (band, ra, dec, margin, radius, pgc, survey).
        Create a mosaic FITS file for the specified band.
        Return: string filename of the resulting mosaic.
        '''
        print ("------------------mosaic_band----------------------")
        print ("Now mosaic_band on {}".format(pgc))
        output = open("../rc3_galaxies_outside_{}_footprint".format(survey.name),'a')  # 'a' for append
        unclean = open("../rc3_galaxies_unclean_{}".format(survey.name),"a")
        filename = str(ra)+str(dec)
        if (DEBUG) : print ("Querying data that lies inside margin")
        print (ra,dec,margin)
        result = survey.data_server.surveyFieldConverter(float(ra),float(dec),float(margin))
        clean_result = survey.data_server.surveyFieldConverter(float(ra),float(dec),float(margin),True)
        clean = True
        if(DEBUG):print ("result: "+str(result))
        if(DEBUG):print ("clean_result: "+str(clean_result))
        
        if (len(result)!=len(clean_result) and band=='u'):
            # Only print this once, in the u band.
            # Assume that if it is unclean in the u band (e.g. cosmic ray, bright star, etc.) then it must be unclean in the other bands too.
            print ("Data contain unclean images")
            clean=False
            unclean.write("{}     {}     {}     {} \n".format(self.rc3_ra,self.rc3_dec,self.rc3_radius,self.pgc))

        if (len(result)==0):             
            if (DEBUG): print ('The given ra, dec of this galaxy does not lie in the survey footprint. Onto the next galaxy!')
            output.write("{}     {}     {}     {} \n".format(str(ra),str(dec),str(radius),str(pgc)))
            return -1 #special value reserved for not in survey footprint galaxies
        else :
            if (DEBUG): 
                print ("Complete query. These data lie within the margin: ")
                print (result)

        os.mkdir(band)
        os.chdir(band)
        os.mkdir ("rawdir")
        os.mkdir ("projdir")
        if (remove_bkgrd):
            os.mkdir ("diffdir")
            os.mkdir ("corrdir")
            # os.mkdir("final")

        if (DEBUG): print ("Retrieving data from server for " + band + " band")
        os.chdir("rawdir")
        out=""
        # Raw Imaging Data naming
        for i in result :  
            if (survey.data_server.name=='Gator'):
                survey.data_server.getData(band,ra,dec,margin,survey)
                out = i # 2MASS designation
                print (out)
            elif (survey.data_server.name=='SkyServer'):
                survey.data_server.getData(band,str(i[0]), str(i[1]),str(i[2]))
                # run-camcol-field
                out = "frame-"+str(band)+"-"+str(i[0]).zfill(6)+"-"+str(i[1])+"-"+str(i[2]).zfill(4)
            elif (survey.data_server.name=='DSSServer'):
                survey.data_server.getData(band,ra,dec,margin)
                # Patch for when we cannot pass in the pgc number in getData of the dssServer class: rename the file here to conform with RC3's filename expectation for the imaging data
                raw_data = glob.glob("raw_*.fits")
                print (raw_data)
                for i in raw_data:
                    os.rename(i,"DSS_{}_{}.fits".format(band, self.pgc))    
                out = "raw_{}_{}".format(band,self.pgc)
                print ("dss_out: "+out)
            else:
                raise TypeError("Missing implementation for data retrieval")
        os.chdir("../")
        if (DEBUG) : print("Creating mosaic for "+band+" band.")
        outfile_r = "{}_{}_{}r.fits".format(survey.name,band,self.pgc)
        outfile = "{}_{}_{}.fits".format(survey.name,band,self.pgc)
        if (len(result)==1):
            # With header info, the processed result list has length 1 when only one field lies in the margin; simply run mSubimage without mosaicking
            print ("Only one field in region of interest")
            os.chdir("rawdir")
            if (DEBUG):print ("m:{}".format(margin))
            try:
                if (DEBUG):print ("2m:{}".format(2*margin))
                if (DEBUG):print ([outfile_r,outfile,ra,dec,2*margin])
                montage.mSubimage(outfile,outfile,ra,dec,2*margin) # mSubImage takes xsize which should be twice the margin (margin measures center to edge of image)
            except(montage.status.MontageError):
                print ("montage_wrapper.status.MontageError: mSubimage: Region outside image.")
                try:  # give it one last chance
                    if (DEBUG):print ("lastchancem:{}".format(margin))
                    montage.mSubimage(out+".fits",outfile,ra,dec,margin)
                except(montage.status.MontageError):
                    print("Doesn't work after trying half the margin, just keep the raw FITS file")
                    if (DEBUG):print (out+".fits")
                    if (DEBUG):print (outfile)
                    shutil.move(out+".fits","../..")
                    os.chdir("../../")
                    os.rename(out+".fits",outfile)
                    os.system("rm -r {}".format(survey.best_band))
                    return outfile
                if (DEBUG):print (os.getcwd())
                os.chdir("../../") #Get out of directory for that galaxy and move on
                os.system("rm -r {}".format(survey.best_band))
                if (DEBUG):print(os.getcwd())
                failed_msubimage = open ("failed_msubimage","a")
                failed_msubimage.write("{}     {}     {}     {} \n".format(str(ra),str(dec),str(radius),str(pgc)))
                return -1 # masking with special value reserved for not in survey footprint galaxies
        
            hdulist = pyfits.open(outfile)
            if (os.path.exists("../../"+outfile)):
                os.system("rm ../../"+outfile)
            shutil.move(outfile,"../..")
            os.chdir("../..")
        else:
            imgtbl="images-rawdir.tbl"
            hdr="template.hdr"
            montage.mImgtbl("rawdir",imgtbl)
            # montage.mHdr(str(ra)+" "+str(dec),margin,out+".hdr")
            montage.mMakeHdr(imgtbl,hdr)
            if (DEBUG): print ("Reprojecting images")
            # os.chdir("rawdir")
            if (DEBUG):print(os.getcwd())
            montage.mProjExec(imgtbl,hdr,"projdir", "stats.tbl",raw_dir="rawdir")#, mpi=enable_mpi,debug=True) 
            if os.listdir("projdir") == []: 
                print ("Projection failed. No projected images produced. Skipping to the next galaxy.")
                os.chdir("../") #Get out of directory for that galaxy and move on
                os.system("rm -r {}".format(survey.best_band))
                failed_projection = open ("failed_projection","a")
                failed_projection.write("{}     {}     {}     {} \n".format(str(ra),str(dec),str(radius),str(pgc)))
                return -1 # masking with special value reserved for not in survey footprint galaxies
            if (remove_bkgrd): 
                if (DEBUG): print ("Calling the bash script containing Montage routines to rectify the background")
                if os.getcwd()[-4:-2]==str(pgc):
                    os.system("bash ../../mosaic.sh")
                else:
                    os.system("bash ../mosaic.sh")
                print ("mSubimage")
                montage.mSubimage("mosaic.fits" ,"mosaic.fits",ra,dec,2*margin) # mSubImage takes xsize which should be twice the margin (margin measures center to edge of image)
                shutil.move("mosaic.fits","../{}".format(outfile_r))  # move the cutout up out of the per-band directory
                if (DEBUG) : print ("Completed Mosaic for " + band)
            else:
                montage.mImgtbl("projdir","pimages.tbl")
                os.chdir("projdir")
                montage.mAdd("../pimages.tbl","../"+hdr,"{}_{}.fits".format(survey.name,out))#, mpi=enable_mpi)
                montage.mSubimage("{}_{}.fits".format(survey.name,out),outfile_r,ra,dec,2*margin) # mSubImage takes xsize which should be twice the margin (margin measures center to edge of image)
                shutil.move(outfile_r,"../../{}".format(outfile_r))  # move the cutout up out of the per-band directory
                if (DEBUG) : print ("Completed Mosaic for " + band)
                os.chdir("..")
            os.chdir("../")
            hdulist = pyfits.open(outfile_r)

        hdulist[0].header['RA']=float(ra)
        hdulist[0].header['DEC']=float(dec)
        hdulist[0].header['RADIUS']=radius
        if (DEBUG):print ("Finished mosaic_band on {}".format(pgc))
        hdulist[0].header['PGC']=pgc
        hdulist[0].header['NED']=("http://ned.ipac.caltech.edu/cgi-bin/objsearch?objname="+ str(hdulist[0].header['PGC'])+"&extend=no&hconst=73&omegam=0.27&omegav=0.73&corr_z=1&out_csys=Equatorial&out_equinox=J2000.0&obj_sort=RA+or+Longitude&of=pre_text&zv_breaker=30000.0&list_limit=5&img_stamp=YES")
        hdulist[0].header['CLEAN']=clean
        hdulist[0].header['MARGIN']=margin

        if (os.path.exists(outfile)):
            os.system("rm "+ outfile)
        hdulist.writeto(outfile)
        if (os.path.exists(outfile_r)):
            os.system("rm "+outfile_r)
        os.system("rm -r {}".format(band))
        return outfile 
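The cutout calls above always pass 2*margin as the mSubimage size (the margin runs from the center to the edge of the image) and fall back to a smaller box when Montage reports that the region lies outside the image. A compact sketch of that retry pattern, with safe_cutout as an illustrative name:

import montage_wrapper as montage

def safe_cutout(in_fits, out_fits, ra, dec, margin):
    # Try the full (2*margin)-degree box first, then fall back to half that size.
    try:
        montage.mSubimage(in_fits, out_fits, ra, dec, 2 * margin)
    except montage.status.MontageError:
        montage.mSubimage(in_fits, out_fits, ra, dec, margin)
    return out_fits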
Example #15
def calc_noise(gal_dir,
               this_noise_dir,
               gal_hdr,
               mosaic_file,
               imtype,
               wttype,
               noisetype,
               window=False):
    """
    Calculate noise values and generate noise mosaic for each galaxy

    Parameters
    ----------
    gal_dir : str path
        Path to temporary directory in which mosaic work is being done
    this_noise_dir : str path
        Path to directory within gal_dir where the noise work is completed
    gal_hdr : ascii file
        File containing the WCS data for each galaxy
    mosaic_file : str path
        Noise mosaic file that will be created
    imtype : str
        Type of input images to be used; e.g., 'int', 'intbgsub'
    wttype : str
        Type of weight images to be used; e.g., 'rrhr'
    noisetype : str
        Label for noise images; e.g., 'noise'
    window : bool
        Window across input images for pixel-by-pixel noise calculation; Default: False
    """

    # locate the input files from which to calculate noise
    input_noise_dir, imfiles = gather_input_images(gal_dir, this_noise_dir,
                                                   imtype)

    #now calculate noise
    print('...calculating noise in each input image...')
    for imfile in imfiles:
        if window:
            get_window_val(imfile, input_noise_dir, imtype, noisetype)
        else:
            get_single_val(imfile, input_noise_dir, imtype, noisetype)

    # gather weight images
    print('...moving weight images...')
    input_noise_wt_dir = gather_weight_images(gal_dir, this_noise_dir, wttype)
    im_dir = input_noise_dir
    wt_dir = input_noise_wt_dir

    # set up directories to house reprojected images
    reproj_noise_dir, reproj_noise_im_dir, reproj_noise_wt_dir = make_dirs(
        this_noise_dir, imtype, wttype, dirtype='reprojected')

    # reproject the noise and weight images
    print('...reprojecting...')
    reproject_images(gal_hdr.hdrfile_ext, im_dir, reproj_noise_im_dir,
                     noisetype)
    reproject_images(gal_hdr.hdrfile_ext, wt_dir, reproj_noise_wt_dir,
                     '{}_weight'.format(noisetype))
    im_dir = reproj_noise_im_dir
    wt_dir = reproj_noise_wt_dir

    # create metadata tables
    reproj_noise_im_tbl = os.path.join(im_dir,
                                       '{}_im_reproj.tbl'.format(noisetype))
    montage.mImgtbl(im_dir, reproj_noise_im_tbl, corners=True)
    reproj_noise_wt_tbl = os.path.join(wt_dir,
                                       '{}_wt_reproj.tbl'.format(noisetype))
    montage.mImgtbl(wt_dir, reproj_noise_wt_tbl, corners=True)

    # set up directories to house weighted images
    weight_dir, im_weight_dir, wt_weight_dir = make_dirs(this_noise_dir,
                                                         imtype,
                                                         wttype,
                                                         dirtype='weighted')

    # weight the images
    print('...weighting...')
    im_weight_dir, wt_weight_dir = weight_images(im_dir,
                                                 wt_dir,
                                                 weight_dir,
                                                 imtype=imtype,
                                                 wttype=wttype,
                                                 noisetype=noisetype)
    im_dir = im_weight_dir
    wt_dir = wt_weight_dir

    # need to add in quadrature, so copy noise files and square the array
    reproj_square_dir = square_images(this_noise_dir, im_dir, noisetype)
    im_dir = reproj_square_dir

    # now coadd the squared images and also create a count image
    mosaic_noise_dir = os.path.join(this_noise_dir, 'mosaic')
    if not os.path.exists(mosaic_noise_dir):
        os.makedirs(mosaic_noise_dir)

    print('...coadding...')
    coadd(gal_hdr.hdrfile,
          mosaic_noise_dir,
          im_dir,
          reproj_noise_im_tbl,
          output='{}_im_squared_weighted'.format(noisetype),
          add_type='mean')
    coadd(gal_hdr.hdrfile,
          mosaic_noise_dir,
          im_dir,
          reproj_noise_im_tbl,
          output='count',
          add_type='count')
    coadd(gal_hdr.hdrfile,
          mosaic_noise_dir,
          wt_dir,
          reproj_noise_wt_tbl,
          output='{}_wt_weighted'.format(noisetype),
          add_type='mean')

    # multiply the coadded squared image by the numbers in the count image to back out of the 'mean'
    mean_mosaic = os.path.join(mosaic_noise_dir,
                               'noise_im_squared_weighted_mosaic.fits')
    count_mosaic = os.path.join(mosaic_noise_dir, 'count_mosaic.fits')
    weight_mosaic = os.path.join(mosaic_noise_dir,
                                 'noise_wt_weighted_mosaic.fits')
    mean, mean_hdr = astropy.io.fits.getdata(mean_mosaic, header=True)
    counts, cnt_hdr = astropy.io.fits.getdata(count_mosaic, header=True)
    wt, wt_hdr = astropy.io.fits.getdata(weight_mosaic, header=True)

    total = mean * counts

    # now take the square root of the final image
    final_val = np.sqrt(total) / wt

    # write the final mosaic to file in the mosaic directory
    astropy.io.fits.writeto(mosaic_file, final_val, mean_hdr)
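The closing lines combine the coadded products pixel by pixel: the 'mean' coadd of the squared, weighted noise images is multiplied by the count mosaic to recover a sum, the square root adds the contributions in quadrature, and the coadded weight image is divided out. A tiny numpy sketch of that arithmetic with made-up numbers:

import numpy as np

# Illustrative 2x2 patch: mean of the squared weighted noise, per-pixel input
# counts, and the coadded weight image (all values are invented).
mean_sq = np.array([[0.010, 0.012], [0.008, 0.011]])
counts = np.array([[3.0, 3.0], [3.0, 2.0]])
wt = np.array([[0.9, 1.0], [1.1, 0.8]])

total = mean_sq * counts        # back out of the 'mean' coadd -> sum of squares
noise = np.sqrt(total) / wt     # add in quadrature, then divide out the weights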
Example #16
def make_mosaic(band='fuv',
                ra_ctr=None,
                dec_ctr=None,
                size_deg=None,
                index=None,
                name=None,
                pgcname=None,
                model_bg=True,
                weight_ims=True,
                convert_mjysr=True,
                desired_pix_scale=GALEX_PIX_AS,
                imtype='intbgsub',
                wttype='rrhr',
                window=False):
    """
    Create a noise mosaic of a galaxy in a single GALEX band.

    Parameters
    ----------
    band : str
        GALEX band to use
    ra_ctr : float
        Central RA of galaxy
    dec_ctr : float
        Central Dec of galaxy
    size_deg : float
        Desired side length of each cutout, in degrees
    index : array, optional
        Structured array containing the galbase information. The default is to read it in inside this code. (Default: None)
    name : str, optional
        Name of the galaxy for which to generate a cutout
    pgcname : str, optional
        PGC name of the galaxy
    model_bg : bool, optional
        Model the background of the mosaicked image (Default: True)
    weight_ims : bool, optional
         weight the input images with the weights images
    convert_mjysr : bool, optional
        convert input images from counts/sec to MJy/sr
    desired_pix_scale : float, optional
        Desired pixel scale of output image. Default is currently set to GALEX pixel scale (Default: 1.5)
    imtype : str, optional
        input image type to use from galex (Default: intbgsub)
    wttype : str, optional
        input weights image type to use from galex (Default: rrhr)
    window : bool, optional
        window across the input images rather than use a single value
    """
    ttype = 'galex'
    data_dir = os.path.join(_TOP_DIR, ttype, 'sorted_tiles')
    problem_file = os.path.join(_WORK_DIR,
                                'problem_galaxies_{}_noise.txt'.format(band))
    numbers_file = os.path.join(_WORK_DIR,
                                'gal_reproj_info_{}_noise.txt'.format(band))

    galaxy_noise_file = os.path.join(
        _MOSAIC_DIR, '_'.join([pgcname, band]).upper() + '_noise.fits')

    if not os.path.exists(galaxy_noise_file):
        start_time = time.time()
        print(pgcname, band.upper())

        # READ THE INDEX FILE (IF NOT PASSED IN)
        if index is None:
            indexfile = os.path.join(_INDEX_DIR, 'galex_index_file.fits')
            ext = 1
            index, hdr = astropy.io.fits.getdata(indexfile, ext, header=True)

        # CALCULATE TILE OVERLAP
        tile_overlaps = calc_tile_overlap(ra_ctr,
                                          dec_ctr,
                                          pad=size_deg,
                                          min_ra=index['MIN_RA'],
                                          max_ra=index['MAX_RA'],
                                          min_dec=index['MIN_DEC'],
                                          max_dec=index['MAX_DEC'])

        # FIND OVERLAPPING TILES WITH RIGHT BAND
        #  index file set up such that index['fuv'] = 1 where fuv and
        #                              index['nuv'] = 1 where nuv
        ind = np.where((index[band]) & tile_overlaps)

        # MAKE SURE THERE ARE OVERLAPPING TILES
        ct_overlap = len(ind[0])
        if ct_overlap == 0:
            with open(problem_file, 'a') as myfile:
                myfile.write(pgcname + ': ' + 'No overlapping tiles\n')
            return

        pix_scale = desired_pix_scale / 3600.  # convert the desired pixel scale from arcsec to degrees

        try:
            # CREATE NEW TEMP DIRECTORY TO STORE TEMPORARY FILES
            gal_dir = os.path.join(_WORK_DIR, '_'.join([pgcname,
                                                        band]).upper())
            os.makedirs(gal_dir)

            # MAKE HEADER AND EXTENDED HEADER AND WRITE TO FILE
            gal_hdr = GalaxyHeader(pgcname,
                                   gal_dir,
                                   ra_ctr,
                                   dec_ctr,
                                   size_deg,
                                   pix_scale,
                                   factor=3)

            # GATHER THE INPUT FILES
            input_dir = os.path.join(gal_dir, 'input')
            if not os.path.exists(input_dir):
                os.makedirs(input_dir)
            nfiles = get_input(index, ind, data_dir, input_dir, hdr=gal_hdr)
            im_dir, wt_dir = input_dir, input_dir

            # WRITE TABLE OF INPUT IMAGE INFORMATION
            input_table = os.path.join(im_dir, 'input.tbl')
            montage.mImgtbl(im_dir, input_table, corners=True)

            if convert_mjysr:
                converted_dir = os.path.join(gal_dir, 'converted')
                if not os.path.exists(converted_dir):
                    os.makedirs(converted_dir)
                convert_to_flux_input(im_dir,
                                      converted_dir,
                                      band,
                                      desired_pix_scale,
                                      imtype=imtype)
                im_dir = converted_dir

            # MASK IMAGES
            masked_dir = os.path.join(gal_dir, 'masked')
            im_masked_dir = os.path.join(masked_dir, imtype)
            wt_masked_dir = os.path.join(masked_dir, wttype)
            for outdir in [masked_dir, im_masked_dir, wt_masked_dir]:
                os.makedirs(outdir)

            mask_images(im_dir,
                        wt_dir,
                        im_masked_dir,
                        wt_masked_dir,
                        imtype=imtype,
                        wttype=wttype)
            im_dir = im_masked_dir
            wt_dir = wt_masked_dir

            # CREATE DIRECTORY FOR NOISE IMAGES
            noise_dir = os.path.join(gal_dir, 'noise')
            if not os.path.exists(noise_dir):
                os.makedirs(noise_dir)

            # CALCULATE NOISE AND GENERATE NOISE MOSAIC CUTOUT
            noisetype = 'noise'
            calc_noise(gal_dir,
                       noise_dir,
                       gal_hdr,
                       galaxy_noise_file,
                       imtype,
                       wttype,
                       noisetype,
                       window=window)

            # REMOVE TEMP GALAXY DIRECTORY AND EXTRA FILES
            shutil.rmtree(gal_dir, ignore_errors=True)

            # NOTE TIME TO FINISH
            stop_time = time.time()
            total_time = (stop_time - start_time) / 60.

            # WRITE OUT THE NUMBER OF TILES THAT OVERLAP THE GIVEN GALAXY
            out_arr = [pgcname, band.upper(), nfiles, np.around(total_time, 2)]
            with open(numbers_file, 'a') as nfile:
                nfile.write('{0: >10}'.format(out_arr[0]))
                nfile.write('{0: >6}'.format(out_arr[1]))
                nfile.write('{0: >6}'.format(out_arr[2]))
                nfile.write('{0: >6}'.format(out_arr[3]) + '\n')

        # SOMETHING WENT WRONG -- WRITE ERROR TO FILE
        except Exception as inst:
            me = sys.exc_info()[0]
            with open(problem_file, 'a') as myfile:
                myfile.write(pgcname + ': ' + str(me) + ': ' + str(inst) +
                             '\n')
            shutil.rmtree(gal_dir, ignore_errors=True)

    return
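An illustrative call (the galaxy name, PGC designation, and coordinates below are placeholders, not values from the original pipeline):

make_mosaic(band='nuv',
            ra_ctr=24.174, dec_ctr=15.783,   # placeholder center, in degrees
            size_deg=0.5,
            name='NGC0628', pgcname='PGC005974',
            imtype='intbgsub', wttype='rrhr',
            window=False)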
Example #17
def IRIS_Query(name,
               ra,
               dec,
               width,
               band,
               bands_dict,
               temp_dir,
               montage_path=None):

    # If Montage commands directory provided, append it to path
    try:
        import montage_wrapper
    except:
        sys.path.append(montage_path)
        os.environ['PATH'] = os.environ['PATH'] + ':' + montage_path
        import montage_wrapper

    # Generate list of all IRIS plate fields in this band (which take the form IYYYBXH0.fits, where YYY is the field number between 001 and 430, and X is the band number between 1 and 4)
    iris_url = 'https://irsa.ipac.caltech.edu/data/IRIS/images/'
    iris_fields = np.arange(1, 431).astype(str)
    iris_fields = [
        ''.join(['I', field.zfill(3), 'BXH0']) for field in iris_fields
    ]

    # Check if a folder for the raw IRIS plates exists in the temporary directory; if not, create it
    print('Ensuring all raw ' + bands_dict[band]['wavelength'] +
          'um IRAS-IRIS plates are available')
    band = bands_dict[band]['wavelength']
    raw_dir = os.path.join(temp_dir, 'Raw', band)
    if not os.path.exists(raw_dir):
        os.makedirs(raw_dir)

    # Look to see if all IRIS fields for this band are already present in the temporary directory; if not, wget them
    wget_list = []
    for iris_field in np.random.permutation(iris_fields):
        iris_ref_file = iris_field.replace(
            'X', bands_dict[band]['band_num']) + '.fits'
        iris_ref_path = os.path.join(raw_dir, iris_ref_file)
        if not os.path.exists(iris_ref_path):
            wget_list.append([iris_url + iris_ref_file, iris_ref_path])
    if len(wget_list) > 0:
        print(
            'Downloading raw ' + bands_dict[band]['wavelength'] +
            'um IRAS-IRIS plates (note that this will entail downloading up to ~4GB of data)'
        )
        if mp.current_process().name == 'MainProcess':
            joblib.Parallel( n_jobs=mp.cpu_count()-2 )\
                           ( joblib.delayed( IRIS_wget )\
                           ( wget_list[w][0], wget_list[w][1] )\
                           for w in range(len(wget_list)) )
        else:
            for w in range(len(wget_list)):
                os.system('curl ' + wget_list[w][0] + ' -o ' + '"' +
                          wget_list[w][1] + '"')

    # If image metadata table doesn't yet exist for this band, run mImgtbl over raw data to generate it
    mImgtbl_tablepath = os.path.join(raw_dir,
                                     'IRIS_' + band + '_Metadata_Table.tbl')
    if os.path.exists(mImgtbl_tablepath):
        os.remove(mImgtbl_tablepath)
    montage_wrapper.mImgtbl(raw_dir, mImgtbl_tablepath, corners=True)

    # Now that we know we have data, set up processing for this source in particular
    print('Computing overlap of ' + bands_dict[band]['wavelength'] +
          'um IRAS-IRIS plates with ' + name)
    ra, dec, width = float(ra), float(dec), float(width)
    pix_size = bands_dict[band]['pix_size']

    # Find which plates have coverage over our target region
    mCoverageCheck_tablepath = os.path.join(
        raw_dir, u'IRIS_' + band + '_Coverage_Table.tbl')
    if os.path.exists(mCoverageCheck_tablepath):
        os.remove(mCoverageCheck_tablepath)
    montage_wrapper.mCoverageCheck(mImgtbl_tablepath,
                                   mCoverageCheck_tablepath,
                                   ra=ra,
                                   dec=dec,
                                   mode='box',
                                   width=width)

    # Read in coverage table; if no coverage, write null output file and stop here
    print('Reprojecting IRAS-IRIS ' + bands_dict[band]['wavelength'] +
          'um plates that cover ' + name)
    mCoverageCheck_table = np.genfromtxt(mCoverageCheck_tablepath,
                                         skip_header=3,
                                         dtype=None,
                                         encoding=None)
    if mCoverageCheck_table.size == 0:
        os.system('touch ' +
                  os.path.join(temp_dir, '.' + name + '_IRAS-IRIS_' + band +
                               '.null'))
        print('No IRAS-IRIS ' + band + 'um data for ' + name)
        return
    reproj_dir = os.path.join(temp_dir, 'Reproject', band)
    if not os.path.exists(reproj_dir):
        os.makedirs(reproj_dir)

    # Extract paths from coverage table, with handling for weird astropy behavior when table has only one row
    if mCoverageCheck_table.size == 1:
        raw_paths = [mCoverageCheck_table['f36'].tolist()]
    else:
        raw_paths = [
            str(mCoverageCheck_table['f36'][i])
            for i in range(mCoverageCheck_table['f36'].size)
        ]
    reproj_paths = [
        raw_paths[i].replace(raw_dir, reproj_dir)
        for i in range(len(raw_paths))
    ]
    reproj_hdr = FitsHeader(ra, dec, width, pix_size)

    # Reproject identified plates in turn (dealing with possible corrupt downloads and the unnecessary third axis)
    for i in range(len(raw_paths)):
        raw_path, reproj_path = raw_paths[i], reproj_paths[i]
        try:
            raw_img, raw_hdr = astropy.io.fits.getdata(raw_path,
                                                       header=True,
                                                       memmap=False)
        except:
            raw_url = iris_url + raw_path.split('/')[-1]
            os.system('curl ' + raw_url + ' -o ' + '"' + raw_path + '"')
            raw_img, raw_hdr = astropy.io.fits.getdata(raw_path,
                                                       header=True,
                                                       memmap=False)
        raw_hdr.set('NAXIS', value=2)
        raw_hdr.remove('NAXIS3')
        raw_hdr.remove('CRVAL3')
        raw_hdr.remove('CRPIX3')
        raw_hdr.remove('CTYPE3')
        raw_hdr.remove('CDELT3')
        raw_hdu = astropy.io.fits.PrimaryHDU(data=raw_img, header=raw_hdr)
        reproj_img = reproject.reproject_exact(raw_hdu,
                                               reproj_hdr,
                                               parallel=False)[0]
        astropy.io.fits.writeto(reproj_path,
                                data=reproj_img,
                                header=reproj_hdr,
                                overwrite=True)
        del (raw_hdu)
        del (raw_img)
        del (raw_hdr)
        gc.collect()

    # Now mosaic the reprojected images
    mosaic_list = []
    [
        mosaic_list.append(astropy.io.fits.getdata(reproj_path))
        for reproj_path in reproj_paths
    ]
    mosaic_array = np.array(mosaic_list)
    mosaic_img = np.nanmean(mosaic_array, axis=0)
    mosaic_hdr = FitsHeader(ra, dec, width, pix_size)
    """# Write finished mosaic to file
    astropy.io.fits.writeto(os.path.join(temp_dir,name+'_IRAS-IRIS_'+band+'.fits'), data=mosaic_img, header=mosaic_hdr, overwrite=True)"""

    # Check that target coords have coverage in mosaic
    mosaic_wcs = astropy.wcs.WCS(mosaic_hdr)
    mosaic_centre = mosaic_wcs.all_world2pix([[ra]], [[dec]],
                                             0,
                                             ra_dec_order=True)
    mosaic_i, mosaic_j = mosaic_centre[1][0], mosaic_centre[0][0]
    if np.isnan(mosaic_img[int(np.round(mosaic_i)), int(np.round(mosaic_j))]):
        os.system('touch ' +
                  os.path.join(temp_dir, '.' + name + '_IRAS-IRIS_' + band +
                               '.null'))
        print('No IRAS-IRIS ' + band + 'um data for ' + name)

    # If mosaic is good, write it to temporary directory
    else:
        astropy.io.fits.writeto(os.path.join(
            temp_dir, name + '_IRAS-IRIS_' + band + '.fits'),
                                data=mosaic_img,
                                header=mosaic_hdr,
                                overwrite=True)
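For illustration only, a call to the routine above might look like the following. The layout of bands_dict (wavelength, band_num, and pix_size keys) is inferred from how the function indexes it; the band label, pixel size, paths, and Montage location are assumptions rather than values from the original source.

# Keyed by wavelength so the lookups still resolve after the function
# re-assigns band = bands_dict[band]['wavelength'].
bands_dict = {'100': {'wavelength': '100',  # appears in messages, paths, and URLs
                      'band_num': '4',      # substituted for 'X' in the plate names
                      'pix_size': 90.0}}    # assumed IRIS pixel size, in arcsec

IRIS_Query('NGC0628', 24.174, 15.783, 1.0, '100', bands_dict,
           temp_dir='/tmp/iris_work',
           montage_path='/usr/local/montage/bin')  # hypothetical Montage install path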
Example #18
def galex(band='fuv',
          ra_ctr=None,
          dec_ctr=None,
          size_deg=None,
          index=None,
          name=None,
          pgcname=None,
          model_bg=False,
          weight_ims=False,
          convert_mjysr=False,
          desired_pix_scale=GALEX_PIX_AS,
          imtype='int',
          wttype='rrhr'):
    """
    Create cutouts of a galaxy in a single GALEX band.

    Parameters
    ----------
    band : str
        GALEX band to use
    ra_ctr : float
        Central RA of galaxy
    dec_ctr : float
        Central Dec of galaxy
    size_deg : float
        Desired side length of each cutout, in degrees
    index : array, optional
        Structured array containing the galbase information. The default is to read it in inside this code. (Default: None)
    name : str, optional
        Name of the galaxy for which to generate a cutout
    model_bg : bool, optional
        Model the background of the mosaiced image (Default: False)
    weight_ims : bool, optional
         weight the input images with the weights images
    convert_mjysr : bool, optional
        convert input images from counts/sec to MJy/sr
    desired_pix_scale : float, optional
        Desired pixel scale of output image. Default is currently set to GALEX pixel scale (Default: 1.5)
    imtype : str, optional
        input image type to use from galex (Default: int)
    wttype : str, optional
        input weights image type to use from galex (Default: rrhr)
    """
    ttype = 'galex'
    data_dir = os.path.join(_TOP_DIR, ttype, 'sorted_tiles')
    problem_file = os.path.join(_WORK_DIR, 'problem_galaxies_{}.txt'.format(
        band))  # 'problem_galaxies_' + band + '.txt')
    bg_reg_file = os.path.join(_WORK_DIR, 'galex_reprojected_bg.reg')
    numbers_file = os.path.join(_WORK_DIR, 'gal_reproj_info_{}.txt'.format(
        band))  # 'gal_reproj_info_' + band + '.dat')

    galaxy_mosaic_file = os.path.join(_MOSAIC_DIR,
                                      '_'.join([name, band]).upper() + '.FITS')

    if not os.path.exists(galaxy_mosaic_file):
        start_time = time.time()
        print(name, band.upper())

        # READ THE INDEX FILE (IF NOT PASSED IN)
        if index is None:
            indexfile = os.path.join(_INDEX_DIR, 'galex_index_file.fits')
            ext = 1
            index, hdr = astropy.io.fits.getdata(indexfile, ext, header=True)

        # CALCULATE TILE OVERLAP
        tile_overlaps = calc_tile_overlap(ra_ctr,
                                          dec_ctr,
                                          pad=size_deg,
                                          min_ra=index['MIN_RA'],
                                          max_ra=index['MAX_RA'],
                                          min_dec=index['MIN_DEC'],
                                          max_dec=index['MAX_DEC'])

        # FIND OVERLAPPING TILES WITH RIGHT BAND
        #  index file set up such that index['fuv'] = 1 where fuv and
        #                              index['nuv'] = 1 where nuv
        ind = np.where((index[band]) & tile_overlaps)

        # MAKE SURE THERE ARE OVERLAPPING TILES
        ct_overlap = len(ind[0])
        if ct_overlap == 0:
            with open(problem_file, 'a') as myfile:
                myfile.write(name + ': ' + 'No overlapping tiles\n')
            return

        pix_scale = desired_pix_scale / 3600.  # convert the desired pixel scale from arcsec to degrees

        try:
            # CREATE NEW TEMP DIRECTORY TO STORE TEMPORARY FILES
            gal_dir = os.path.join(_WORK_DIR, '_'.join([name, band]).upper())
            os.makedirs(gal_dir)

            # MAKE HEADER AND EXTENDED HEADER AND WRITE TO FILE
            gal_hdr = GalaxyHeader(name,
                                   gal_dir,
                                   ra_ctr,
                                   dec_ctr,
                                   size_deg,
                                   pix_scale,
                                   factor=3)

            # GATHER THE INPUT FILES
            input_dir = os.path.join(gal_dir, 'input')
            os.makedirs(input_dir)
            nfiles = get_input(index, ind, data_dir, input_dir, hdr=gal_hdr)
            im_dir, wt_dir = input_dir, input_dir

            # WRITE TABLE OF INPUT IMAGE INFORMATION
            input_table = os.path.join(im_dir, 'input.tbl')
            montage.mImgtbl(im_dir, input_table, corners=True)

            #set_trace()
            if convert_mjysr:
                converted_dir = os.path.join(gal_dir, 'converted')
                if not os.path.exists(converted_dir):
                    os.makedirs(converted_dir)
                convert_to_flux_input(im_dir,
                                      converted_dir,
                                      band,
                                      desired_pix_scale,
                                      imtype=imtype)
                im_dir = converted_dir

            # MASK IMAGES
            masked_dir = os.path.join(gal_dir, 'masked')
            im_masked_dir = os.path.join(masked_dir, imtype)
            wt_masked_dir = os.path.join(masked_dir, wttype)
            for outdir in [masked_dir, im_masked_dir, wt_masked_dir]:
                os.makedirs(outdir)

            mask_images(im_dir,
                        wt_dir,
                        im_masked_dir,
                        wt_masked_dir,
                        imtype=imtype,
                        wttype=wttype)
            im_dir = im_masked_dir
            wt_dir = wt_masked_dir

            # REPROJECT IMAGES WITH EXTENDED HEADER
            reprojected_dir = os.path.join(gal_dir, 'reprojected')
            reproj_im_dir = os.path.join(reprojected_dir, imtype)
            reproj_wt_dir = os.path.join(reprojected_dir, wttype)
            for outdir in [reprojected_dir, reproj_im_dir, reproj_wt_dir]:
                os.makedirs(outdir)

            reproject_images(gal_hdr.hdrfile_ext, im_dir, reproj_im_dir,
                             imtype)
            reproject_images(gal_hdr.hdrfile_ext, wt_dir, reproj_wt_dir,
                             wttype)
            im_dir = reproj_im_dir
            wt_dir = reproj_wt_dir

            # MODEL THE BACKGROUND IN THE IMAGE FILES WITH THE EXTENDED HEADER
            if model_bg:
                bg_model_dir = os.path.join(gal_dir, 'background_model')
                diff_dir = os.path.join(bg_model_dir, 'differences')
                corr_dir = os.path.join(bg_model_dir, 'corrected')
                for outdir in [bg_model_dir, diff_dir, corr_dir]:
                    os.makedirs(outdir)
                bg_model(im_dir,
                         bg_model_dir,
                         diff_dir,
                         corr_dir,
                         gal_hdr.hdrfile_ext,
                         im_type=imtype,
                         level_only=False)
                im_dir = os.path.join(corr_dir, 'int')

            # WEIGHT IMAGES
            if weight_ims:
                weight_dir = os.path.join(gal_dir, 'weighted')
                im_weight_dir = os.path.join(weight_dir, imtype)
                wt_weight_dir = os.path.join(weight_dir, wttype)
                for outdir in [weight_dir, im_weight_dir, wt_weight_dir]:
                    os.makedirs(outdir)

                weight_images(im_dir,
                              wt_dir,
                              weight_dir,
                              im_weight_dir,
                              wt_weight_dir,
                              imtype=imtype,
                              wttype=wttype)
                im_dir = im_weight_dir
                wt_dir = wt_weight_dir

            # CREATE THE METADATA TABLES NEEDED FOR COADDITION
            weight_table = create_table(wt_dir, dir_type='weights')
            weighted_table = create_table(im_dir, dir_type='int')

            # COADD THE REPROJECTED, WEIGHTED IMAGES AND THE WEIGHT IMAGES WITH THE REGULAR HEADER FILE
            penultimate_dir = os.path.join(gal_dir, 'large_mosaic')
            final_dir = os.path.join(gal_dir, 'mosaic')
            for outdir in [penultimate_dir, final_dir]:
                os.makedirs(outdir)

            coadd(gal_hdr.hdrfile,
                  penultimate_dir,
                  im_dir,
                  output='int',
                  add_type='mean')
            coadd(gal_hdr.hdrfile,
                  penultimate_dir,
                  wt_dir,
                  output='weights',
                  add_type='mean')

            # DIVIDE OUT THE WEIGHTS AND CONVERT TO MJY/SR
            imagefile, wtfile = finish_weight(penultimate_dir)

            #if convert_mjysr:
            #    convert_to_flux_final(imagefile, band, desired_pix_scale)

            # SUBTRACT OUT THE BACKGROUND
            rm_overall_bg = False
            if rm_overall_bg:
                remove_background(final_dir, imagefile, bg_reg_file)
            else:
                outfile = os.path.join(final_dir, 'final_mosaic.fits')
                shutil.copy(imagefile, outfile)

            # copy weights mosaic to final directory
            shutil.copy(wtfile, os.path.join(final_dir, 'weights_mosaic.fits'))

            # COPY MOSAIC FILES TO CUTOUTS DIRECTORY
            mosaic_file = os.path.join(final_dir, 'final_mosaic.fits')
            weight_file = os.path.join(final_dir, 'weights_mosaic.fits')

            newsuffs = ['.FITS', '_weight.FITS']
            oldfiles = [mosaic_file, weight_file]
            newfiles = ['_'.join([name, band]).upper() + s for s in newsuffs]

            for files in zip(oldfiles, newfiles):
                shutil.copy(files[0], os.path.join(_MOSAIC_DIR, files[1]))

            # REMOVE TEMP GALAXY DIRECTORY AND EXTRA FILES
            shutil.rmtree(gal_dir, ignore_errors=True)

            # NOTE TIME TO FINISH
            stop_time = time.time()
            total_time = (stop_time - start_time) / 60.

            # WRITE OUT THE NUMBER OF TILES THAT OVERLAP THE GIVEN GALAXY
            out_arr = [name, band.upper(), nfiles, np.around(total_time, 2)]
            with open(numbers_file, 'a') as nfile:
                nfile.write('{0: >10}'.format(out_arr[0]))
                nfile.write('{0: >6}'.format(out_arr[1]))
                nfile.write('{0: >6}'.format(out_arr[2]))
                nfile.write('{0: >6}'.format(out_arr[3]) + '\n')
                #nfile.write(name + ': ' + str(len(infiles)) + '\n')

        # SOMETHING WENT WRONG -- WRITE ERROR TO FILE
        except Exception as inst:
            me = sys.exc_info()[0]
            with open(problem_file, 'a') as myfile:
                myfile.write(name + ': ' + str(me) + ': ' + str(inst) + '\n')
            shutil.rmtree(gal_dir, ignore_errors=True)

    return
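An illustrative driver (paths, galaxy name, and coordinates are placeholders): the index file can be read once and passed in so repeated calls do not re-open it.

import astropy.io.fits

index = astropy.io.fits.getdata('/data/galex_index_file.fits', 1)  # hypothetical path
for b in ['fuv', 'nuv']:
    galex(band=b, ra_ctr=24.174, dec_ctr=15.783, size_deg=0.5,
          index=index, name='NGC0628', pgcname='PGC005974',
          model_bg=True, weight_ims=True, convert_mjysr=True)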
Example #19
def _montage_test():
    # create density images

    input_dir = os.path.dirname(density_files[0])

    # image metadata
    meta1_file = os.path.join(input_dir, 'meta1.tbl')
    montage.mImgtbl(input_dir, meta1_file, corners=True)

    # make header
    #lon, lat = [], []
    #for density_file in density_files:
    #    data, hdr = astropy.io.fits.getdata(density_file, header=True)
    #    wcs = astropy.wcs.WCS(hdr)
    #    x1, y1 = 0.5, 0.5
    #    y2, x2 = data.shape
    #    x2, y2 = x2 + 0.5, y2 + 0.5
    #    x, y = [x1, x2, x2, x1], [y1, y1, y2, y2]
    #    ln, lt = wcs.wcs_pix2world(x, y, 1)
    #    lon += list(ln)
    #    lat += list(lt)
    #lon1, lon2 = np.min(lon), np.max(lon)
    #lat1, lat2 = np.min(lat), np.max(lat)
    hdr_file = os.path.join(os.path.dirname(input_dir), 'test.hdr')
    montage.mMakeHdr(meta1_file, hdr_file)

    # reproject
    proj_dir = os.path.dirname(proj_files[0])
    safe_mkdir(proj_dir)
    stats_file = os.path.join(proj_dir, 'stats.tbl')
    montage.mProjExec(meta1_file, hdr_file, proj_dir, stats_file,
                      raw_dir=input_dir, exact=True)

    # image metadata
    meta2_file = os.path.join(proj_dir, 'meta2.tbl')
    montage.mImgtbl(proj_dir, meta2_file, corners=True)

    # Background modeling
    diff_dir = os.path.join(os.path.dirname(proj_dir), 'difference')
    safe_mkdir(diff_dir)
    diff_file = os.path.join(diff_dir, 'diffs.tbl')
    montage.mOverlaps(meta2_file, diff_file)
    montage.mDiffExec(diff_file, hdr_file, diff_dir, proj_dir)
    fits_file = os.path.join(diff_dir, 'fits.tbl')
    montage.mFitExec(diff_file, fits_file, diff_dir)

    # Background matching
    corr_dir = os.path.join(os.path.dirname(proj_dir), 'correct')
    safe_mkdir(corr_dir)
    corr_file = os.path.join(corr_dir, 'corrections.tbl')
    montage.mBgModel(meta2_file, fits_file, corr_file, level_only=False)
    montage.mBgExec(meta2_file, corr_file, corr_dir, proj_dir=proj_dir)

    # Native mosaic
    projadd_file = config.path('{:s}.reproject.add'.format(kind))
    projadd_dir, filename = os.path.split(projadd_file)
    filename, ext = os.path.splitext(filename)
    filename = '{0:s}_native{1:s}'.format(filename, ext)
    projaddnative_file = os.path.join(projadd_dir, filename)
    safe_mkdir(projadd_dir)
    montage.mAdd(meta2_file, hdr_file, projaddnative_file, img_dir=corr_dir, exact=True)

    # Reproject to final header
    header_file = config.path('{:s}.hdr'.format(kind))
    montage.mProject(projaddnative_file, projadd_file, header_file)

    # Postprocess
    data, hdr = astropy.io.fits.getdata(projaddnative_file, header=True)
    x1, x2 = 900, 1900
    y1, y2 = 3000, 4500
    val = np.mean(data[y1:y2,x1:x2])

    data, hdr = astropy.io.fits.getdata(projadd_file, header=True)
    data = data - val
    areaadd_file = config.path('{:s}.area.add'.format(kind))
    area = astropy.io.fits.getdata(areaadd_file) * (180/np.pi*3600)**2 # arcsec2
    data = data * area

    add_file = config.path('{:s}.add'.format(kind))
    dirname = os.path.dirname(add_file)
    safe_mkdir(dirname)
    if os.path.exists(add_file):
        os.remove(add_file)
    hdu = astropy.io.fits.PrimaryHDU(data, header=hdr)
    hdu.writeto(add_file)
Example #20
File: rgb.py  Project: migueldvb/aplpy
def make_rgb_cube(files, output, north=False, system=None, equinox=None):
    '''
    Make an RGB data cube from a list of three FITS images.

    This method can read in three FITS files with different
    projections/sizes/resolutions and uses Montage to reproject
    them all to the same projection.

    Two files are produced by this function. The first is a three-dimensional
    FITS cube with a filename given by `output`, where the third dimension
    contains the different channels. The second is a two-dimensional FITS
    image with a filename given by `output` with a `_2d` suffix. This file
    contains the mean of the different channels, and is required as input to
    FITSFigure if show_rgb is subsequently used to show a color image
    generated from the FITS cube (to provide the correct WCS information to
    FITSFigure).

    Parameters
    ----------

    files : tuple or list
       A list of the filenames of the three FITS files to reproject.
       The order is red, green, blue.

    output : str
       The filename of the output RGB FITS cube.

    north : bool, optional
       By default, the FITS header generated by Montage represents the
       best fit to the images, often resulting in a slight rotation. If
       you want north to be straight up in your final mosaic, you should
       use this option.

    system : str, optional
       Specifies the system for the header (default is EQUJ).
       Possible values are: EQUJ EQUB ECLJ ECLB GAL SGAL

    equinox : str, optional
       If a coordinate system is specified, the equinox can also be given
       in the form YYYY. Default is J2000.
    '''

    # Check whether the Python montage module is installed. The Python module
    # checks itself whether the Montage command-line tools are available, and
    # if they are not then importing the Python module will fail.
    try:
        import montage_wrapper as montage
    except ImportError:
        raise Exception("Both the Montage command-line tools and the"
                        " montage-wrapper Python module are required"
                        " for this function")

    # Check that input files exist
    for f in files:
        if not os.path.exists(f):
            raise Exception("File does not exist : " + f)

    # Create work directory
    work_dir = tempfile.mkdtemp()

    raw_dir = '%s/raw' % work_dir
    final_dir = '%s/final' % work_dir

    images_raw_tbl = '%s/images_raw.tbl' % work_dir
    header_hdr = '%s/header.hdr' % work_dir

    # Create raw and final directory in work directory
    os.mkdir(raw_dir)
    os.mkdir(final_dir)

    # Create symbolic links to input files
    for i, f in enumerate(files):
        os.symlink(os.path.abspath(f), '%s/image_%i.fits' % (raw_dir, i))

    # List files and create optimal header
    montage.mImgtbl(raw_dir, images_raw_tbl, corners=True)
    montage.mMakeHdr(images_raw_tbl,
                     header_hdr,
                     north_aligned=north,
                     system=system,
                     equinox=equinox)

    # Read header in with astropy.io.fits
    header = fits.Header.fromtextfile(header_hdr)

    # Find image dimensions
    nx = int(header['NAXIS1'])
    ny = int(header['NAXIS2'])

    # Generate empty data cube
    image_cube = np.zeros((len(files), ny, nx), dtype=np.float32)

    # Loop through files
    for i in range(len(files)):

        # Reproject channel to optimal header
        montage.reproject('%s/image_%i.fits' % (raw_dir, i),
                          '%s/image_%i.fits' % (final_dir, i),
                          header=header_hdr,
                          exact_size=True,
                          bitpix=-32)

        # Read in and add to datacube
        image_cube[i, :, :] = fits.getdata('%s/image_%i.fits' % (final_dir, i))

    # Write out final cube
    fits.writeto(output, image_cube, header, overwrite=True)

    # Write out collapsed version of cube
    fits.writeto(output.replace('.fits', '_2d.fits'),
                 np.mean(image_cube, axis=0), header, overwrite=True)

    # Remove work directory
    shutil.rmtree(work_dir)
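
A hedged usage sketch for the make_rgb_cube example above. The file names are placeholders, and aplpy.make_rgb_image / FITSFigure.show_rgb are assumed from aplpy's public API rather than shown in this snippet:

import aplpy

# Reproject the three channels onto a common grid and stack them into a cube
# (this also writes 'cube_2d.fits', the collapsed image carrying the cube's WCS).
aplpy.make_rgb_cube(['r.fits', 'g.fits', 'b.fits'], 'cube.fits', north=True)

# Render the cube to a bitmap, then display it on the 2-D companion image so
# that FITSFigure has the correct WCS (assumed aplpy API; names hypothetical).
aplpy.make_rgb_image('cube.fits', 'cube_rgb.png')
fig = aplpy.FITSFigure('cube_2d.fits')
fig.show_rgb('cube_rgb.png')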
def re_project(orig_dir,
               Name_f0,
               proj_dir,
               Name_f1,
               para,
               exact=True,
               clear_files=True):
    '''
    Re-project FITS files using montage_wrapper.

    Inputs
        orig_dir: directory containing the FITS files to be re-projected;
                  omit any trailing '/'
        Name_f0: list of FITS file names before re-projection
        proj_dir: directory for the re-projected FITS files; it must NOT be
                  the same as orig_dir!
        Name_f1: list of FITS file names after re-projection, ordered like
                 Name_f0
        para: list of parameters defining the output grid of the re-projected
              images (see the usage sketch after this function), which
              includes:

                NAXIS1: integer
                NAXIS2: integer
                projection: 3-letter string, e.g. 'TAN'
                CRVAL1
                CRVAL2
                CRPIX1
                CRPIX2
                CD1_1
                CD1_2
                CD2_1
                CD2_2

        exact: whether the output shape exactly matches the template header
        clear_files: whether to delete intermediate files
    
    Outputs
        re-projected FITS files written to proj_dir

    Caveats
        1. This code is still being tested.
        2. Only tested on 2-D FITS files with a single HDU.
        3. "exact=True" frequently fails; mAdd can be used instead, but it
           sometimes produces zero-size FITS files, possibly because it
           cannot handle a single input FITS file.
    '''

    # create a folder holding copies of the original FITS files
    raw_dir = orig_dir + '/fits_orig'
    os.mkdir(raw_dir)
    for name_f in Name_f0:
        shutil.copy('%s/%s' % (orig_dir, name_f), raw_dir)

    # make images_table
    images_table = raw_dir + '/images_table.txt'
    mw.mImgtbl(raw_dir, images_table)

    # write a Montage template header (plain text) describing the target WCS
    (NAXIS1, NAXIS2, projection, CRVAL1, CRVAL2, CRPIX1, CRPIX2, CD1_1, CD1_2,
     CD2_1, CD2_2) = para
    f = open(raw_dir + '/header.txt', 'w')
    f.write('SIMPLE  = T\n')
    f.write('BITPIX  = -64\n')
    f.write('BUNIT  = none\n')
    f.write('NAXIS   = 2\n')
    f.write('NAXIS1  = %d\n' % NAXIS1)
    f.write('NAXIS2  = %d\n' % NAXIS2)
    f.write("CTYPE1  = 'RA---%s'\n" % projection)
    f.write("CTYPE2  = 'DEC--%s'\n" % projection)
    f.write('CRPIX1  = %d\n' % CRPIX1)
    f.write('CRPIX2  = %d\n' % CRPIX2)
    f.write('CRVAL1  = %f\n' % CRVAL1)
    f.write('CRVAL2  = %f\n' % CRVAL2)
    f.write('CD1_1   = %f\n' % CD1_1)
    f.write('CD1_2   = %f\n' % CD1_2)
    f.write('CD2_1   = %f\n' % CD2_1)
    f.write('CD2_2   = %f\n' % CD2_2)
    f.write('HISTORY =  By Yue Cao\n')
    f.write('END')
    f.close()

    # re-project
    stats_table = raw_dir + '/stats_table.txt'
    mw.mProjExec(images_table=images_table,
                 template_header=raw_dir + '/header.txt',
                 raw_dir=raw_dir,
                 proj_dir=proj_dir,
                 stats_table=stats_table,
                 exact=exact)

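    # Note: mProjExec writes its outputs as hdu0_<name>.fits plus
    # hdu0_<name>_area.fits; the area maps are removed and the projected
    # files are renamed below.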
    # delete intermediate files
    if clear_files:
        shutil.rmtree(raw_dir)
        for name_f in Name_f0:
            n_f = name_f.split('.')[0]
            os.remove('%s/hdu0_%s_area.fits' % (proj_dir, n_f))

    # rename the re-projected fits
    for i in range(len(Name_f0)):
        os.rename('%s/hdu0_%s' % (proj_dir, Name_f0[i]),
                  '%s/%s' % (proj_dir, Name_f1[i]))

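A minimal usage sketch for re_project, with hypothetical directories, file names, and WCS values:

import os

# Output grid: 1000x1000 pixels, TAN projection, ~1 arcsec/pixel
# (all values are illustrative only).
para = [1000, 1000, 'TAN',            # NAXIS1, NAXIS2, projection
        150.0, 2.0,                   # CRVAL1, CRVAL2 (deg)
        500, 500,                     # CRPIX1, CRPIX2
        -2.8e-4, 0.0, 0.0, 2.8e-4]    # CD1_1, CD1_2, CD2_1, CD2_2 (deg/pixel)

# 'data/orig' must already contain map1.fits and map2.fits; proj_dir must
# differ from orig_dir and is created up front in case mProjExec does not.
os.makedirs('data/proj', exist_ok=True)

re_project('data/orig', ['map1.fits', 'map2.fits'],
           'data/proj', ['map1_reproj.fits', 'map2_reproj.fits'],
           para, exact=True, clear_files=True)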
# def re_project(orig_dir,proj_dir,header,exact=True,clear_files=True):
    '''