def load_frame(self):
    """
    This function loads the primary frame of the image and sets NaN pixels to zero.
    :return:
    """

    # Debugging
    log.debug("Loading the frame ...")

    # Determine the full path to the image
    #image_path = fs.absolute_path(config.image)

    # Import the image
    importer = ImageImporter()
    importer.run(image_path)

    # Get the primary image frame
    frame = importer.image.primary

    # Get the original header
    header = importer.image.original_header

    # Create a mask of the pixels that are NaNs
    nans = frame.nans()

    # Set the NaN pixels to zero in the frame
    frame[nans] = 0.0
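# -----------------------------------------------------------------
# Illustrative sketch (not part of the original script): the NaN-masking step
# above, shown with plain numpy under the assumption that the frame behaves
# like a 2-D float array. The helper name zero_out_nans is hypothetical.
import numpy as np

def zero_out_nans(frame_data):
    # Build a boolean mask of NaN pixels and set those pixels to zero in place
    nans = np.isnan(frame_data)
    frame_data[nans] = 0.0
    return frame_data

# Example usage on a small array with one NaN pixel:
# zero_out_nans(np.array([[1.0, np.nan], [2.0, 3.0]]))
# -----------------------------------------------------------------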
level = "DEBUG" if arguments.debug else "INFO" # Initialize the logger log = logging.setup_log(level=level, path=logfile_path) log.start("Starting find_sources ...") # ----------------------------------------------------------------- # Determine the full path to the image image_path = fs.absolute(arguments.image) # Determine the full path to the bad region file bad_region_path = fs.join(input_path, arguments.bad) if arguments.bad is not None else None # Import the image importer = ImageImporter() importer.run(image_path, bad_region_path=bad_region_path) # Get the image image = importer.image # Get the mask of bad pixels bad_mask = image.masks.bad if "bad" in image.masks else None # ----------------------------------------------------------------- # Create a CatalogImporter instance catalog_importer = CatalogImporter() # Set file catalog options if arguments.filecatalog:
def PreCatalogue(source_dict, bands_dict, kwargs_dict):

    # If absolutely no star subtraction is required for this source, immediately return
    if kwargs_dict['starsub'] == False:
        return
    if str(source_dict['starsub_bands_exclude']) == 'True':
        return

    # If star subtraction is possibly required, check band-by-band
    if kwargs_dict['starsub'] == True:
        star_sub_check = False
        for band in bands_dict.keys():
            if bands_dict[band] == None:
                continue
            if bands_dict[band]['remove_stars'] == True:
                star_sub_check = True
        if star_sub_check == False:
            return

    # Now check that data actually exists for the bands in question for this source
    bands_check = []
    for band in bands_dict.keys():
        if bands_dict[band]['remove_stars'] == True:
            in_fitspath, file_found = CAAPR.CAAPR_Pipeline.FilePrelim(source_dict, bands_dict[band], kwargs_dict)
            bands_check.append(file_found)
        elif bands_dict[band]['remove_stars'] == False:
            bands_check.append(False)
    if True not in bands_check:
        return

    # If all checks passed, and star subtraction is required, inform user and make sure that AstroMagic temp directory is clear
    if kwargs_dict['verbose']:
        print '[' + source_dict['name'] + '] PTS AstroMagic retrieving list of foreground stars in map from online catalogues.'
    if os.path.exists(os.path.join(kwargs_dict['temp_dir_path'], 'AstroMagic')):
        shutil.rmtree(os.path.join(kwargs_dict['temp_dir_path'], 'AstroMagic'))
    os.mkdir(os.path.join(kwargs_dict['temp_dir_path'], 'AstroMagic'))

    # If weird PTS user directory not present, create it
    pts_user_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'PTS', 'user', 'magic', 'catalogs')
    if not os.path.exists(pts_user_dir):
        os.makedirs(pts_user_dir)

    # Loop over each band, determining map sizes
    diam_max = 0.0
    for band in bands_dict.keys():
        band_dict = bands_dict[band]

        # Check that this band requires star subtraction
        if band_dict['remove_stars'] != True:
            continue

        # Determine fits path
        if os.path.isdir(band_dict['band_dir']):
            in_fitspath = os.path.join(band_dict['band_dir'], source_dict['name'] + '_' + band_dict['band_name'])
        elif os.path.isfile(band_dict['band_dir']):
            in_fitspath = os.path.join(band_dict['band_dir'])

        # Work out whether the file extension for FITS file in question is .fits or .fits.gz
        file_found = False
        if os.path.exists(in_fitspath + '.fits'):
            in_fitspath = in_fitspath + '.fits'
            file_found = True
        elif os.path.exists(in_fitspath + '.fits.gz'):
            in_fitspath = in_fitspath + '.fits.gz'
            file_found = True
        if file_found == False:
            continue

        # Check if a cutout has been requested; if so, no need to open map
        if band_dict['make_cutout'] > 0:
            diam = float(band_dict['make_cutout']) / 3600.0

        # Work out map size
        else:
            band_header = astropy.io.fits.getheader(in_fitspath)
            band_wcs = astropy.wcs.WCS(band_header)
            band_cdelt = band_wcs.wcs.cdelt.max()
            diam = np.max([band_cdelt * float(band_header['NAXIS1']), band_cdelt * float(band_header['NAXIS2'])])

        # If diameter established for this band exceeds previous maximum, record
        if diam > diam_max:
            file_max = in_fitspath
            diam_max = diam

    # Register signal handler, to catch timeouts
    def Handler(signum, frame):
        raise Exception("Timeout!")
    signal.signal(signal.SIGALRM, Handler)

    # Run catalogue pre-fetching in a try statement, to catch a pernicious error
    try_counter = 0
    try_success = False
    while try_counter < 10:
        if try_success == True:
            break
        # try:
        # signal.alarm(900)

        # Get AstroMagic catalogue object reference fits
        logging.setup_log(level="ERROR")
        importer = ImageImporter()
        importer.run(file_max, find_error_frame=False)
        image = importer.image

        # Run catalogue importer on dummy fits, and save results
        catalog_importer = CatalogImporter()
        catalog_importer.config.writing.galactic_catalog_path = os.path.join(kwargs_dict['temp_dir_path'], 'AstroMagic', 'Galaxies.cat')
        catalog_importer.config.writing.stellar_catalog_path = os.path.join(kwargs_dict['temp_dir_path'], 'AstroMagic', 'Stars.cat')
        catalog_importer.run(image.frames.primary)
        catalog_importer.write_galactic_catalog()
        catalog_importer.write_stellar_catalog()

        # Record success and break
        try_success = True
        signal.alarm(0)
        break

        # # Handle errors
        # except ValueError as e:
        #     if 'rebin' in e.message:
        #         if kwargs_dict['verbose']: print '['+source_dict['name']+'] PTS AstroMagic encountered an error whilst pre-fetching stellar catalogues; re-attempting.'
        #         try_counter += 1
        #         pytime.sleep(10)
        # except IndexError as e:
        #     if 'out of range' in e.message:
        #         if kwargs_dict['verbose']: print '['+source_dict['name']+'] PTS AstroMagic encountered an error whilst pre-fetching stellar catalogues; re-attempting.'
        #         try_counter += 1
        #         pytime.sleep(10)
        # except Exception as e:
        #     if 'Read timed out' in str(e.message):
        #         if kwargs_dict['verbose']: print '['+source_dict['name']+'] PTS AstroMagic encountered an error whilst pre-fetching stellar catalogues; re-attempting.'
        #         try_counter += 1
        #         pytime.sleep(10)

    # signal.alarm(0)
    return
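# -----------------------------------------------------------------
# Illustrative sketch (not part of the original pipeline): the retry-with-timeout
# pattern used by PreCatalogue above, reduced to the standard library. The
# fetch_catalogue callable is a hypothetical stand-in for the CatalogImporter
# call; signal.SIGALRM is only available on Unix-like systems.
import signal
import time

def _timeout_handler(signum, frame):
    raise Exception("Timeout!")

def fetch_with_retries(fetch_catalogue, max_tries=10, timeout_sec=900):
    signal.signal(signal.SIGALRM, _timeout_handler)
    for attempt in range(max_tries):
        try:
            signal.alarm(timeout_sec)   # raise if the fetch hangs past the timeout
            result = fetch_catalogue()
            signal.alarm(0)             # cancel the pending alarm on success
            return result
        except Exception:
            signal.alarm(0)
            time.sleep(10)              # back off briefly before re-attempting
    return None
# -----------------------------------------------------------------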
def Magic(pod, source_dict, band_dict, kwargs_dict):

    # Run AstroMagic in try-while statements; if debug mode is disabled, an un-star-subtracted map will be used after 10 crashes
    am_crash = 0
    am_fail = True
    while am_fail:
        try:

            # Ensure star-subtraction is actually required
            if kwargs_dict['starsub'] != True:
                return pod
            if band_dict['remove_stars'] != True:
                return pod
            if band_dict['band_name'] in str(source_dict['starsub_bands_exclude']).split(';'):
                print '[' + pod['id'] + '] User explicitly excluded current band from star subtraction for this source.'
                return pod
            if str(source_dict['starsub_bands_exclude']) == 'True':
                print '[' + pod['id'] + '] User explicitly requested no star subtraction for this source.'
                return pod
            if pod['verbose']:
                print '[' + pod['id'] + '] Removing foreground stars and background galaxies with PTS AstroMagic.'

            # The paths to the image and output (absolute or relative to the current working directory)
            in_fitspath = pod['in_fitspath']
            temp_dir_path = pod['temp_dir_path']
            output_path = os.path.join(temp_dir_path, 'AstroMagic', band_dict['band_name'])
            image_path = in_fitspath

            # Setting the log level to "ERROR" disables all output from PTS except error messages (probably want to see those); full options are "DEBUG", "INFO", "WARNING", "ERROR", "SUCCESS"
            logging.setup_log(level="ERROR")

            # The path to the bad region (as explained in the previous script that I sent you)
            bad_region_path = None

            # The FWHM of the image (if known)
            fwhm = band_dict['beam_arcsec'] * Unit("arcsec")

            # Import the image
            importer = ImageImporter()
            importer.run(image_path, bad_region_path=bad_region_path, fwhm=fwhm, find_error_frame=False)

            # Get the imported image
            image = importer.image

            # Get the mask of bad pixels
            bad_mask = image.masks.bad if "bad" in image.masks else None

            # If version of cutout already processed by AstroMagic is present, use it; else, commence regular processing
            if os.path.exists(os.path.join(temp_dir_path, 'AstroMagic', band_dict['band_name'], source_dict['name'] + '_' + band_dict['band_name'] + '_StarSub.fits')):
                if pod['verbose']:
                    print '[' + pod['id'] + '] AstroMagic accessing pre-processed data for this map.'
                am_output = astropy.io.fits.getdata(os.path.join(temp_dir_path, 'AstroMagic', band_dict['band_name'], source_dict['name'] + '_' + band_dict['band_name'] + '_StarSub.fits'))
                pod['cutout'] = am_output
                pod['pre_reg'] = True
                return pod

            # Check that pre-fetched catalogue files exist; if not, return pod
            if os.path.exists(os.path.join(kwargs_dict['temp_dir_path'], 'AstroMagic', 'Stars.cat')) and os.path.exists(os.path.join(kwargs_dict['temp_dir_path'], 'AstroMagic', 'Galaxies.cat')):
                pass
            else:
                return pod

            # The path to the directory where all the output will be placed
            if os.path.exists(os.path.join(temp_dir_path, 'AstroMagic', band_dict['band_name'])):
                shutil.rmtree(os.path.join(temp_dir_path, 'AstroMagic', band_dict['band_name']))
            os.mkdir(os.path.join(temp_dir_path, 'AstroMagic', band_dict['band_name']))

            # Make copies of pre-fetched catalogues, to prevent simultaneous access conflicts
            shutil.copy(os.path.join(kwargs_dict['temp_dir_path'], 'AstroMagic', 'Stars.cat'),
                        os.path.join(temp_dir_path, 'AstroMagic', band_dict['band_name'], 'Stars.cat'))
            shutil.copy(os.path.join(kwargs_dict['temp_dir_path'], 'AstroMagic', 'Galaxies.cat'),
                        os.path.join(temp_dir_path, 'AstroMagic', band_dict['band_name'], 'Galaxies.cat'))

            # Create a CatalogImporter instance, and run it to import catalogues
            catalog_importer = CatalogImporter()
            catalog_importer.config.stars.use_catalog_file = True
            catalog_importer.config.galaxies.use_catalog_file = True
            catalog_importer.config.stars.catalog_path = os.path.join(temp_dir_path, 'AstroMagic', band_dict['band_name'], 'Stars.cat')
            catalog_importer.config.galaxies.catalog_path = os.path.join(temp_dir_path, 'AstroMagic', band_dict['band_name'], 'Galaxies.cat')
            catalog_importer.run(image.frames.primary)

            # If currently handling cut-down thumbnail, only use relevant portion of catalogues
            # if 'starsub_thumbnail' in pod.keys():
            #     if pod['starsub_thumbnail'] == True:
            galaxy_catalog_thumb = ThumbCatalogue(pod, source_dict, band_dict, kwargs_dict, catalog_importer)
            catalog_importer.galactic_catalog = galaxy_catalog_thumb

            # Create a SourceFinder instance
            finder = SourceFinder()

            # If you don't want to do the 'find_other_sources' step, comment-out the line below
            finder.config.find_other_sources = False  # default is True

            # Downsample map for faster run time
            if int(band_dict['downsample_factor']) > 1:
                if pod['verbose']:
                    print '[' + pod['id'] + '] AstroMagic will run on copy of map downsampled by factor of ' + str(int(band_dict['downsample_factor'])) + ', to improve speed.'
                finder.config.downsample_factor = int(band_dict['downsample_factor'])

            # Run the source finder
            if pod['verbose']:
                print '[' + pod['id'] + '] AstroMagic locating online catalogue sources in map.'
            special_region = None  # Not important except for debugging
            ignore_region = None  # Not important except when certain areas need to be completely ignored from the extraction procedure
            finder.config.build_catalogs = False  # For using pre-fetched catalogue files
            try:
                finder.run(image.frames.primary, catalog_importer.galactic_catalog, catalog_importer.stellar_catalog, special_region, ignore_region, bad_mask)
            except RuntimeError as e:
                if 'found' in e.message:
                    if pod['verbose']:
                        print '[' + pod['id'] + '] AstroMagic found no sources to remove.'
                    pod['pre_reg'] = True
                    return pod
                else:
                    pdb.set_trace()

            # Save the galaxy region
            galaxy_region = finder.galaxy_region
            galaxy_region_path = filesystem.join(output_path, "galaxies.reg")
            galaxy_region.save(galaxy_region_path)

            # Save the star region
            star_region = finder.star_region
            star_region_path = filesystem.join(output_path, "stars.reg")
            if star_region is not None:
                star_region.save(star_region_path)

            # Save the saturation region
            saturation_region = finder.saturation_region
            saturation_region_path = filesystem.join(output_path, "saturation.reg")
            if saturation_region is not None:
                saturation_region.save(saturation_region_path)

            # Save the region of other sources
            other_region = finder.other_region
            other_region_path = filesystem.join(output_path, "other_sources.reg")
            if other_region is not None:
                other_region.save(other_region_path)

            # Get the segmentation maps (galaxies, stars and other sources) from the SourceFinder
            galaxy_segments = finder.galaxy_segments
            star_segments = finder.star_segments
            other_segments = finder.other_segments

            # Make sure target galaxy isn't identified as star segment
            target_segment = star_segments[int(round(pod['centre_i'])), int(round(pod['centre_j']))]
            star_segments[np.where(star_segments == target_segment)] = 0.0

            # Handle stars that have been conflated with the target galaxy
            star_segments = OverlargeStars(pod, star_segments, saturation_region_path, star_region_path, galaxy_region_path, image, source_dict, band_dict, temp_dir_path)

            # Region files can be adjusted by the user; if this is done, they have to be reloaded
            star_region = Region.from_file(star_region_path.replace('.reg', '_revised.reg'))
            saturation_region = Region.from_file(saturation_region_path.replace('.reg', '_revised.reg'))

            """
            # Remove all but target galaxy from galaxy region file
            ExcessGalaxies(galaxy_region_path, galaxy_principal)
            galaxy_region = Region.from_file(galaxy_region_path.replace('.reg','_revised.reg'))
            """

            # Remove all galaxies from galaxy region file
            shutil.copy2(galaxy_region_path, galaxy_region_path.replace('.reg', '_revised.reg'))
            gal_header = '# Region file format: DS9 version 4.1\n'
            gal_file_new = open(galaxy_region_path.replace('.reg', '_revised.reg'), 'w')
            gal_file_new.write(gal_header)
            gal_file_new.close()
            galaxy_region = Region.from_file(galaxy_region_path.replace('.reg', '_revised.reg'))

            # Create a map of the segmentation maps
            segments = Image("segments")

            # Add the segmentation map of the galaxies
            segments.add_frame(galaxy_segments, "galaxies")

            # Add the segmentation map of the saturated stars
            if star_segments is not None:
                segments.add_frame(star_segments, "stars")

            # Add the segmentation map of the other sources
            if other_segments is not None:
                segments.add_frame(other_segments, "other_sources")

            # Save the FITS file with the segmentation maps
            path = filesystem.join(output_path, "segments.fits")
            segments.save(path)

            # Create a SourceExtractor instance
            if pod['verbose']:
                print '[' + pod['id'] + '] AstroMagic extracting background sources.'
            extractor = SourceExtractor()

            # Run the source extractor
            extractor.run(image.frames.primary, galaxy_region, star_region, saturation_region, other_region, galaxy_segments, star_segments, other_segments)

            # Determine the path to the result
            result_path = filesystem.join(temp_dir_path, 'AstroMagic', band_dict['band_name'], source_dict['name'] + '_' + band_dict['band_name'] + '_StarSub.fits')

            # Save the resulting image as a FITS file
            image.frames.primary.save(result_path, header=image.original_header)

            # Grab AstroMagic output and return in pod
            am_output = astropy.io.fits.getdata(result_path)
            pod['cutout'] = am_output
            pod['pre_reg'] = True
            am_fail = False
            return pod

        # Handle exceptions
        except:
            am_crash += 1
            if am_crash >= 10:
                if kwargs_dict['debug']:
                    print '[' + pod['id'] + '] AstroMagic failed for ' + pod['id'] + '! Suggest debugging.'
                    raise ValueError('AstroMagic failed for ' + pod['id'] + '! Suggest debugging.')
                else:
                    print '[' + pod['id'] + '] AstroMagic failed for ' + pod['id'] + '! Suggest debugging.'
                    return pod
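# -----------------------------------------------------------------
# Illustrative sketch (not part of the original pipeline): the crash-counter
# fallback used by Magic above, reduced to generic Python. run_starsub and the
# debug flag are hypothetical stand-ins for the AstroMagic processing call and
# kwargs_dict['debug'].
def run_with_fallback(run_starsub, pod, debug=False, max_crashes=10):
    crashes = 0
    while True:
        try:
            return run_starsub(pod)   # success: return the processed pod
        except Exception:
            crashes += 1
            if crashes >= max_crashes:
                if debug:
                    raise             # in debug mode, surface the error for inspection
                return pod            # otherwise fall back to the un-star-subtracted map
# -----------------------------------------------------------------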