def handle_certificate_problem(self, error_message):
    """Halt image loading and show the CADC certificate dialog to the user."""
    logger.warning("Problem with CADC certificate")
    # Stop any in-flight downloads before surfacing the problem.
    self.app.get_model().stop_loading_images()
    app_view = self.app.get_view()
    app_view.hide_image_loading_dialog()
    app_view.show_certificate_dialog(self, error_message)
def handle_general_download_error(self, error_message, download_request):
    """Log a failed download, queue it for retry, and show the retry dialog."""
    logger.warning("A problem occurred while downloading: %s" % error_message)
    divider = "-" * 60
    logger.error(divider)
    logger.error(traceback.format_exc())
    logger.error(divider)
    # Remember the request so the user can retry it from the dialog.
    self._failed_downloads.append(download_request)
    self.app.get_view().show_retry_download_dialog(self, error_message)
def build_source_reading(self, expnum, ccd, X, Y):
    """
    Given the location of a source in the image, create a source reading.

    Returns an astrom.Observation with rawname and header set, or None
    when the image/mopheader cannot be found or (X, Y) is unusable.
    """
    uri = storage.dbimages_uri(expnum=expnum, ccd=None, version='p',
                               ext='.fits', subdir=None)
    logger.debug('Trying to access {}'.format(uri))
    if not storage.exists(uri, force=False):
        logger.warning('Image not in dbimages? Trying subdir.')
        uri = storage.dbimages_uri(expnum=expnum, ccd=ccd, version='p')
        if not storage.exists(uri, force=False):
            logger.warning("Image doesn't exist in ccd subdir. %s" % uri)
            return None

    slice_rows = config.read("CUTOUTS.SINGLETS.SLICE_ROWS")
    slice_cols = config.read("CUTOUTS.SINGLETS.SLICE_COLS")

    # -9999 is the sentinel for an unresolved x/y position.
    if X == -9999 or Y == -9999:
        logger.warning("Skipping {} as x/y not resolved.".format(uri))
        return None

    # Reject positions whose cutout would fall entirely off the chip
    # (nominal 2048x4600 -- presumably MegaCam CCD dimensions; confirm).
    x_on_chip = -slice_cols / 2. < X < 2048 + slice_cols / 2.
    y_on_chip = -slice_rows / 2. < Y < 4600 + slice_rows / 2.0
    if not (x_on_chip and y_on_chip):
        logger.warning("Central location ({},{}) off image cutout.".format(X, Y))
        return None

    mop_uri = storage.dbimages_uri(expnum=expnum, ccd=ccd, version='p',
                                   ext='.mopheader')
    if not storage.exists(mop_uri, force=False):
        # ELEVATE! we need to know to go off and reprocess/include this image.
        logger.critical('Image exists but processing incomplete. Mopheader missing. {}'.format(uri))
        return None

    mopheader = get_mopheader(expnum, ccd)

    # Assemble the Observation record.
    observation = astrom.Observation(expnum=str(expnum),
                                     ftype='p',
                                     ccdnum=str(ccd),
                                     fk="")
    observation.rawname = os.path.splitext(os.path.basename(uri))[0] + str(ccd).zfill(2)
    observation.header = mopheader
    return observation
def from_string(cls, comment):
    """
    Build an MPC Comment from a string.

    Expected whitespace-separated layout:
        frame source_name MPCNote X Y magnitude mag_uncertainty plate_uncertainty ... % free-form text

    :param comment: the raw comment string to parse.
    :return: an instance of ``cls`` on success; when the line has fewer
        than 8 fields it is not OSSOS format and the original string is
        returned unchanged (callers must handle both return types).
    """
    values = comment.split()
    if len(values) < 8:
        logger.warning("non-OSSOS format MPC line read")
        return comment
    # Everything after the '%' marker is the free-form comment text.
    comment = comment.split('%')[-1]
    # Use cls(...) rather than a hard-coded class name so this alternate
    # constructor also produces correct instances for subclasses.
    return cls(source_name=values[1],
               frame=values[0],
               X=values[3],
               Y=values[4],
               MPCNote=values[2][1:],
               magnitude=values[5],
               mag_uncertainty=values[6],
               plate_uncertainty=values[7],
               comment=comment)
def from_source_reference(expnum, ccd, X, Y):
    """
    Given the location of a source in the image, create a source reading.

    Returns an Observation with rawname and header filled in, or None
    when the image or mopheader is missing or X/Y is the -9999 sentinel.
    """
    uri = storage.dbimages_uri(expnum=expnum,
                               ccd=None,
                               version='p',
                               ext='.fits',
                               subdir=None)
    logger.debug('Trying to access {}'.format(uri))

    if not storage.exists(uri, force=False):
        logger.warning('Image not in dbimages? Trying subdir.')
        uri = storage.dbimages_uri(expnum=expnum, ccd=ccd, version='p')
        if not storage.exists(uri, force=False):
            logger.warning("Image doesn't exist in ccd subdir. %s" % uri)
            return None

    # -9999 marks an unresolved x/y centroid.
    if X == -9999 or Y == -9999:
        logger.warning("Skipping {} as x/y not resolved.".format(uri))
        return None

    mop_uri = storage.dbimages_uri(expnum=expnum,
                                   ccd=ccd,
                                   version='p',
                                   ext='.mopheader')
    if not storage.exists(mop_uri, force=False):
        # ELEVATE! we need to know to go off and reprocess/include this image.
        logger.critical('Image exists but processing incomplete. Mopheader missing. {}'.format(uri))
        return None

    mopheader = storage.get_mopheader(expnum, ccd)

    # Assemble the Observation record.
    observation = Observation(expnum=str(expnum),
                              ftype='p',
                              ccdnum=str(ccd),
                              fk="")
    base = os.path.splitext(os.path.basename(uri))[0]
    observation.rawname = base + str(ccd).zfill(2)
    observation.header = mopheader
    return observation
def from_source_reference(expnum, ccd, x, y):
    """
    Given the location of a source in the image, create a source reading.

    Returns an Observation with rawname set, or None when the image or
    its mopheader is missing or x/y is the -9999 sentinel.
    NOTE(review): unlike the sibling variants this one only checks that
    the mopheader exists and never assigns observation.header -- confirm
    callers load the header themselves.
    """
    uri = storage.dbimages_uri(expnum=expnum, ccd=None, version='p',
                               ext='.fits', subdir=None)
    logger.debug('Trying to access {}'.format(uri))

    if not storage.exists(uri, force=False):
        logger.warning('Image not in dbimages? Trying subdir.')
        uri = storage.dbimages_uri(expnum=expnum, ccd=ccd, version='p')
        if not storage.exists(uri, force=False):
            logger.warning("Image doesn't exist in ccd subdir. %s" % uri)
            return None

    # -9999 marks an unresolved x/y centroid.
    if x == -9999 or y == -9999:
        logger.warning(
            "Skipping {} as x/y not resolved.".format(uri))
        return None

    mop_uri = storage.dbimages_uri(expnum=expnum, ccd=ccd, version='p',
                                   ext='.mopheader')
    if not storage.exists(mop_uri, force=False):
        # ELEVATE! we need to know to go off and reprocess/include this image.
        logger.critical(
            'Image exists but processing incomplete. Mopheader missing. {}'
            .format(uri))
        return None

    # Assemble the Observation record (header intentionally left unset).
    observation = Observation(expnum=str(expnum),
                              ftype='p',
                              ccdnum=str(ccd),
                              fk="")
    base = os.path.splitext(os.path.basename(uri))[0]
    observation.rawname = base + str(ccd).zfill(2)
    return observation
def phot(fits_filename, x_in, y_in, aperture=15, sky=20, swidth=10, apcor=0.3,
         maxcount=30000.0, exptime=1.0, zmag=None):
    """
    Compute the centroids and magnitudes of a bunch sources detected on
    CFHT-MEGAPRIME images.

    Args:
      fits_filename: str
        The name of the file containing the image to be processed.
      x_in, y_in: pixel coordinates of the single source to measure.
      aperture: photometry aperture radius (pixels).
      sky: inner radius of the sky annulus (pixels).
      swidth: width of the sky annulus (pixels).
      apcor: aperture correction subtracted from the measured magnitude.
      maxcount: saturation level passed to DAOPHOT datamax.
      exptime: exposure time used for DAOPHOT itime.
      zmag: photometric zeropoint; when None it is taken from the image
        header (PHOTZP) or a nominal per-filter table.

    Returns a MOPfiles data structure (dict with 'header', 'order',
    'format' and 'data' keys).
    """
    if (not os.path.exists(fits_filename) and
            not fits_filename.endswith(".fits")):
        # For convenience, see if we just forgot to provide the extension
        fits_filename += ".fits"

    try:
        input_hdulist = fits.open(fits_filename)
    except Exception as err:
        logger.debug(str(err))
        # NOTE: err.message is Python-2-only; this module is py2 code.
        raise TaskError("Failed to open input image: %s" % err.message)

    ## get the filter for this image
    filter = input_hdulist[0].header.get('FILTER', 'DEFAULT')

    ### Some nominal CFHT zeropoints that might be useful
    zeropoints = {"I": 25.77,
                  "R": 26.07,
                  "V": 26.07,
                  "B": 25.92,
                  "DEFAULT": 26.0,
                  "g.MP9401": 26.4
                  }

    # Header zeropoint (falls back to the nominal table) kept for the
    # consistency warning below.
    photzp = input_hdulist[0].header.get('PHOTZP',
                                         zeropoints.get(filter, zeropoints["DEFAULT"]))
    if zmag is None:
        # NOTE: raises KeyError for a filter not in the table (unlike the
        # photzp lookup above, which falls back to DEFAULT).
        zmag = input_hdulist[0].header.get('PHOTZP', zeropoints[filter])

    ### check for magic 'zeropoint.used' files
    # A zeropoint.used file (global, or per-image "<base>.zeropoint.used")
    # overrides any zeropoint chosen above.
    zpu_file = "zeropoint.used"
    if os.access(zpu_file, os.R_OK):
        with open(zpu_file) as zpu_fh:
            zmag = float(zpu_fh.read())
    else:
        zpu_file = "%s.zeropoint.used" % (fits_filename[0:-5])
        if os.access(zpu_file, os.R_OK):
            with open(zpu_file) as zpu_fh:
                zmag = float(zpu_fh.read())

    if zmag != photzp:
        logger.warning("ZEROPOINT {} used in DAOPHOT doesn't match PHOTZP {} in header".format(zmag, photzp))

    ### setup IRAF to do the magnitude/centroid measurements
    iraf.set(uparm="./")
    iraf.digiphot()
    iraf.apphot()
    iraf.daophot(_doprint=0)

    iraf.photpars.apertures = aperture
    iraf.photpars.zmag = zmag
    iraf.datapars.datamin = 0
    iraf.datapars.datamax = maxcount
    iraf.datapars.exposur = ""
    iraf.datapars.itime = exptime
    iraf.fitskypars.annulus = sky
    iraf.fitskypars.dannulus = swidth
    iraf.fitskypars.salgorithm = "mode"
    iraf.fitskypars.sloclip = 5.0
    iraf.fitskypars.shiclip = 5.0
    iraf.centerpars.calgori = "centroid"
    iraf.centerpars.cbox = 5.
    iraf.centerpars.cthreshold = 0.
    iraf.centerpars.maxshift = 2.
    iraf.centerpars.clean = 'no'
    iraf.phot.update = 'no'
    iraf.phot.verbose = 'no'
    iraf.phot.verify = 'no'
    iraf.phot.interactive = 'no'

    # Used for passing the input coordinates
    coofile = tempfile.NamedTemporaryFile(suffix=".coo", delete=False)
    coofile.write("%f %f \n" % (x_in, y_in))

    # Used for receiving the results of the task
    # mag_fd, mag_path = tempfile.mkstemp(suffix=".mag")
    magfile = tempfile.NamedTemporaryFile(suffix=".mag", delete=False)

    # Close the temp files before sending to IRAF due to docstring:
    # "Whether the name can be used to open the file a second time, while
    # the named temporary file is still open, varies across platforms"
    coofile.close()
    magfile.close()
    # IRAF refuses to write to an existing file, so remove the placeholder.
    os.remove(magfile.name)

    iraf.phot(fits_filename, coofile.name, magfile.name)

    # TODO: Move this filtering downstream to the user.
    # Keep only rows whose photometry/centering/sky-fit error codes are 0.
    phot_filter = "PIER==0 && CIER==0 && SIER==0"
    pdump_out = iraf.pdump(magfile.name,
                           "XCENTER,YCENTER,MAG,MERR,ID,XSHIFT,YSHIFT,LID",
                           phot_filter,
                           header='no', parameters='yes',
                           Stdout=1)
    if not len(pdump_out) > 0:
        # Surface the raw .mag contents to aid debugging the IRAF failure.
        mag_content = open(magfile.name).read()
        raise TaskError("photometry failed. {}".format(mag_content))

    os.remove(coofile.name)
    os.remove(magfile.name)

    ### setup the mop output file structure
    hdu = {}
    hdu['header'] = {'image': input_hdulist,
                     'aper': aperture,
                     's_aper': sky,
                     'd_s_aper': swidth,
                     'aper_cor': apcor,
                     'zeropoint': zmag}
    hdu['order'] = ['X', 'Y', 'MAG', 'MERR', 'ID', 'XSHIFT', 'YSHIFT', 'LID']
    hdu['format'] = {'X': '%10.2f',
                     'Y': '%10.2f',
                     'MAG': '%10.2f',
                     'MERR': '%10.2f',
                     'ID': '%8d',
                     'XSHIFT': '%10.2f',
                     'YSHIFT': '%10.2f',
                     'LID': '%8d'}
    hdu['data'] = {}
    for col in hdu['order']:
        hdu['data'][col] = []

    # Parse pdump output: columns are consumed left-to-right, converted
    # per the format spec; the aperture correction is applied to MAG.
    for line in pdump_out:
        values = line.split()
        for col in hdu['order']:
            if re.match('\%.*f', hdu['format'][col]):
                if col == 'MAG':
                    values[0] = float(values[0]) - float(apcor)
                hdu['data'][col].append(float(values.pop(0)))
            elif re.match('\%.*d', hdu['format'][col]):
                hdu['data'][col].append(int(values.pop(0)))
            else:
                hdu['data'][col].append(values.pop(0))

    # Clean up temporary files generated by IRAF
    # NOTE(review): these os.remove calls raise OSError if the .par files
    # were not created -- confirm IRAF always writes them here.
    os.remove("datistabe.par")
    os.remove("datpdump.par")

    return hdu
def parse(self, ssos_result_filename_or_lines):
    """
    given the result table create 'source' objects.

    Reads a tab-delimited SSOS result table, keeps only OSSOS rows
    (CFHT/MegaCam, r.MP9601 filter), builds an astrom.Observation plus an
    astrom.SourceReading per usable row, and bundles them into SSOSData.

    :type ssos_result_table: Table
    :param ssos_result_table: tab-delimited SSOS query result (filename
        or list of lines accepted by the ascii reader).
    """
    table_reader = ascii.get_reader(Reader=ascii.Basic)
    table_reader.inconsistent_handler = self._skip_missing_data
    table_reader.header.splitter.delimiter = '\t'
    table_reader.data.splitter.delimiter = '\t'
    table = table_reader.read(ssos_result_filename_or_lines)

    sources = []
    observations = []
    source_readings = []

    # WCS of the first usable row; all later rows are projected into it.
    ref_pvwcs = None
    downloader = Downloader()
    warnings.filterwarnings('ignore')

    for row in table:
        # check if a dbimages object exists
        ccd = int(row['Ext']) - 1
        expnum = row['Image'].rstrip('p')

        # ADDING THIS TEMPORARILY TO GET THE NON-OSSOS DATA OUT OF THE WAY WHILE DEBUGGING
        if (row['Telescope_Insturment'] != 'CFHT/MegaCam') or (row['Filter'] != 'r.MP9601'):
            continue

        # it's fine for OSSOS, go get the image
        image_uri = storage.dbimages_uri(expnum=expnum,
                                         ccd=None,
                                         version='p',
                                         ext='.fits',
                                         subdir=None)
        logger.info('Trying to access %s\n%s' % (row.data, image_uri))

        if not storage.exists(image_uri, force=False):
            logger.warning('Image not in dbimages? Trying subdir.')
            image_uri = storage.dbimages_uri(expnum=expnum,
                                             ccd=ccd,
                                             version='p')
            if not storage.exists(image_uri, force=False):
                logger.warning("Image doesn't exist in ccd subdir. %s" % image_uri)
                continue

        # -9999 marks an unresolved x/y position.
        if row['X'] == -9999 or row['Y'] == -9999:
            logger.warning("Skipping %s as x/y not resolved." % (row['Image']))
            continue

        mopheader_uri = storage.dbimages_uri(expnum=expnum,
                                             ccd=ccd,
                                             version='p',
                                             ext='.mopheader')
        # NOTE(review): 'mopheaders' and 'astheaders' below appear to be
        # module-level caches shared across calls -- confirm.
        if not mopheader_uri in mopheaders:
            if not storage.exists(mopheader_uri, force=False):
                logger.warning('mopheader missing, but images exists')
                continue

            # raise flag if no MOPHEADER
            mopheader_fpt = cStringIO.StringIO(storage.open_vos_or_local(mopheader_uri).read())
            mopheader = fits.open(mopheader_fpt)
            mopheaders[mopheader_uri] = mopheader
        mopheader = mopheaders[mopheader_uri]

        # Build astrom.Observation
        observation = astrom.Observation(expnum=str(expnum),
                                         ftype='p',
                                         ccdnum=str(ccd),
                                         fk="")

        observation.rawname = os.path.splitext(os.path.basename(image_uri))[0]+str(ccd).zfill(2)

        observation.header = mopheader[0].header
        # Re-express the mid-exposure MJD in MPC string format.
        MJD_OBS_CENTER = mpc.Time(observation.header['MJD-OBSC'],
                                  format='mjd',
                                  scale='utc',
                                  precision=5).replicate(format='mpc')
        observation.header['MJD_OBS_CENTER'] = str(MJD_OBS_CENTER)
        observation.header['MAXCOUNT'] = MAXCOUNT
        observation.header['SCALE'] = observation.header['PIXSCALE']
        #observation.header['CHIP'] = str(observation.header['CHIPNUM']).zfill(2)
        observation.header['NAX1'] = observation.header['NAXIS1']
        observation.header['NAX2'] = observation.header['NAXIS2']
        observation.header['MOPversion'] = observation.header['MOP_VER']
        observation.header['FWHM'] = 4

        # a download pixel 1,1 of this data to due offsets with.
        # Clamp the predicted position onto the chip before cutting out a
        # single pixel to recover the image WCS.
        x_cen = int(min(max(1, row['X']), observation.header['NAX1']))
        y_cen = int(min(max(1, row['Y']), observation.header['NAX2']))
        if image_uri not in astheaders:
            hdulist = downloader.download_hdulist(
                uri=image_uri,
                view='cutout',
                cutout='[{}][{}:{},{}:{}]'.format(ccd+1, x_cen, x_cen, y_cen, y_cen))
            astheaders[image_uri] = hdulist
        hdulist = astheaders[image_uri]

        pvwcs = wcs.WCS(hdulist[0].header)
        (ra, dec) = pvwcs.xy2sky(x_cen, y_cen)
        if ref_pvwcs is None:
            ref_pvwcs = pvwcs
            xref = row['X']
            yref = row['Y']
        # Project this row's sky position into the reference frame, then
        # restore the sub-pixel offset lost by the clamped cutout centre.
        (x0, y0) = ref_pvwcs.sky2xy(ra, dec)
        x0 += row['X'] - x_cen
        y0 += row['Y'] - y_cen

        # Build astrom.SourceReading
        observations.append(observation)

        from_input_file = observation.rawname in self.input_rawnames
        null_observation = observation.rawname in self.null_observations

        print observation.rawname, observation.header['MJD_OBS_CENTER'], null_observation, from_input_file

        source_reading = astrom.SourceReading(x=row['X'], y=row['Y'],
                                              xref=xref, yref=yref,
                                              x0=x0, y0=y0,
                                              ra=row['Object_RA'],
                                              dec=row['Object_Dec'],
                                              obs=observation,
                                              ssos=True,
                                              from_input_file=from_input_file,
                                              null_observation=null_observation)
        #if observation.rawname in self.input_rawnames:
        #    source_readings.insert(0, source_reading)
        #else:
        source_readings.append(source_reading)

    # build our array of SourceReading objects
    sources.append(source_readings)

    warnings.filterwarnings('once')

    return SSOSData(observations, sources, self.provisional_name)
def match_planted(cand_filename, measures):
    """
    Match measured sources against the planted-object list for a candidate
    file and write an efficiency (.eff) report.

    :param cand_filename: name of the .cand file to check.
    :param measures: dict mapping provisional name -> list of MPC
        measurements (each with .comment.X/.Y/.mag/.mag_uncertainty and
        .date).
    :return: the name of the .eff file written, or None when the candidate
        file could not be parsed.
    """
    # Load the planted objects associated with this candidate file.
    try:
        cands = astrom.parse(cand_filename)
    except Exception:
        # BUGFIX: sys.stderr was being *called* (sys.stderr(...)), which
        # raises TypeError instead of reporting the problem.
        sys.stderr.write("Failed while reading {}\n".format(cand_filename))
        return

    matches_fptr = open(os.path.basename(cand_filename) + ".eff", 'w')

    objects_planted_uri = cands.observations[0].get_object_planted_uri()
    planted_object_file = Planted_object_file(objects_planted_uri)
    planted_objects = planted_object_file.planted_objects

    matched = {}
    false_positive_sources = []
    false_negative_sources = []
    confused_measure = {}
    for idx in range(len(planted_objects)):
        planted_object = planted_objects[idx]

        # look for a matching .cand entry
        cand_dist = None
        cand_source = None
        for source in cands.get_sources():
            obs = source.get_readings()
            dist = math.sqrt((obs[0].x - planted_object.x) ** 2 +
                             (obs[0].y - planted_object.y) ** 2)
            if cand_dist is None or cand_dist > dist:
                cand_dist = dist
                cand_source = source

        # look for a matching .mpc entry
        measure_dist = None
        measure_source = None
        for provisional in measures:
            x = float(measures[provisional][0].comment.X)
            y = float(measures[provisional][0].comment.Y)
            dist = math.sqrt((x - planted_object.x) ** 2 +
                             (y - planted_object.y) ** 2)
            if measure_dist is None or measure_dist > dist:
                measure_dist = dist
                measure_source = measures[provisional]

        # NOTE: None < 6.0 is True in Python 2, so these comparisons rely
        # on py2 mixed-type ordering when no candidate/measure exists.
        if measure_dist < 6.0:
            # this gets 'unset' if we match this measure with a cand, in the next step.
            # NOTE(review): keyed by the *last* provisional iterated above,
            # not by measure_source's provisional name -- the del below
            # uses measure_source[0].provisional_name; confirm intent.
            confused_measure[provisional] = planted_object
            planted_object.confused += 1

        # determine if planted_object was found
        if cand_dist < 6.0:
            # In candidate list
            if measure_dist is not None and measure_dist < 6.0:
                # accepted.
                planted_object.recovered = measure_source
                planted_object.false_negative = None
                planted_object.confused -= 1
                del confused_measure[measure_source[0].provisional_name]
                matched[measure_source[0].provisional_name] = True
            else:
                # rejected.
                planted_object.false_negative = cand_source
                false_negative_sources.append(cand_source)

    matches_fptr.write(("## F: found\n"
                        "## M: multiple matches\n"
                        "## C: other match of confused multiple match\n"
                        "## N: not found\n"
                        "## P: false Positive \n"))
    matches_fptr.write(
        "{} {} {:>8s} {:>8s} {:>8s} {:>8s} {:>8s} {:>8s} {:>8s} {:>8s} {:>8s} {:>8s}\n".format(
            "Key", planted_object_file.header, "x_dao", "y_dao",
            "rate_mes", "ang_mes",
            "mag1_dao", "merr1_dao",
            "mag2_dao", "merr2_dao",
            "mag3_dao", "merr3_dao"))

    for planted_object in planted_objects:
        if planted_object.recovered is not None:
            # Recovered: 'F' found, or 'M' when multiple measures matched.
            confused = "F"
            if planted_object.confused > 1:
                confused = "M"
            measure = planted_object.recovered
            start_jd = measure[0].date.jd
            x = float(measure[0].comment.X)
            x3 = float(measure[2].comment.X)
            y = float(measure[0].comment.Y)
            y3 = float(measure[2].comment.Y)
            end_jd = measure[2].date.jd

            # Rate of motion in pixels/hour between first and third reading.
            rate = math.sqrt((x3 - x) ** 2 + (y3 - y) ** 2) / (
                24 * (end_jd - start_jd))
            angle = math.degrees(math.atan2(y3 - y, x3 - x))

            matches_fptr.write("{:3s} {} {:8.2f} {:8.2f} {:8.2f} {:8.2f} ".format(
                confused, str(planted_object), x, y, rate, angle))

            # record the three discovery magnitudes
            for idx in range(3):
                try:
                    mag = float(measure[idx].comment.mag)
                    merr = float(measure[idx].comment.mag_uncertainty)
                except Exception as e:
                    mag = -1.0
                    merr = -1.0
                    logger.warning(str(e))
                matches_fptr.write("{:8.2f} {:8.2f} ".format(mag, merr))
            matches_fptr.write("\n")

        elif planted_object.false_negative is not None:
            # Candidate existed but was rejected at measurement: 'N'.
            source = planted_object.false_negative
            reading = source.get_reading(0)
            third = source.get_reading(2)
            # NOTE(review): image_slice_downloader is not defined in this
            # function -- presumably a module-level downloader; confirm.
            cutout = image_slice_downloader.download_cutout(reading, needs_apcor=True)
            start_jd = mpc.Time(reading.obs.header['MJD_OBS_CENTER'],
                                format='mpc', scale='utc').jd
            end_jd = mpc.Time(third.obs.header['MJD_OBS_CENTER'],
                              format='mpc', scale='utc').jd

            rate = math.sqrt((third.x - reading.x) ** 2 +
                             (third.y - reading.y) ** 2) / (
                24 * (end_jd - start_jd))
            angle = math.degrees(math.atan2(third.y - reading.y,
                                            third.x - reading.x))

            matches_fptr.write("{:3s} {} {:8.2f} {:8.2f} {:8.2f} {:8.2f} ".format(
                "N", str(planted_object), reading.x, reading.y, rate, angle))

            for idx in range(3):
                try:
                    (x, y, mag, merr) = cutout.get_observed_magnitude()
                except TaskError as e:
                    logger.warning(str(e))
                    mag = -1.0
                    merr = -1.0
                matches_fptr.write("{:8.2f} {:8.2f} ".format(mag, merr))
            matches_fptr.write("\n")

        else:
            # Never turned up as a candidate at all.
            matches_fptr.write("{:3s} {}".format("X", str(planted_object)))
            matches_fptr.write(10 * " {:8.2f}".format(0.0))
            matches_fptr.write("\n")

        matches_fptr.flush()

    # Any measure not matched above is a false positive ('P'), or 'C' when
    # it was the extra arm of a confused multiple match.
    for provisional in measures:
        if matched.get(provisional, False):
            continue
        # this source is a false positive
        measure = measures[provisional]
        start_jd = measure[0].date.jd
        x = float(measure[0].comment.X)
        x3 = float(measure[2].comment.X)
        y = float(measure[0].comment.Y)
        y3 = float(measure[2].comment.Y)
        end_jd = measure[2].date.jd

        if provisional in confused_measure:
            confused = "C"
            planted_object = confused_measure[provisional]
        else:
            confused = "P"
            # Placeholder columns standing in for a planted-object record.
            planted_object = " {:4d}".format(-1) + 6 * " {:8.2f}".format(0)

        # look for the matching cand entry
        cand_dist = None
        cand_source = None
        for source in cands.get_sources():
            obs = source.get_readings()
            dist = math.sqrt((obs[0].x - x) ** 2 + (obs[0].y - y) ** 2)
            if cand_dist is None or cand_dist > dist:
                cand_dist = dist
                cand_source = source
        false_positive_sources.append(cand_source)

        rate = math.sqrt((x3 - x) ** 2 + (y3 - y) ** 2) / (
            24 * (end_jd - start_jd))
        angle = math.degrees(math.atan2(y3 - y, x3 - x))

        # record the three discovery magnitudes
        matches_fptr.write("{:3s} {} {:8.2f} {:8.2f} {:8.2f} {:8.2f} ".format(
            confused, planted_object, x, y, rate, angle))
        for idx in range(3):
            try:
                mag = float(measure[idx].comment.mag)
                merr = float(measure[idx].comment.mag_uncertainty)
            except Exception as e:
                mag = -1.0
                merr = -1.0
                logger.warning(str(e))
            matches_fptr.write("{:8.2f} {:8.2f} ".format(mag, merr))
        matches_fptr.write("\n")

    matches_fptr.close()

    ## write out the false_positives and false_negatives
    if not os.access('false_positives', os.R_OK):
        os.mkdir('false_positives')
    if not os.access('false_negatives', os.R_OK):
        os.mkdir('false_negatives')

    if len(false_positive_sources) > 0:
        wh = open('false_positives/' + os.path.basename(cand_filename), 'w+')
        writer = astrom.StreamingAstromWriter(wh, cands.sys_header)
        #writer.write_headers(cands.observations)
        for source in false_positive_sources:
            writer.write_source(source)
        writer.flush()
        writer.close()

    if len(false_negative_sources) > 0:
        wh = open('false_negatives/' + os.path.basename(cand_filename), 'w+')
        writer = astrom.StreamingAstromWriter(wh, cands.sys_header)
        #writer.write_headers(cands.observations)
        for source in false_negative_sources:
            writer.write_source(source)
        writer.flush()
        writer.close()

    return matches_fptr.name
def match_planted(astrom_filename, match_filename, false_positive_filename):
    """
    Using the astrom_filename as input get the Object.planted file from VOSpace
    and match planted sources with found sources.

    The Object.planted list is pulled from VOSpace based on the standard
    file-layout and name of the first exposure as read from the .astrom file.

    :param astrom_filename: name of the fk*reals.astrom file to check against Object.planted
    :param match_filename: a file that will contain a list of all planted sources and the matched found source
    :param false_positive_filename: .astrom format output containing input objects that had no match in planted
    """
    image_slice_downloader = ImageCutoutDownloader(slice_rows=100, slice_cols=100)

    fk_candidate_observations = astrom.parse(astrom_filename)
    matches_fptr = storage.open_vos_or_local(match_filename, 'w')

    objects_planted_uri = fk_candidate_observations.observations[0].get_object_planted_uri()
    objects_planted = image_slice_downloader.download_raw(
        objects_planted_uri, view='data').split('\n')

    # First line of Object.planted is the column header; '#' lines are comments.
    planted_objects = []
    for line in objects_planted[1:]:
        if len(line) == 0 or line[0] == '#':
            continue
        planted_objects.append(PlantedObject(line))

    # BUGFIX: false_positives_ftpr was never initialized; it is only
    # assigned inside the (currently disabled) false-positive branch, so
    # the 'is not None' checks below always raised UnboundLocalError.
    false_positives_ftpr = None
    false_positives_stream_writer = None

    matches_fptr.write("#{}\n".format(
        fk_candidate_observations.observations[0].rawname))
    matches_fptr.write(
        "{:1s}{} {:>8s} {:>8s} {:>8s} {:>8s} {:>8s} {:>8s} {:>8s}\n".format(
            "", objects_planted[0], "x_dao", "y_dao", "mag_dao", "merr_dao",
            "rate_mes", "ang_mes", "dr_pixels"))

    found_idxs = []
    for source in fk_candidate_observations.get_sources():
        reading = source.get_reading(0)
        third = source.get_reading(2)

        cutout = image_slice_downloader.download_cutout(reading, needs_apcor=True)
        try:
            (x, y, mag, merr) = cutout.get_observed_magnitude()
        except TaskError as e:
            logger.warning(str(e))
            mag = 0.0
            merr = -1.0

        # Nearest planted object to the first reading.
        matched = None
        for idx in range(len(planted_objects)):
            planted_object = planted_objects[idx]
            dist = math.sqrt((reading.x - planted_object.x) ** 2 +
                             (reading.y - planted_object.y) ** 2)
            if matched is None or dist < matched:
                matched = dist
                matched_object_idx = idx

        start_jd = Time(reading.obs.header['MJD_OBS_CENTER'],
                        format='mpc', scale='utc').jd
        end_jd = Time(third.obs.header['MJD_OBS_CENTER'],
                      format='mpc', scale='utc').jd
        exptime = float(reading.obs.header['EXPTIME'])

        # Rate of motion (pixels/hour) between first and third readings.
        rate = math.sqrt((third.x - reading.x) ** 2 +
                         (third.y - reading.y) ** 2) / (
            24 * (end_jd - start_jd))
        angle = math.degrees(math.atan2(third.y - reading.y,
                                        third.x - reading.x))

        # NOTE(review): 'and False' deliberately disables the false-positive
        # branch -- left as-is; remove the 'and False' to re-enable.
        if matched > 3 * rate * exptime / 3600.0 and False:
            # this is a false positive (candidate not near artificial source)
            # create a .astrom style line for feeding to validate for checking later
            if false_positives_ftpr is None or false_positives_stream_writer is None:
                # create false positive file for storing results
                false_positives_ftpr = open(false_positive_filename, 'w+')
                false_positives_stream_writer = StreamingAstromWriter(
                    false_positives_ftpr, fk_candidate_observations.sys_header)
            false_positives_stream_writer.write_source(source)
            false_positives_ftpr.flush()
            continue
        elif matched_object_idx in found_idxs:
            # Same planted object already claimed by an earlier source.
            repeat = '#'
        else:
            repeat = ' '
            found_idxs.append(matched_object_idx)

        # Magnitudes of the remaining readings.
        mags = []
        merrs = []
        for this_reading in source.get_readings()[1:]:
            cutout = image_slice_downloader.download_cutout(this_reading,
                                                            needs_apcor=True)
            try:
                (this_x, this_y, this_mag, this_merr) = cutout.get_observed_magnitude()
            except TaskError as e:
                logger.warning(str(e))
                this_mag = 0.0
                this_merr = -1.0
            mags.append(this_mag)
            merrs.append(this_merr)

        matches_fptr.write(
            "{:1s}{} {:8.2f} {:8.2f} {:8.2f} {:8.2f} {:8.2f} {:8.2f} {:8.2f} ".format(
                repeat, str(planted_objects[matched_object_idx]),
                reading.x, reading.y, mag, merr, rate, angle, matched))
        for idx in range(len(mags)):
            matches_fptr.write("{:8.2f} {:8.2f}".format(mags[idx], merrs[idx]))
        matches_fptr.write("\n")

    # close the false_positives
    if false_positives_ftpr is not None:
        false_positives_ftpr.close()

    # record the unmatched Object.planted entries, for use in efficiency computing
    for idx in range(len(planted_objects)):
        if idx not in found_idxs:
            planted_object = planted_objects[idx]
            matches_fptr.write(
                "{:1s}{} {:8.2f} {:8.2f} {:8.2f} {:8.2f} {:8.2f} {:8.2f} {:8.2f}\n".format(
                    "", str(planted_object), 0, 0, 0, 0, 0, 0, 0))

    matches_fptr.close()
def phot(fits_filename, x_in, y_in, aperture=15, sky=20, swidth=10, apcor=0.3,
         maxcount=30000.0, exptime=1.0, zmag=None):
    """
    Compute the centroids and magnitudes of a bunch sources detected on
    CFHT-MEGAPRIME images.

    Args:
      fits_filename: str
        The name of the file containing the image to be processed.
      x_in, y_in: a scalar coordinate pair or parallel sequences of
        pixel coordinates for the sources to measure.
      aperture: photometry aperture radius (pixels).
      sky: inner radius of the sky annulus (pixels).
      swidth: width of the sky annulus (pixels).
      apcor: aperture correction (recorded; not applied here).
      maxcount: saturation level passed to DAOPHOT datamax.
      exptime: exposure time used for DAOPHOT itime.
      zmag: photometric zeropoint; when None it is taken from the image
        header (PHOTZP) or a nominal per-filter table.

    Returns a MOPfiles data structure (here: the DAOPHOT table read back
    via astropy.io.ascii).
    """
    # Accept scalars by promoting them to one-element lists.
    if not hasattr(x_in, '__iter__'):
        x_in = [x_in, ]
    if not hasattr(y_in, '__iter__'):
        y_in = [y_in, ]

    if (not os.path.exists(fits_filename) and
            not fits_filename.endswith(".fits")):
        # For convenience, see if we just forgot to provide the extension
        fits_filename += ".fits"

    try:
        input_hdulist = fits.open(fits_filename)
    except Exception as err:
        logger.debug(str(err))
        # NOTE: err.message is Python-2-only; this module is py2 code.
        raise TaskError("Failed to open input image: %s" % err.message)

    ## get the filter for this image
    filter = input_hdulist[0].header.get('FILTER', 'DEFAULT')

    ### Some nominal CFHT zeropoints that might be useful
    zeropoints = {
        "I": 25.77,
        "R": 26.07,
        "V": 26.07,
        "B": 25.92,
        "DEFAULT": 26.0,
        "g.MP9401": 26.4
    }

    if zmag is None:
        # NOTE: raises KeyError for a filter not in the table (the photzp
        # lookup below falls back to DEFAULT instead).
        zmag = input_hdulist[0].header.get('PHOTZP', zeropoints[filter])

    ### check for magic 'zeropoint.used' files
    # A zeropoint.used file (global, or per-image "<base>.zeropoint.used")
    # overrides any zeropoint chosen above.
    zpu_file = "zeropoint.used"
    if os.access(zpu_file, os.R_OK):
        with open(zpu_file) as zpu_fh:
            zmag = float(zpu_fh.read())
    else:
        zpu_file = "%s.zeropoint.used" % (fits_filename[0:-5])
        if os.access(zpu_file, os.R_OK):
            with open(zpu_file) as zpu_fh:
                zmag = float(zpu_fh.read())

    photzp = input_hdulist[0].header.get(
        'PHOTZP', zeropoints.get(filter, zeropoints["DEFAULT"]))
    if zmag != photzp:
        logger.warning(
            "ZEROPOINT {} sent to DAOPHOT doesn't match PHOTZP {} in header".format(zmag, photzp))

    ### setup IRAF to do the magnitude/centroid measurements
    iraf.set(uparm="./")
    iraf.digiphot()
    iraf.apphot()
    iraf.daophot(_doprint=0)

    iraf.photpars.apertures = aperture
    iraf.photpars.zmag = zmag
    iraf.datapars.datamin = 0
    iraf.datapars.datamax = maxcount
    iraf.datapars.exposur = ""
    iraf.datapars.itime = exptime
    iraf.fitskypars.annulus = sky
    iraf.fitskypars.dannulus = swidth
    iraf.fitskypars.salgorithm = "mode"
    iraf.fitskypars.sloclip = 5.0
    iraf.fitskypars.shiclip = 5.0
    iraf.centerpars.calgori = "centroid"
    iraf.centerpars.cbox = 5.
    iraf.centerpars.cthreshold = 0.
    iraf.centerpars.maxshift = 2.
    iraf.centerpars.clean = 'no'
    iraf.phot.update = 'no'
    iraf.phot.verbose = 'no'
    iraf.phot.verify = 'no'
    iraf.phot.interactive = 'no'

    # Used for passing the input coordinates
    coofile = tempfile.NamedTemporaryFile(suffix=".coo", delete=False)

    for i in range(len(x_in)):
        coofile.write("%f %f \n" % (x_in[i], y_in[i]))

    # Used for receiving the results of the task
    # mag_fd, mag_path = tempfile.mkstemp(suffix=".mag")
    magfile = tempfile.NamedTemporaryFile(suffix=".mag", delete=False)

    # Close the temp files before sending to IRAF due to docstring:
    # "Whether the name can be used to open the file a second time, while
    # the named temporary file is still open, varies across platforms"
    coofile.close()
    magfile.close()
    # IRAF refuses to write to an existing file, so remove the placeholder.
    os.remove(magfile.name)

    iraf.phot(fits_filename, coofile.name, magfile.name)
    pdump_out = ascii.read(magfile.name, format='daophot')

    if not len(pdump_out) > 0:
        # Surface the raw .mag contents to aid debugging the IRAF failure.
        mag_content = open(magfile.name).read()
        raise TaskError("photometry failed. {}".format(mag_content))

    # Clean up temporary files generated by IRAF
    os.remove(coofile.name)
    os.remove(magfile.name)

    return pdump_out