def filter_source_table(source_table, radius):
    """Return the subset of *source_table* usable for transmission analysis.

    Keeps only sources that sit well inside the detector, fall within a
    sane flux range, and have no neighbour within `radius` pixels.
    """
    logger.info('Filtering source list')

    edge_margin = 512
    overscan_width = 20
    image_size = 2048

    x = source_table['X_coordinate']
    y = source_table['Y_coordinate']

    # Central region of the chip only; the X lower bound additionally
    # excludes the overscan strip.
    keep = (x > (overscan_width + edge_margin)) & (x < (image_size - edge_margin))
    keep &= (y > edge_margin) & (y < (image_size - edge_margin))

    # Now only a specific flux range.  Assume the stellar flux goes into
    # `psf_size` pixels so the value can be higher than 2**16-1.
    psf_size = 2.
    flux = source_table['Aper_flux_3']
    keep &= (flux >= 1E3 * psf_size) & (flux <= 45E3 * psf_size)

    # Only include isolated stars
    keep &= isolated_index(x, y, radius=radius)

    # Return the final catalogue
    return source_table[keep]
def from_file(cls, filename, cursor, sky_radius_inner, sky_radius_outer):
    """Alternate constructor: build an entry from a FITS file on disk.

    Reads the image id from the primary header, runs photometry with the
    given sky annulus radii, and returns a populated instance.
    """
    logger.info('Extracting transmission from %s', filename)
    with open_fits(filename) as infile:
        image_id = infile[0].header['image_id']
        results = extract_photometry_results(
            filename, cursor, image_id, sky_radius_inner, sky_radius_outer)
        results['image_id'] = image_id
        return cls(**results)
def isolated_index(x, y, radius=6.):
    """Boolean mask marking sources with no neighbour within `radius` pixels."""
    logger.info('Filtering with an isolation radius: %s', radius)
    tree = KDTree(np.vstack([x, y]).T)
    mask = np.zeros_like(x, dtype=bool)
    for i in range(x.size):
        neighbours = tree.query_ball_point((x[i], y[i]), radius)
        # The query always finds the source itself, so exactly one hit
        # means the star is isolated.
        mask[i] = len(neighbours) == 1
    return mask
def source_detect(fname, n_pixels, threshold, fwhmfilt, aperture_radius):
    """Run the external `imcore` source-detection tool on *fname* and
    return the resulting binary-table data (HDU 1)."""
    logger.info('Running source detect')
    logger.debug('n_pixels: %s, threshold: %s', n_pixels, threshold)
    with tempfile.NamedTemporaryFile(suffix='.fits') as tfile:
        cmd = list(map(str, [
            'imcore', fname, 'noconf', tfile.name, n_pixels, threshold,
            '--noell', '--filtfwhm', fwhmfilt, '--rcore', aperture_radius,
        ]))
        logger.debug('Running command [%s]', ' '.join(cmd))
        sp.check_call(cmd)
        tfile.seek(0)
        with open_fits(tfile.name) as infile:
            return infile[1].data
def render_fits_catalogue(data, fname):
    """Write the catalogue entries in *data* to a FITS file at *fname*.

    data : non-empty sequence of namedtuple-like records sharing the same
        `_fields`; all records are assumed to come from the same reference
        image (only ``data[0].ref_image_id`` is written to the header).
    fname : output path; an existing file is overwritten.
    """
    logger.info('Rendering fits file to %s', fname)

    # Build one FITS column per field, preserving field order explicitly
    # rather than relying on dict iteration order.
    columns = []
    for field_name in data[0]._fields:
        values = np.array([getattr(row, field_name) for row in data])
        columns.append(fits.Column(name=field_name,
                                   format=column_type(values),
                                   array=values))

    header = {'image_id': data[0].ref_image_id}
    phdu = fits.PrimaryHDU(header=fits.Header(header.items()))
    tbl = fits.BinTableHDU.from_columns(columns)
    tbl.name = 'transmission_catalogue'

    hdulist = fits.HDUList([phdu, tbl])
    # `clobber` was deprecated in astropy 1.3 and removed in 2.0;
    # `overwrite` is the supported, equivalent spelling.
    hdulist.writeto(fname, overwrite=True)
def update(self, cursor):
    """Process this job: make sure the reference catalogue exists, then
    extract a transmission entry from the image and upload it."""
    try:
        ref_image_id = get_refcat_id(self.real_filename)
    except NoAutoguider:
        # Return early but ensure the job is removed from the database by
        # not propagating the exception
        return

    if ref_catalogue_exists(cursor, ref_image_id):
        logger.info("Reference catalogue exists")
    else:
        logger.info("Reference catalogue missing, creating")
        ref_image_filename = ref_image_path(ref_image_id, cursor)
        build_catalogue(ref_image_filename, cursor)

    entry = TransmissionEntry.from_file(self.real_filename, cursor,
                                        sky_radius_inner=RADIUS_INNER,
                                        sky_radius_outer=RADIUS_OUTER)
    entry.upload_to_database(cursor)
def watcher_loop_step(connection):
    """Fetch all pending transmission jobs and process each one in turn."""
    # Reading the job_queue table gets its own short-lived transaction so
    # Paladin should not be held up by a write lock.
    with transaction(connection) as cursor:
        jobs = fetch_transmission_jobs(cursor)

    total = len(jobs)
    logger.info("Found %s jobs", total)

    # Separate transaction for updating the transmission database.  A job
    # that raises is logged and kept in the queue; successful jobs are
    # removed.
    with transaction(connection) as cursor:
        for counter, job in enumerate(jobs, start=1):
            logger.info("Job %d/%d", counter, total)
            try:
                job.update(cursor)
            except Exception as e:
                logger.exception("Exception occurred: %s", str(e))
            else:
                job.remove_from_database(cursor)
def extract_from_file(fname, n_pixels, threshold, fwhmfilt, isolation_radius,
                      aperture_radius, region_filename=None):
    """Yield a TransmissionCatalogueEntry for every source extracted from
    *fname* that survives filtering.

    Optionally writes a DS9-style region file showing kept (green) and
    all detected (red) sources when *region_filename* is given.
    """
    logger.info('Extracting catalogue from %s', fname)

    with open_fits(fname) as infile:
        ref_image_id = infile[0].header['image_id']

    source_table = source_detect(fname, n_pixels=n_pixels,
                                 threshold=threshold, fwhmfilt=fwhmfilt,
                                 aperture_radius=aperture_radius)
    logger.info('Found %s sources', len(source_table))

    filtered_source_table = filter_source_table(source_table,
                                                radius=isolation_radius)
    logger.info('Keeping %s sources', len(filtered_source_table))

    if region_filename is not None:
        with RegionFile(region_filename,
                        aperture_radius=aperture_radius) as rfile:
            rfile.add_regions(filtered_source_table, colour='green')
            rfile.add_regions(source_table, colour='red')

    inc_prescan = image_has_prescan(fname)
    logger.debug('Image has prescan: %s', inc_prescan)

    for row in filtered_source_table:
        yield TransmissionCatalogueEntry(
            aperture_radius=aperture_radius,
            ref_image_id=int(ref_image_id),
            x_coordinate=float(row['X_coordinate']),
            y_coordinate=float(row['Y_coordinate']),
            inc_prescan=inc_prescan,
            flux_adu=float(row['Aper_flux_3']))
def watcher(connection):
    """Daemon loop: register with the Pyro hub, then repeatedly ping the
    hub, run one watcher step, and sleep.  Never returns normally; hub
    communication failures propagate after being logged."""
    logger.info("Starting watcher")
    logger.debug("Connecting to central hub")
    hub = Pyro4.Proxy("PYRONAME:central.hub")
    try:
        hub.startThread("Transparency")
    except Exception:
        logger.exception("Cannot connect to pyro hub")
        raise

    while True:
        try:
            logger.debug("Pinging hub")
            hub.update_transp(time.time())
        except Exception:
            logger.exception("Failure communicating with hub process")
            raise

        with time_context():
            watcher_loop_step(connection)

        logger.debug("Sleeping for %s seconds", SLEEP_TIME)
        time.sleep(SLEEP_TIME)
def remove_from_database(self, cursor):
    """Delete this job's row from the job_queue table.

    cursor : DB-API cursor using %s-style parameter substitution.
    """
    # Lazy %-style logging args (matching the rest of this module) instead
    # of eager str.format, so formatting only happens if the record is
    # actually emitted.
    logger.info("Removing %s from the database", self)
    cursor.execute("delete from job_queue where job_id = %s", (self.job_id,))
def ref_catalogue_exists(cursor, ref_id):
    """Return True if a reference catalogue row exists for *ref_id*.

    Runs REFCAT_QUERY (which yields one id per row) and tests membership.
    """
    # Lazy %-style logging args (matching the rest of this module) instead
    # of eager str.format.
    logger.info("Checking if ref image %s exists", ref_id)
    cursor.execute(REFCAT_QUERY)
    # Set comprehension instead of set([...]); O(1) membership test.
    ref_ids = {row[0] for row in cursor}
    return ref_id in ref_ids
def fetch_transmission_jobs(cursor):
    """Return a Job instance for every row produced by JOB_QUERY."""
    logger.info("Fetching transmission jobs")
    cursor.execute(JOB_QUERY)
    # Materialise all rows first so the cursor is free to run further
    # queries while the jobs are being built.
    rows = cursor.fetchall()
    return [Job.from_row(row) for row in rows]