def extract_from_file(fname, n_pixels, threshold, fwhmfilt, isolation_radius, aperture_radius, region_filename=None):
    """Detect sources in *fname* and yield one TransmissionCatalogueEntry per
    source that survives the isolation filter.

    When *region_filename* is given, a ds9 region file is also written showing
    kept sources in green and every raw detection in red.
    """
    logger.info('Extracting catalogue from %s', fname)
    with open_fits(fname) as infile:
        ref_image_id = infile[0].header['image_id']

    detections = source_detect(fname,
                               n_pixels=n_pixels,
                               threshold=threshold,
                               fwhmfilt=fwhmfilt,
                               aperture_radius=aperture_radius)
    logger.info('Found %s sources', len(detections))

    kept = filter_source_table(detections, radius=isolation_radius)
    logger.info('Keeping %s sources', len(kept))

    if region_filename is not None:
        with RegionFile(region_filename, aperture_radius=aperture_radius) as rfile:
            rfile.add_regions(kept, colour='green')
            rfile.add_regions(detections, colour='red')

    inc_prescan = image_has_prescan(fname)
    logger.debug('Image has prescan: %s', inc_prescan)

    for detection in kept:
        yield TransmissionCatalogueEntry(
            aperture_radius=aperture_radius,
            ref_image_id=int(ref_image_id),
            x_coordinate=float(detection['X_coordinate']),
            y_coordinate=float(detection['Y_coordinate']),
            inc_prescan=inc_prescan,
            flux_adu=float(detection['Aper_flux_3']))
def add_regions(self, catalogue, colour):
    """Write one circular region per *catalogue* row in the given *colour*."""
    logger.debug('Adding %s regions', colour)
    write = self.fptr.write
    for entry in catalogue:
        region = self.circle(entry['X_coordinate'],
                             entry['Y_coordinate'],
                             colour=colour)
        write(region)
def time_context(message=None):
    """Context-manager body: log the wall-clock seconds spent in the block.

    If *message* is given it is used as the log format string and receives
    the elapsed seconds as its single %-argument; otherwise a default
    "Time taken" message is emitted.
    """
    start = time.time()
    yield
    elapsed = time.time() - start
    if message is None:
        logger.debug("Time taken: %s seconds", elapsed)
    else:
        logger.debug(message, elapsed)
def upload_to_database(self, cursor):
    """Insert this entry as a single row of the transmission_log table.

    Column names come from ``self._fields``; values are passed separately so
    the database driver performs the parameter escaping.
    """
    columns = self._fields
    row_values = [getattr(self, column) for column in columns]
    query = '''insert into transmission_log ({keys})
    values ({placeholder})'''.format(
        keys=', '.join(columns),
        placeholder=', '.join(['%s'] * len(columns)))
    logger.debug('Executing query: `%s` : [%s]',
                 query, ', '.join(map(str, row_values)))
    cursor.execute(query, row_values)
def main(args):
    """Entry point: extract the transmission entry for one file and store it."""
    if args.verbose:
        logger.setLevel("DEBUG")
    logger.debug(args)

    with connect_to_database_from_args(args) as cursor:
        entry = TransmissionEntry.from_file(args.filename,
                                            cursor,
                                            sky_radius_inner=args.radius_inner,
                                            sky_radius_outer=args.radius_outer)
        entry.upload_to_database(cursor)
def source_detect(fname, n_pixels, threshold, fwhmfilt, aperture_radius):
    """Run the external ``imcore`` source detector on *fname*.

    The catalogue is written to a temporary FITS file and the first binary
    table extension's data is returned.
    """
    logger.info('Running source detect')
    logger.debug('n_pixels: %s, threshold: %s', n_pixels, threshold)
    with tempfile.NamedTemporaryFile(suffix='.fits') as catalogue_file:
        # imcore takes positional numeric arguments, so stringify everything.
        cmd = list(map(str, [
            'imcore', fname, 'noconf', catalogue_file.name,
            n_pixels, threshold,
            '--noell',
            '--filtfwhm', fwhmfilt,
            '--rcore', aperture_radius,
        ]))
        logger.debug('Running command [%s]', ' '.join(cmd))
        sp.check_call(cmd)
        catalogue_file.seek(0)
        with open_fits(catalogue_file.name) as infile:
            return infile[1].data
def upload_info(extracted_data, cursor):
    """Insert one transmission_sources row per entry of *extracted_data*.

    Field names are taken from each row's ``_fields``; values are bound by
    the driver through ``%s`` placeholders.
    """
    query_template = '''insert into transmission_sources ({fields})
    values ({placeholders})'''

    def format_query(query_str):
        # Collapse the multi-line SQL onto one line for readable logging.
        return ' '.join(line.strip() for line in query_str.split('\n'))

    for row in extracted_data:
        full_query = query_template.format(
            fields=','.join(row._fields),
            placeholders=','.join(['%s'] * len(row._fields)))
        logger.debug('Inserting %s: %s', format_query(full_query), list(row))
        cursor.execute(full_query, args=row)
def get_refcat_id(filename):
    """Return the autoguider reference image id stored in *filename*'s
    primary FITS header.

    Raises NoAutoguider when the header has no ``agrefimg`` key.
    """
    # Bug fix: the original called ``".. from (unknown)".format(filename=...)``
    # on a template with no placeholder, so str.format silently discarded the
    # filename and the log message never identified the file.  Use the lazy
    # %s style the rest of the module uses.
    logger.debug("Extracting reference image id from %s", filename)
    with open_fits(filename) as infile:
        header = infile[0].header
        try:
            return header["agrefimg"]
        except KeyError:
            # logger.exception records the KeyError traceback before we
            # translate it into the domain-specific NoAutoguider error.
            logger.exception(
                """No autoguider reference image found in file %s.
                Assuming this is ok and continuing.""",
                filename,
            )
            raise NoAutoguider
def photometry_local(data, x, y, aperture_radius, sky_radius_inner, sky_radius_outer):
    """Aperture photometry with a local sky estimate.

    The sky level is measured in an annulus between *sky_radius_inner* and
    *sky_radius_outer*, scaled to the aperture area and subtracted from the
    aperture flux.  Returns the background-subtracted flux as a numpy array.
    """
    logger.debug('Sky annulus radii: %s -> %s', sky_radius_inner, sky_radius_outer)
    positions = (x, y)
    source_aperture = ph.CircularAperture(positions, r=aperture_radius)
    sky_annulus = ph.CircularAnnulus(positions,
                                     r_in=sky_radius_inner,
                                     r_out=sky_radius_outer)

    source_flux = ph.aperture_photometry(data, source_aperture)['aperture_sum']
    annulus_flux = ph.aperture_photometry(data, sky_annulus)['aperture_sum']

    # Mean sky per pixel in the annulus, scaled up to the aperture footprint.
    sky_per_pixel = annulus_flux / sky_annulus.area()
    background = sky_per_pixel * source_aperture.area()
    return np.array(source_flux - background)
def main(args):
    """Entry point: build the reference catalogue from parsed CLI arguments."""
    if args.verbose:
        logger.setLevel('DEBUG')
    logger.debug(args)

    # The region file sits next to the reference image.
    region_filename = args.refimage + '.reg'
    build_catalogue(refimage=args.refimage,
                    region_filename=region_filename,
                    n_pixels=args.npix,
                    threshold=args.threshold,
                    fwhmfilt=args.fwhmfilt,
                    isolation_radius=args.isolation_radius,
                    aperture_radius=args.aperture_radius,
                    db_host=args.db_host,
                    db_user=args.db_user,
                    db_name=args.db_name,
                    db_socket=args.db_socket,
                    fits_out=args.fits_out)
def main(args):
    """Entry point: drop and recreate every table in the database schema."""
    if args.verbose:
        logger.setLevel('DEBUG')
    logger.debug(args)

    schema = database_schema()
    tables = {name: raw_create_table(name, schema) for name in schema}

    with connect_to_database_from_args(args) as cursor:
        for table_name, query in tables.items():
            # Table names come from our own schema definition, not user
            # input, so interpolating the identifier here is safe.
            cursor.execute('drop table if exists {table_name}'.format(
                table_name=table_name))
            logger.debug('Executing query `%s`', query)
            cursor.execute(query)
def connect_to_database(user, host, db, unix_socket):
    """Context-manager body: yield a database cursor.

    Connects over TCP when *host* is given, otherwise over the local
    *unix_socket*; the connection is logged on entry and exit.
    """
    if host is not None:
        connect_kwargs = {'user': user, 'host': host, 'db': db}
    else:
        connect_kwargs = {'user': user, 'unix_socket': unix_socket, 'db': db}

    with pymysql.connect(**connect_kwargs) as cursor:
        logger.debug('Connected to database')
        yield cursor
        logger.debug('Closing database connection')
def watcher(connection):
    """Long-running loop: register with the central Pyro hub, then repeatedly
    ping the hub and run one watcher step, sleeping SLEEP_TIME between passes.

    Never returns normally; re-raises if the hub becomes unreachable so the
    supervisor can restart the process.
    """
    logger.info("Starting watcher")
    logger.debug("Connecting to central hub")
    hub = Pyro4.Proxy("PYRONAME:central.hub")
    try:
        hub.startThread("Transparency")
    except Exception:
        # Fix: the original bound `as err` but never used it;
        # logger.exception already records the full traceback.
        logger.exception("Cannot connect to pyro hub")
        raise
    while True:
        try:
            logger.debug("Pinging hub")
            # Report liveness to the hub with the current timestamp.
            hub.update_transp(time.time())
        except Exception:
            logger.exception("Failure communicating with hub process")
            raise
        with time_context():
            watcher_loop_step(connection)
        logger.debug("Sleeping for %s seconds", SLEEP_TIME)
        time.sleep(SLEEP_TIME)
def transaction(connection):
    """Context-manager body: hand out a cursor for one transaction block."""
    with connection as cursor:
        yield cursor
        # Reached only when the managed block exits without raising.
        logger.debug('Committing')