def get_stages_todo(ordered_stages, last_stage=None, extra_stages=None):
    """Resolve the ordered list of stage classes that should be run.

    Parameters
    ----------
    ordered_stages: list of banzai.stages.Stage objects
    last_stage: banzai.stages.Stage
        Last stage to do
    extra_stages:
        Stages to do after the last stage

    Returns
    -------
    stages_todo: list of banzai.stages.Stage
        The stages that need to be done

    Notes
    -----
    Extra stages can be other stages that are not in the ordered_stages list.
    """
    # A slice end of None means "take everything"; otherwise include
    # last_stage itself (hence the +1). A last_stage that is not in
    # ordered_stages raises ValueError, same as list.index always does.
    cutoff = None if last_stage is None else ordered_stages.index(last_stage) + 1
    selected = list(ordered_stages[:cutoff])
    if extra_stages is not None:
        selected.extend(extra_stages)
    # Stage entries are dotted-path strings; resolve each to its class object.
    return [import_utils.import_attribute(stage) for stage in selected]
def run_master_maker(image_path_list, runtime_context, frame_type):
    """Stack a set of calibration frames into master frame(s) and write them out.

    Parameters
    ----------
    image_path_list: list of str
        Paths of the individual calibration frames to stack.
    runtime_context:
        Pipeline runtime context (settings, db address, etc.).
    frame_type: str
        Calibration type key (case-insensitive) used to look up the stacker stage.
    """
    frames = [image_utils.read_image(path, runtime_context) for path in image_path_list]
    # The stacker stage class is configured per calibration type as a dotted path.
    stacker_class = import_utils.import_attribute(settings.CALIBRATION_STACKER_STAGE[frame_type.upper()])
    stacker = stacker_class(runtime_context)
    # Run the stacking stage, then persist every frame it produces.
    for stacked_frame in stacker.run(frames):
        stacked_frame.write(runtime_context)
def get_calibration_filename(image):
    """Build the canonical output filename for a master calibration frame.

    The base name is ``{site}{telescope}-{camera}-{epoch}-{cal_type}``; each
    configured per-type filename function may append one extra dash-separated
    component, and the result ends in ``.fits``.

    NOTE(review): ``context`` and ``calibration_type`` are free variables —
    they are not parameters and are not defined in this view of the file.
    Presumably this code was extracted from a method/closure where they were
    in scope (e.g. ``self.runtime_context`` / ``self.calibration_type``);
    confirm against the original definition before relying on this function.

    Parameters
    ----------
    image:
        Frame object; must expose ``site``, ``epoch``, ``header`` (mapping with
        an optional 'INSTRUME' key), and whatever the configured filename
        functions read from it.

    Returns
    -------
    str
        The calibration filename, including the ``.fits`` extension.
    """
    # Site/telescope naming is delegated to a configurable function so that
    # different deployments can encode the telescope part differently.
    telescope_filename_function = import_utils.import_attribute(
        context.TELESCOPE_FILENAME_FUNCTION)
    name_components = {
        'site': image.site,
        'telescop': telescope_filename_function(image),
        # Missing INSTRUME falls back to an empty string rather than raising.
        'camera': image.header.get('INSTRUME', ''),
        'epoch': image.epoch,
        'cal_type': calibration_type.lower()
    }
    cal_file = '{site}{telescop}-{camera}-{epoch}-{cal_type}'.format(
        **name_components)
    # Each configured function contributes an optional extra component;
    # empty results are skipped so no trailing/double dashes appear.
    for function_name in context.CALIBRATION_FILENAME_FUNCTIONS[
            calibration_type]:
        filename_function = import_utils.import_attribute(function_name)
        filename_part = filename_function(image)
        if len(filename_part) > 0:
            cal_file += '-{}'.format(filename_part)
    cal_file += '.fits'
    return cal_file
def run_master_maker(image_path_list, runtime_context, frame_type):
    """Read the listed calibration frames, stack them, and write the results.

    Parameters
    ----------
    image_path_list: list of str
        Paths to the calibration frames that will be combined.
    runtime_context:
        Pipeline runtime context passed through to readers, the stage, and writers.
    frame_type: str
        Calibration type key (matched case-insensitively against settings).
    """
    loaded_frames = []
    for image_path in image_path_list:
        loaded_frames.append(image_utils.read_image(image_path, runtime_context))
    # Look up and instantiate the configured stacker stage for this frame type.
    stage_dotted_path = settings.CALIBRATION_STACKER_STAGE[frame_type.upper()]
    stage_to_run = import_utils.import_attribute(stage_dotted_path)(runtime_context)
    stacked = stage_to_run.run(loaded_frames)
    # Persist whatever the stage produced.
    for frame in stacked:
        frame.write(runtime_context)
def get_stages_todo(ordered_stages, last_stage=None, extra_stages=None):
    """Collect the stage classes to execute, honoring an optional stopping point.

    Parameters
    ----------
    ordered_stages: list of banzai.stages.Stage objects
    last_stage: banzai.stages.Stage
        Last stage to do
    extra_stages:
        Stages to do after the last stage

    Returns
    -------
    stages_todo: list of banzai.stages.Stage
        The stages that need to be done

    Notes
    -----
    Extra stages can be other stages that are not in the ordered_stages list.
    """
    if extra_stages is None:
        extra_stages = []
    # None keeps the full list; otherwise stop just after last_stage.
    last_index = None
    if last_stage is not None:
        last_index = ordered_stages.index(last_stage) + 1
    stages_todo = []
    # Resolve each dotted-path stage name to its class, main stages first.
    for stage in ordered_stages[:last_index]:
        stages_todo.append(import_utils.import_attribute(stage))
    for stage in extra_stages:
        stages_todo.append(import_utils.import_attribute(stage))
    return stages_todo
def read_image(filename, runtime_context):
    """Load a frame from disk using the configured frame class.

    Best-effort loader: any failure (including a frame whose instrument could
    not be resolved) is logged and the function implicitly returns None rather
    than propagating the exception, so callers must handle a None result.

    Parameters
    ----------
    filename: str
        Path to the file to load.
    runtime_context:
        Runtime context; its FRAME_CLASS attribute names the frame class to use.

    Returns
    -------
    The loaded (and munged) frame, or None if loading failed.
    """
    try:
        frame_class = import_utils.import_attribute(runtime_context.FRAME_CLASS)
        frame = frame_class(runtime_context, filename=filename)
        if frame.instrument is None:
            logger.error("Image instrument attribute is None, aborting", image=frame)
            # Raised on purpose so the generic handler below also logs the
            # failure and the function falls through to returning None.
            raise IOError
        munge(frame)
        return frame
    except Exception:
        logger.error('Error loading image: {error}'.format(error=logs.format_exception()),
                     extra_tags={'filename': filename})
def add_bpm():
    """Command-line entry point: register a single bad pixel mask file in the db.

    Parses CLI arguments, loads the BPM through the configured frame factory,
    marks it as a master frame, and saves its calibration record.
    """
    parser = argparse.ArgumentParser(description="Add a bad pixel mask to the db.")
    parser.add_argument('--filename', help='Full path to Bad Pixel Mask file')
    parser.add_argument("--log-level", default='debug',
                        choices=['debug', 'info', 'warning', 'critical', 'fatal', 'error'])
    parser.add_argument('--db-address', dest='db_address',
                        default='mysql://*****:*****@localhost/test',
                        help='Database address: Should be in SQLAlchemy form')
    args = parser.parse_args()
    add_settings_to_context(args, banzai_nres.settings)
    logs.set_log_level(args.log_level)
    # The frame factory is configured as a dotted path; instantiate it once.
    factory_class = import_utils.import_attribute(banzai_nres.settings.FRAME_FACTory if False else banzai_nres.settings.FRAME_FACTORY)
    bpm_image = factory_class().open({'path': args.filename}, args)
    bpm_image.is_master = True
    data_product = DataProduct(None,
                               filename=os.path.basename(args.filename),
                               filepath=os.path.dirname(args.filename))
    banzai.dbs.save_calibration_info(bpm_image.to_db_record(data_product), args.db_address)
def add_bpms_from_archive():
    """Command-line entry point: ingest every bad pixel mask the archive API serves.

    Queries the configured archive for frames with OBSTYPE=BPM, opens each one
    through the configured frame factory, and records it as a master calibration.
    """
    parser = argparse.ArgumentParser(description="Add bad pixel mask from a given archive api")
    parser.add_argument('--db-address', dest='db_address',
                        default='mysql://*****:*****@localhost/test',
                        help='Database address: Should be in SQLAlchemy form')
    args = parser.parse_args()
    add_settings_to_context(args, banzai_nres.settings)
    # Query the archive for all bpm files
    url = f'{banzai_nres.settings.ARCHIVE_FRAME_URL}/?OBSTYPE=BPM'
    response = requests.get(url, headers=banzai_nres.settings.ARCHIVE_AUTH_HEADER)
    response.raise_for_status()
    # Load each one, saving the calibration info for each
    frame_factory = import_utils.import_attribute(banzai_nres.settings.FRAME_FACTORY)()
    for frame in response.json()['results']:
        # The factory expects the archive id under the 'frameid' key.
        frame['frameid'] = frame['id']
        bpm_image = frame_factory.open(frame, args)
        if bpm_image is None:
            continue
        bpm_image.is_master = True
        record = bpm_image.to_db_record(DataProduct(None, filename=bpm_image.filename,
                                                    filepath=None))
        banzai.dbs.save_calibration_info(record, args.db_address)
def find_object_in_catalog(image, db_address, gaia_class, simbad_class):
    """
    Find the object in external catalogs. Update the ra and dec if found.
    Also add an initial classification if found.

    Parameters
    ----------
    image:
        Frame object; reads ra/dec/pm_ra/pm_dec and writes back ra, dec,
        pm_ra, pm_dec, and classification when a catalog match is found.
    db_address: str
        Database address used to look up the closest phoenix model.
    gaia_class, simbad_class: str
        Dotted paths of the Gaia / Simbad query classes to import (injected
        so that tests can substitute mocks).

    :return:
    """
    # Assume that the equinox and input epoch are both j2000.
    # Gaia uses an equinox of 2000, but epoch of 2015.5 for the proper motion
    coordinate = SkyCoord(ra=image.ra, dec=image.dec, unit=(units.deg, units.deg),
                          frame='icrs',
                          pm_ra_cosdec=image.pm_ra * units.mas / units.year,
                          pm_dec=image.pm_dec * units.mas / units.year,
                          equinox='j2000', obstime=Time(2000.0, format='decimalyear'))
    # Propagate the proper motion to Gaia's reference epoch before querying.
    transformed_coordinate = coordinate.apply_space_motion(
        new_obstime=Time(2015.5, format='decimalyear'))
    with warnings.catch_warnings():
        # Astroquery/astropy emit noisy warnings during catalog queries.
        warnings.simplefilter("ignore")
        # 10 arcseconds should be a large enough radius to capture bright objects.
        gaia = import_utils.import_attribute(gaia_class)
        gaia_connection = gaia()
        gaia_connection.ROW_LIMIT = 200
        results = gaia_connection.query_object(
            coordinate=transformed_coordinate, radius=10.0 * units.arcsec)
        # Filter out objects fainter than r=12 and brighter than r = 5.
        # There is at least one case (gamma cas) that is in gaia but does not
        # have a complete catalog record like proper motions and effective
        # temperatures.
        results = results[np.logical_and(results['phot_rp_mean_mag'] < 12.0,
                                         results['phot_rp_mean_mag'] > 5.0)]
        if len(results) > 0:
            # convert the luminosity from the LSun units that Gaia provides to cgs units
            results[0]['lum_val'] *= constants.L_sun.to('erg / s').value
            image.classification = dbs.get_closest_HR_phoenix_models(
                db_address, results[0]['teff_val'], results[0]['lum_val'])
            # Update the ra and dec to the catalog coordinates as those are
            # basically always better than a user enters manually.
            image.ra, image.dec = results[0]['ra'], results[0]['dec']
            # Masked proper motions mean Gaia has no measurement; keep the originals.
            if results[0]['pmra'] is not np.ma.masked:
                image.pm_ra, image.pm_dec = results[0]['pmra'], results[0]['pmdec']
        # If nothing in Gaia fall back to simbad. This should only be for
        # stars that are brighter than mag = 3
        else:
            # IMPORTANT NOTE:
            # During e2e tests we do not import astroquery.simbad.Simbad. We import a
            # mocked simbad call which can be found in banzai_nres.tests.utils.MockSimbad .
            # This returns a simbad table that is truncated. If you add a new votable
            # field, you will need to add it to the mocked table as well.
            simbad = import_utils.import_attribute(simbad_class)
            simbad_connection = simbad()
            simbad_connection.add_votable_fields('pmra', 'pmdec', 'fe_h', 'otype')
            try:
                results = simbad_connection.query_region(coordinate, radius='0d0m10s')
            except astroquery.exceptions.TableParseError:
                # Surface SIMBAD's raw response to help debug malformed replies.
                response = simbad_connection.last_response.content
                logger.error(
                    f'Error querying SIMBAD. Response from SIMBAD: {response}',
                    image=image)
                results = []
            if results:
                results = remove_planets_from_simbad(results)
                results = results[0]  # get the closest source.
                image.classification = dbs.get_closest_phoenix_models(
                    db_address, results['Fe_H_Teff'], results['Fe_H_log_g'])[0]
                # note that we always assume the proper motions are in mas/yr...
                # which they should be.
                if results['PMRA'] is not np.ma.masked:
                    image.pm_ra, image.pm_dec = results['PMRA'], results['PMDEC']
                # Update the ra and dec to the catalog coordinates as those will be
                # consistent across observations.
                # Simbad always returns h:m:s, d:m:s, for ra, dec. If for some reason
                # simbad does not, these coords will be very wrong and barycenter
                # correction will be very wrong.
                coord = SkyCoord(results['RA'], results['DEC'],
                                 unit=(units.hourangle, units.deg))
                image.ra, image.dec = coord.ra.deg, coord.dec.deg
# Module imports and helpers for selecting images to reduce.
# NOTE(review): `logs` is imported twice (once with `settings`, once alone) —
# harmless but redundant; also `logger` is imported from banzai.images and then
# immediately rebound by the logging.getLogger call below, shadowing the import.
from banzai import settings, logs
from banzai import logs
from banzai import dbs
from banzai.images import logger
from banzai.munge import munge
from banzai.utils.fits_utils import get_primary_header
from banzai.utils.instrument_utils import instrument_passes_criteria
from banzai.utils import import_utils
from banzai.exceptions import InhomogeneousSetException

# NOTE(review): `logging` is used here but not imported in this view of the
# file — presumably imported above; confirm.
logger = logging.getLogger('banzai')

# Resolve the configured frame class once at import time.
FRAME_CLASS = import_utils.import_attribute(settings.FRAME_CLASS)


def get_obstype(header):
    # Returns the OBSTYPE header value, or None when absent.
    return header.get('OBSTYPE', None)


def get_reduction_level(header):
    # Returns the reduction level; raw frames default to '00'.
    return header.get('RLEVEL', '00')


def select_images(image_list, image_type, db_address, ignore_schedulability):
    # NOTE(review): this function is truncated in this view of the file;
    # the remainder of the loop body (and the except clause for this try)
    # continues past the visible chunk.
    images = []
    for filename in image_list:
        try:
            header = get_primary_header(filename)
# Imports and the start of the calibration-stacking stage hierarchy.
import logging
import abc
import os
import numpy as np
from astropy.io import fits
from banzai.stages import Stage, MultiFrameStage
from banzai import dbs, logs, settings
from banzai.utils import image_utils, stats, fits_utils, qc, date_utils, import_utils, file_utils
import datetime

# Resolve the configured frame class once at import time.
FRAME_CLASS = import_utils.import_attribute(settings.FRAME_CLASS)

logger = logging.getLogger('banzai')


class CalibrationMaker(MultiFrameStage):
    # Abstract base for stages that combine multiple frames into a master
    # calibration. Subclasses must provide `calibration_type`.
    # NOTE(review): this class is truncated in this view of the file — the
    # trailing @abc.abstractmethod decorates a method defined past the
    # visible chunk.
    def __init__(self, runtime_context):
        super(CalibrationMaker, self).__init__(runtime_context)

    def group_by_attributes(self):
        # Frames are grouped for stacking by the criteria configured for this
        # calibration type; an unknown type falls back to no grouping ([]).
        return settings.CALIBRATION_SET_CRITERIA.get(
            self.calibration_type.upper(), [])

    @property
    @abc.abstractmethod
    def calibration_type(self):
        pass

    @abc.abstractmethod