Example 1
def reduce_night():
    parser = argparse.ArgumentParser(
        description='Reduce all the data from a site at the end of a night.')
    parser.add_argument('--site', dest='site', help='Site code (e.g. ogg)')
    parser.add_argument('--dayobs', dest='dayobs',
                        default=None, help='Day-Obs to reduce (e.g. 20160201)')
    parser.add_argument('--raw-path-root', dest='rawpath_root', default='/archive/engineering',
                        help='Top level directory with raw data.')
    parser.add_argument("--processed-path", default='/archive/engineering',
                        help='Top level directory where the processed data will be stored')

    parser.add_argument("--log-level", default='debug', choices=['debug', 'info', 'warning',
                                                                 'critical', 'fatal', 'error'])
    parser.add_argument('--post-to-archive', dest='post_to_archive', action='store_true',
                        default=False)
    parser.add_argument('--fpack', dest='fpack', action='store_true', default=False,
                        help='Fpack the output files?')

    parser.add_argument('--rlevel', dest='rlevel', default=91, help='Reduction level')
    parser.add_argument('--db-address', dest='db_address',
                        default='mysql://*****:*****@localhost/test',
                        help='Database address; should be in SQLAlchemy form')

    args = parser.parse_args()

    args.preview_mode = False
    args.raw_path = None
    args.filename = None

    pipeline_context = PipelineContext(args)

    logs.start_logging(log_level=pipeline_context.log_level)

    # Ping the configdb to get currently schedulable telescopes
    try:
        dbs.populate_telescope_tables(db_address=pipeline_context.db_address)
    except Exception as e:
        logger.error('Could not connect to the configdb.')
        logger.error(e)

    timezone = dbs.get_timezone(args.site, db_address=args.db_address)

    telescopes = dbs.get_schedulable_telescopes(args.site, db_address=args.db_address)

    if timezone is not None:
        # If no dayobs is given, calculate it.
        if args.dayobs is None:
            args.dayobs = date_utils.get_dayobs(timezone=timezone)

        # For each telescope at the given site
        for telescope in telescopes:
            pipeline_context.raw_path = os.path.join(args.rawpath_root, args.site,
                                                     telescope.instrument, args.dayobs, 'raw')
            # Run the reductions on the given dayobs
            make_master_bias(pipeline_context)
            make_master_dark(pipeline_context)
            make_master_flat(pipeline_context)
            reduce_science_frames(pipeline_context)

    logs.stop_logging()
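
Every example on this page hands the parsed arguments to a PipelineContext and then reads attributes such as log_level, db_address, and raw_path back off it. The class itself is never shown here, so the following is only a minimal sketch consistent with that usage; the attribute-copying constructor is an assumption, not the pipeline's actual implementation.

class PipelineContext(object):
    """Hypothetical sketch of the context object the examples rely on."""

    def __init__(self, args):
        # Copy each parsed argument onto the instance so call sites can use
        # attribute access (pipeline_context.log_level, .raw_path, ...).
        for key, value in vars(args).items():
            setattr(self, key, value)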
Example 2
def create_master_calibrations():
    pipeline_context = parse_end_of_night_command_line_arguments()
    logs.start_logging(log_level=pipeline_context.log_level)
    make_master_bias()
    make_master_dark()
    make_master_flat()
    logs.stop_logging()
Example 3
def make_master_flat(cmd_args=None):
    pipeline_context = parse_command_line_arguments(cmd_args=cmd_args)
    logs.start_logging(log_level=pipeline_context.log_level)
    stages_to_do = [munge.DataMunger, crosstalk.CrosstalkCorrector, bias.OverscanSubtractor,
                    gain.GainNormalizer, mosaic.MosaicCreator, trim.Trimmer, bias.BiasSubtractor,
                    dark.DarkSubtractor, flats.FlatMaker, headers.HeaderUpdater]
    run(stages_to_do, pipeline_context, image_types=['SKYFLAT'], calibration_maker=True,
        log_message='Making Master Flat')
    logs.stop_logging()
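
The run helper that drives this stage list is only ever called, never defined, in these examples. From its call sites it takes an ordered list of stage classes, the pipeline context, and a few keyword options. The sketch below illustrates that pattern; the stage constructor signature and the do_stage method are assumptions, and the real pipeline would load the image list from pipeline_context.raw_path rather than start from an empty one.

import logging

logger = logging.getLogger(__name__)

def run(stages_to_do, pipeline_context, image_types=None,
        calibration_maker=False, log_message=''):
    # Hypothetical stage runner; image_types and calibration_maker are
    # accepted only to match the call sites above.
    logger.info(log_message)
    images = []
    for stage in stages_to_do:
        # Assumed protocol: each stage wraps the context and transforms the
        # image list before handing it to the next stage.
        images = stage(pipeline_context).do_stage(images)
    return images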
Example 4
def run_preview_pipeline():
    parser = argparse.ArgumentParser(
        description='Make master calibration frames from LCOGT imaging data.')

    parser.add_argument("--processed-path", default='/archive/engineering',
                        help='Top level directory where the processed data will be stored')
    parser.add_argument("--log-level", default='debug', choices=['debug', 'info', 'warning',
                                                                 'critical', 'fatal', 'error'])
    parser.add_argument('--post-to-archive', dest='post_to_archive', action='store_true',
                        default=False)
    parser.add_argument('--db-address', dest='db_address',
                        default='mysql://*****:*****@localhost/test',
                        help='Database address; should be in SQLAlchemy form')
    parser.add_argument('--fpack', dest='fpack', action='store_true', default=False,
                        help='Fpack the output files?')
    parser.add_argument('--rlevel', dest='rlevel', default=11, help='Reduction level')

    parser.add_argument('--n-processes', dest='n_processes', default=12, type=int,
                        help='Number of listener processes to spawn.')

    parser.add_argument('--broker-url', dest='broker_url',
                        default='amqp://*****:*****@cerberus.lco.gtn',
                        help='URL for the broker service.')
    parser.add_argument('--queue-name', dest='queue_name', default='preview_pipeline',
                        help='Name of the queue to listen to from the fits exchange.')
    parser.add_argument('--max-preview-tries', dest='max_preview_tries', default=5,
                        help='Maximum number of tries to produce a preview image.')
    args = parser.parse_args()
    args.preview_mode = True
    args.raw_path = None
    args.filename = None
    pipeline_context = PipelineContext(args)

    logs.start_logging(log_level=pipeline_context.log_level)

    try:
        dbs.populate_telescope_tables(db_address=pipeline_context.db_address)
    except Exception as e:
        logger.error('Could not connect to the configdb.')
        logger.error(e)

    logger.info('Starting pipeline preview mode listener')

    for i in range(args.n_processes):
        p = multiprocessing.Process(target=run_indiviudal_listener, args=(args.broker_url,
                                                                          args.queue_name,
                                                                          PipelineContext(args)))
        p.start()

    logs.stop_logging()
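
run_indiviudal_listener (spelled as in the source) receives the broker URL, the queue name, and a fresh PipelineContext, but its body is not shown on this page. Assuming each worker simply builds and runs a queue listener with the same wiring as Example 6 below, a sketch could be:

from kombu import Connection, Exchange, Queue

def run_indiviudal_listener(broker_url, queue_name, pipeline_context):
    # Hypothetical worker body: consume the queue until the process exits.
    # PreviewModeListener is the consumer class used in Example 6.
    listener = PreviewModeListener(broker_url, pipeline_context)
    fits_exchange = Exchange('fits_files', type='fanout')
    with Connection(broker_url) as connection:
        listener.connection = connection
        listener.queue = Queue(queue_name, fits_exchange)
        listener.run()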
Example 5
def reduce_science_frames(cmd_args=None):
    pipeline_context = parse_command_line_arguments(cmd_args=cmd_args)
    logs.start_logging(log_level=pipeline_context.log_level)
    stages_to_do = [munge.DataMunger, crosstalk.CrosstalkCorrector, bias.OverscanSubtractor,
                    gain.GainNormalizer, mosaic.MosaicCreator, trim.Trimmer, bias.BiasSubtractor,
                    dark.DarkSubtractor, flats.FlatDivider, photometry.SourceDetector,
                    astrometry.WCSSolver, headers.HeaderUpdater]

    image_list = file_utils.make_image_list(pipeline_context)
    for image in image_list:
        pipeline_context.filename = os.path.basename(image)
        run(stages_to_do, pipeline_context, image_types=['EXPOSE', 'STANDARD'],
            log_message='Reducing Science Frames')
    logs.stop_logging()
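
file_utils.make_image_list is only called, never defined, in these snippets; since the loop takes os.path.basename of each entry, it presumably returns full paths to the raw frames. A minimal sketch under that assumption (the *.fits pattern and the single-file branch are guesses):

import glob
import os

def make_image_list(pipeline_context):
    # Hypothetical helper: list raw frames under the context's raw_path,
    # or honor an explicitly requested filename if one was given.
    if pipeline_context.filename is None:
        return glob.glob(os.path.join(pipeline_context.raw_path, '*.fits'))
    return [os.path.join(pipeline_context.raw_path, pipeline_context.filename)]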
Example 6
def run_preview_pipeline(cmd_args=None):
    pipeline_context = parse_command_line_arguments(cmd_args=cmd_args)
    logs.start_logging(log_level=pipeline_context.log_level)
    logger.info('Starting pipeline preview mode listener')
    crawl_exchange = Exchange('fits_files', type='fanout')

    listener = PreviewModeListener('amqp://*****:*****@cerberus.lco.gtn', pipeline_context)

    with Connection(listener.broker_url) as connection:
        listener.connection = connection
        listener.queue = Queue('preview_pipeline', crawl_exchange)
        try:
            listener.run()
        except KeyboardInterrupt:
            logger.info('Shutting down preview pipeline listener...')
            logs.stop_logging()
            sys.exit(0)
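
Example 6 assigns a connection and a queue to the listener and then calls run(), which is exactly kombu's ConsumerMixin protocol. The sketch below shows what such a listener could look like; only the wiring is taken from the example, and the on_message body is an assumption.

import logging

from kombu.mixins import ConsumerMixin

logger = logging.getLogger(__name__)

class PreviewModeListener(ConsumerMixin):
    """Hypothetical kombu consumer matching the wiring in Example 6."""

    def __init__(self, broker_url, pipeline_context):
        self.broker_url = broker_url
        self.pipeline_context = pipeline_context

    def get_consumers(self, Consumer, channel):
        # ConsumerMixin calls this once self.connection has been assigned;
        # self.queue is also set externally, as Example 6 shows.
        return [Consumer(queues=[self.queue], callbacks=[self.on_message])]

    def on_message(self, body, message):
        # Assumed handler: log the announced frame and acknowledge it.
        logger.info('Received message: {0}'.format(body))
        message.ack()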
Example 7
def make_master_flat_console():
    pipeline_context = parse_end_of_night_command_line_arguments()
    logs.start_logging(log_level=pipeline_context.log_level)
    make_master_flat(pipeline_context)
    logs.stop_logging()
Example 8
def reduce_science_frames_console():
    pipeline_context = parse_end_of_night_command_line_arguments()
    logs.start_logging(log_level=pipeline_context.log_level)
    reduce_science_frames(pipeline_context)
    logs.stop_logging()
Example 9
def run_end_of_night_from_console(scripts_to_run):
    pipeline_context = parse_end_of_night_command_line_arguments()
    logs.start_logging(log_level=pipeline_context.log_level)
    for script in scripts_to_run:
        script(pipeline_context)
    logs.stop_logging()
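
scripts_to_run is a list of callables that each accept the pipeline context, as the console wrappers in Examples 7 and 8 suggest. A hypothetical entry point built from the functions on this page might look like:

run_end_of_night_from_console([make_master_bias, make_master_dark,
                               make_master_flat, reduce_science_frames])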
Example 10
def run_preview_pipeline():
    parser = argparse.ArgumentParser(
        description='Make master calibration frames from LCOGT imaging data.')

    parser.add_argument(
        "--processed-path",
        default='/archive/engineering',
        help='Top level directory where the processed data will be stored')
    parser.add_argument(
        "--log-level",
        default='debug',
        choices=['debug', 'info', 'warning', 'critical', 'fatal', 'error'])
    parser.add_argument('--post-to-archive',
                        dest='post_to_archive',
                        action='store_true',
                        default=False)
    parser.add_argument('--db-address',
                        dest='db_address',
                        default='mysql://*****:*****@localhost/test',
                        help='Database address; should be in SQLAlchemy form')
    parser.add_argument('--fpack',
                        dest='fpack',
                        action='store_true',
                        default=False,
                        help='Fpack the output files?')
    parser.add_argument('--rlevel',
                        dest='rlevel',
                        default=11,
                        help='Reduction level')

    parser.add_argument('--n-processes',
                        dest='n_processes',
                        default=12,
                        help='Number of listener processes to spawn.',
                        type=int)

    parser.add_argument('--broker-url',
                        dest='broker_url',
                        default='amqp://*****:*****@rabbitmq.lco.gtn:5672/',
                        help='URL for the broker service.')
    parser.add_argument(
        '--queue-name',
        dest='queue_name',
        default='preview_pipeline',
        help='Name of the queue to listen to from the fits exchange.')
    parser.add_argument(
        '--max-preview-tries',
        dest='max_preview_tries',
        default=5,
        help='Maximum number of tries to produce a preview image.')
    args = parser.parse_args()
    args.preview_mode = True
    args.raw_path = None
    args.filename = None
    pipeline_context = PipelineContext(args)

    logs.start_logging(log_level=pipeline_context.log_level)

    try:
        dbs.populate_telescope_tables(db_address=pipeline_context.db_address)
    except Exception as e:
        logger.error('Could not connect to the configdb.')
        logger.error(e)

    logger.info('Starting pipeline preview mode listener')

    for i in range(args.n_processes):
        p = multiprocessing.Process(target=run_indiviudal_listener,
                                    args=(args.broker_url, args.queue_name,
                                          PipelineContext(args)))
        p.start()

    logs.stop_logging()
Example 11
def reduce_night():
    parser = argparse.ArgumentParser(
        description='Reduce all the data from a site at the end of a night.')
    parser.add_argument('--site', dest='site', help='Site code (e.g. ogg)')
    parser.add_argument('--dayobs',
                        dest='dayobs',
                        default=None,
                        help='Day-Obs to reduce (e.g. 20160201)')
    parser.add_argument('--raw-path-root',
                        dest='rawpath_root',
                        default='/archive/engineering',
                        help='Top level directory with raw data.')
    parser.add_argument(
        "--processed-path",
        default='/archive/engineering',
        help='Top level directory where the processed data will be stored')

    parser.add_argument(
        "--log-level",
        default='debug',
        choices=['debug', 'info', 'warning', 'critical', 'fatal', 'error'])
    parser.add_argument('--post-to-archive',
                        dest='post_to_archive',
                        action='store_true',
                        default=False)
    parser.add_argument('--fpack',
                        dest='fpack',
                        action='store_true',
                        default=False,
                        help='Fpack the output files?')

    parser.add_argument('--rlevel',
                        dest='rlevel',
                        default=91,
                        help='Reduction level')
    parser.add_argument('--db-address',
                        dest='db_address',
                        default='mysql://*****:*****@localhost/test',
                        help='Database address; should be in SQLAlchemy form')

    args = parser.parse_args()

    args.preview_mode = False
    args.raw_path = None
    args.filename = None
    args.max_preview_tries = 5

    pipeline_context = PipelineContext(args)

    logs.start_logging(log_level=pipeline_context.log_level)

    # Ping the configdb to get currently schedulable telescopes
    try:
        dbs.populate_telescope_tables(db_address=pipeline_context.db_address)
    except Exception as e:
        logger.error('Could not connect to the configdb.')
        logger.error(e)

    timezone = dbs.get_timezone(args.site, db_address=args.db_address)

    telescopes = dbs.get_schedulable_telescopes(args.site,
                                                db_address=args.db_address)

    if timezone is not None:
        # If no dayobs is given, calculate it.
        if args.dayobs is None:
            args.dayobs = date_utils.get_dayobs(timezone=timezone)

        # For each telescope at the given site
        for telescope in telescopes:
            pipeline_context.raw_path = os.path.join(args.rawpath_root,
                                                     args.site,
                                                     telescope.instrument,
                                                     args.dayobs, 'raw')
            try:
                # Run the reductions on the given dayobs
                make_master_bias(pipeline_context)
                make_master_dark(pipeline_context)
                make_master_flat(pipeline_context)
                reduce_science_frames(pipeline_context)
            except Exception as e:
                logger.error(e)
    logs.stop_logging()
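
Both reduce_night variants fall back to date_utils.get_dayobs(timezone=...) when --dayobs is omitted. That helper is not defined here; a plausible sketch, assuming timezone is a UTC offset in hours and that DAY-OBS follows the site's local calendar date, would be:

import datetime

def get_dayobs(timezone):
    # Hypothetical: the real DAY-OBS rollover convention is not shown in
    # these examples; this simply shifts UTC to site-local time.
    local_now = datetime.datetime.utcnow() + datetime.timedelta(hours=timezone)
    return local_now.strftime('%Y%m%d')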