Example #1
import argparse
from types import ModuleType

# Imports assumed from the BANZAI package, based on how the names are used below.
from banzai import logs, settings
from banzai.context import Context


def parse_args(extra_console_arguments=None, parser_description='Process LCO data.'):
    """Parse arguments, including default command line argument, and set the overall log level"""

    parser = argparse.ArgumentParser(description=parser_description)

    parser.add_argument("--processed-path", default='/archive/engineering',
                        help='Top level directory where the processed data will be stored')
    parser.add_argument("--log-level", default='debug', choices=['debug', 'info', 'warning',
                                                                 'critical', 'fatal', 'error'])
    parser.add_argument('--post-to-archive', dest='post_to_archive', action='store_true',
                        default=False)
    parser.add_argument('--post-to-elasticsearch', dest='post_to_elasticsearch', action='store_true',
                        default=False)
    parser.add_argument('--fpack', dest='fpack', action='store_true', default=False,
                        help='Fpack the output files?')
    parser.add_argument('--rlevel', dest='rlevel', default=91, type=int, help='Reduction level')
    parser.add_argument('--db-address', dest='db_address',
                        default='mysql://*****:*****@localhost/test',
                        help='Database address: Should be in SQLAlchemy form')
    parser.add_argument('--elasticsearch-url', dest='elasticsearch_url',
                        default='http://elasticsearch.lco.gtn:9200')
    parser.add_argument('--es-index', dest='elasticsearch_qc_index', default='banzai_qc',
                        help='ElasticSearch index to use for QC results')
    parser.add_argument('--es-doc-type', dest='elasticsearch_doc_type', default='qc',
                        help='Elasticsearch document type for QC records')
    parser.add_argument('--no-bpm', dest='no_bpm', default=False, action='store_true',
                        help='Do not use a bad pixel mask to reduce data (BPM contains all zeros)')
    parser.add_argument('--ignore-schedulability', dest='ignore_schedulability',
                        default=False, action='store_true',
                        help='Relax requirement that the instrument be schedulable')
    parser.add_argument('--use-only-older-calibrations', dest='use_only_older_calibrations', default=False,
                        action='store_true', help='Only use calibrations that were created before the start of the block')
    parser.add_argument('--preview-mode', dest='preview_mode', default=False, action='store_true',
                        help='Save the reductions to the preview directory')
    parser.add_argument('--max-tries', dest='max_tries', default=5, type=int,
                        help='Maximum number of times to try to process a frame')
    parser.add_argument('--broker-url', dest='broker_url',
                        help='URL for the FITS broker service.')

    if extra_console_arguments is None:
        extra_console_arguments = []
    for argument in extra_console_arguments:
        parser.add_argument(*argument['args'], **argument['kwargs'])
    args = parser.parse_args()

    logs.set_log_level(args.log_level)

    # Get all of the settings that are not builtins and store them in the context object
    for setting in dir(settings):
        if not setting.startswith('__') and not isinstance(getattr(settings, setting), ModuleType):
            setattr(args, setting, getattr(settings, setting))

    return Context(args)
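
The `extra_console_arguments` hook lets a pipeline bolt its own options onto this shared parser: each entry is a dict whose 'args' and 'kwargs' are unpacked straight into `parser.add_argument`. A minimal sketch of the calling convention (the option names below are made up for illustration):

# Hypothetical extra options; each dict mirrors one add_argument call.
extra_console_arguments = [
    {'args': ['--site'], 'kwargs': {'dest': 'site', 'help': 'Site code, e.g. ogg'}},
    {'args': ['--camera'], 'kwargs': {'dest': 'camera', 'help': 'Camera name'}},
]

runtime_context = parse_args(extra_console_arguments=extra_console_arguments,
                             parser_description='Reduce LCO science frames.')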
Example #2
def populate_phoenix_models():
    parser = argparse.ArgumentParser("Populate the database with the Phoenix models.\n\n"
                                     "This only needs to be run once on initialization of the database.")
    parser.add_argument('--model-location', dest='model_location',
                        help='Location of the phoenix models. \
                        This should either be s3://bucket-name or an absolute directory path.')
    parser.add_argument("--log-level", default='debug', choices=['debug', 'info', 'warning',
                                                                 'critical', 'fatal', 'error'])
    parser.add_argument('--db-address', dest='db_address',
                        default='sqlite:///test.db',
                        help='Database address: Should be in SQLAlchemy form')
    args = parser.parse_args()
    add_settings_to_context(args, banzai_nres.settings)
    logs.set_log_level(args.log_level)

    dbs.populate_phoenix_models(args.model_location, args)
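
`add_settings_to_context` is imported from the pipeline rather than defined in this example, but Example #1 inlines the same idea: copy every non-dunder, non-module attribute of a settings module onto the parsed arguments. A plausible sketch, inferred from that loop:

from types import ModuleType

def add_settings_to_context(args, settings):
    # Copy each non-builtin, non-module setting onto the argparse namespace,
    # mirroring the loop at the end of parse_args in Example #1.
    for setting in dir(settings):
        if not setting.startswith('__') and not isinstance(getattr(settings, setting), ModuleType):
            setattr(args, setting, getattr(settings, setting))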
Example #3
def update_db():
    parser = argparse.ArgumentParser(description="Query the configdb to ensure that the instruments table"
                                                 "has the most up-to-date information")

    parser.add_argument("--log-level", default='debug', choices=['debug', 'info', 'warning',
                                                                 'critical', 'fatal', 'error'])
    parser.add_argument('--db-address', dest='db_address',
                        default='mysql://*****:*****@localhost/test',
                        help='Database address: Should be in SQLAlchemy form')
    args = parser.parse_args()
    logs.set_log_level(args.log_level)

    try:
        dbs.populate_instrument_tables(db_address=args.db_address)
    except Exception:
        logger.error('Could not populate instruments table: {error}'.format(error=logs.format_exception()))
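
Several of these examples log through a module-level `logger` created at import time; a one-line sketch of that setup (the logger name is an assumption):

import logging

logger = logging.getLogger('banzai')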
Example #4
def add_bpm():
    parser = argparse.ArgumentParser(description="Add a bad pixel mask to the db.")
    parser.add_argument('--filename', help='Full path to Bad Pixel Mask file')
    parser.add_argument("--log-level", default='debug', choices=['debug', 'info', 'warning',
                                                                 'critical', 'fatal', 'error'])
    parser.add_argument('--db-address', dest='db_address',
                        default='mysql://*****:*****@localhost/test',
                        help='Database address: Should be in SQLAlchemy form')
    args = parser.parse_args()
    add_settings_to_context(args, banzai_nres.settings)
    logs.set_log_level(args.log_level)
    frame_factory = import_utils.import_attribute(banzai_nres.settings.FRAME_FACTORY)()
    bpm_image = frame_factory.open({'path': args.filename}, args)
    bpm_image.is_master = True
    banzai.dbs.save_calibration_info(bpm_image.to_db_record(DataProduct(None, filename=os.path.basename(args.filename),
                                                                        filepath=os.path.dirname(args.filename))),
                                     args.db_address)
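
`import_utils.import_attribute` turns the dotted-path string stored in `settings.FRAME_FACTORY` into the class itself, which is then instantiated. A sketch of what such a helper plausibly does, inferred from this call site rather than the banzai source:

import importlib

def import_attribute(full_name):
    # Split 'package.module.ClassName' into a module path and an attribute
    # name, import the module, and return the attribute.
    module_name, attribute_name = full_name.rsplit('.', 1)
    module = importlib.import_module(module_name)
    return getattr(module, attribute_name)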
Example #5
def mark_frame(mark_as):
    parser = argparse.ArgumentParser(description="Set the is_bad flag to mark the frame as {mark_as}"
                                                 "for a calibration frame in the database ".format(mark_as=mark_as))
    parser.add_argument('--filename', dest='filename', required=True,
                        help='Name of calibration file to be marked')
    parser.add_argument('--db-address', dest='db_address',
                        default='mysql://*****:*****@localhost/test',
                        help='Database address: Should be in SQLAlchemy form')
    parser.add_argument("--log-level", default='debug', choices=['debug', 'info', 'warning',
                                                                 'critical', 'fatal', 'error'])

    args = parser.parse_args()
    logs.set_log_level(args.log_level)

    logger.info("Marking the frame {filename} as {mark_as}".format(filename=args.filename, mark_as=mark_as))
    dbs.mark_frame(args.filename, mark_as, db_address=args.db_address)
    logger.info("Finished")
Example #6
def create_db():
    """
    Create the database structure.

    This only needs to be run once on initialization of the database.
    """
    parser = argparse.ArgumentParser("Create the database.\n\n"
                                     "This only needs to be run once on initialization of the database.")

    parser.add_argument("--log-level", default='debug', choices=['debug', 'info', 'warning',
                                                                 'critical', 'fatal', 'error'])
    parser.add_argument('--db-address', dest='db_address',
                        default='sqlite:///test.db',
                        help='Database address: Should be in SQLAlchemy form')
    args = parser.parse_args()
    logs.set_log_level(args.log_level)

    dbs.create_db(args.db_address)
Example #7
def setup_loggers(*args, **kwargs):
    logs.set_log_level(os.getenv('BANZAI_WORKER_LOGLEVEL', 'INFO'))
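
The `*args, **kwargs` signature and the worker-oriented environment variable suggest this is a signal handler. Assuming Celery is the task queue (an assumption based on the BANZAI_WORKER_LOGLEVEL variable), it would be wired up like this:

from celery.signals import setup_logging

# Replace Celery's default logging configuration with setup_loggers.
setup_logging.connect(setup_loggers)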
Example #8
def migrate_db():

    parser = argparse.ArgumentParser()
    parser.add_argument(
        'old_db_address',
        help='Old database address to be migrated: Should be in SQLAlchemy form'
    )
    parser.add_argument(
        'new_db_address',
        help='New database address: Should be in SQLAlchemy form')
    parser.add_argument(
        "--log-level",
        default='debug',
        choices=['debug', 'info', 'warning', 'critical', 'fatal', 'error'])
    args = parser.parse_args()

    logs.set_log_level(args.log_level)
    logger.info(
        "Creating new DB {new_db_address} from old DB {old_db_address}".format(
            new_db_address=args.new_db_address,
            old_db_address=args.old_db_address))
    create_new_db(args.new_db_address)

    with dbs.get_session(db_address=args.old_db_address) as old_db_session, \
            dbs.get_session(db_address=args.new_db_address) as new_db_session:

        # First copy sites table
        logger.info("Querying and organizing the old Site table")
        sites = base_to_dict(old_db_session.query(Site).all())
        logger.info(
            "Adding {n} rows from the old Site table to the new Site table".
            format(n=len(sites)))
        add_rows(new_db_session, dbs.Site, sites)

        # Move Telescope to Instrument with a couple of variable renames
        logger.info("Querying and organizing the old Telescope table")
        telescopes = base_to_dict(old_db_session.query(Telescope).all())
        change_key_name(telescopes, 'instrument', 'camera')
        change_key_name(telescopes, 'camera_type', 'type')
        logger.info(
            "Adding {n} rows from the old Telescope table to the new Instrument table"
            .format(n=len(telescopes)))
        add_rows(new_db_session, dbs.Instrument, telescopes)

        # Move old BPMs to CalibrationImage
        logger.info("Querying and organizing the old BadPixelMask table")
        bpms = base_to_dict(old_db_session.query(BadPixelMask).all())
        for row in bpms:
            row['type'] = 'BPM'
            row['is_master'] = True
            row['attributes'] = {'ccdsum': row.pop('ccdsum')}
            del row['id']
        change_key_name(bpms, 'creation_date', 'dateobs')
        change_key_name(bpms, 'telescope_id', 'instrument_id')
        # The BPM table has duplicate filenames; keep only the first of each
        already_seen = set()
        bpms_pruned = []
        for row in bpms:
            if row['filename'] not in already_seen:
                bpms_pruned.append(row)
                already_seen.add(row['filename'])
        logger.info(
            "Adding {n} rows from the old BadPixelMask table to the new CalibrationImage table"
            .format(n=len(bpms_pruned)))
        add_rows(new_db_session, dbs.CalibrationImage, bpms_pruned)

        # Convert old CalibrationImage to new type
        logger.info("Querying and organizing the old CalibrationsImage table")
        calibrations = base_to_dict(
            old_db_session.query(CalibrationImage).all())
        for row in calibrations:
            row['is_master'] = True
            row['attributes'] = {
                'filter': row.pop('filter_name'),
                'ccdsum': row.pop('ccdsum')
            }
            del row['id']
        change_key_name(calibrations, 'dayobs', 'dateobs')
        change_key_name(calibrations, 'telescope_id', 'instrument_id')
        logger.info(
            "Adding {n} rows from the old CalibrationImage table to the new CalibrationImage table"
            .format(n=len(calibrations)))
        add_rows(new_db_session, dbs.CalibrationImage, calibrations)

        # Copy the PreviewImage table to ProcessedImage (attributes are all the same)
        logger.info("Querying and organizing the old PreviewImage table")
        preview_images = base_to_dict(old_db_session.query(PreviewImage).all())
        logger.info(
            "Adding {n} rows from the old PreviewImage table to the new ProcessedImage table"
            .format(n=len(preview_images)))
        add_rows(new_db_session, dbs.ProcessedImage, preview_images)

        logger.info("Finished")