def test_add_or_update():
    with dbs.get_session(db_address='sqlite:///test.db') as db_session:
        # Add a fake telescope
        dbs.add_or_update_record(db_session, dbs.Instrument,
                                 {'site': 'bpl', 'camera': 'kb101', 'enclosure': 'doma', 'telescope': '1m0a'},
                                 {'site': 'bpl', 'camera': 'kb101', 'enclosure': 'doma', 'telescope': '1m0a',
                                  'type': 'SBig', 'schedulable': False, 'name': 'kb101'})
        db_session.commit()

        # Make sure it got added
        query = db_session.query(dbs.Instrument).filter(dbs.Instrument.site == 'bpl')
        telescope = query.filter(dbs.Instrument.camera == 'kb101').first()
        assert telescope is not None

        # Update the fake telescope
        dbs.add_or_update_record(db_session, dbs.Instrument,
                                 {'site': 'bpl', 'camera': 'kb101', 'enclosure': 'doma', 'telescope': '1m0a'},
                                 {'site': 'bpl', 'camera': 'kb101', 'enclosure': 'doma', 'telescope': '1m0a',
                                  'type': 'SBig', 'schedulable': True, 'name': 'kb101'})
        db_session.commit()

        # Make sure the update took
        query = db_session.query(dbs.Instrument).filter(dbs.Instrument.site == 'bpl')
        telescope = query.filter(dbs.Instrument.camera == 'kb101').first()
        assert telescope is not None
        assert telescope.schedulable

        # Make sure there is only one new telescope in the table
        query = db_session.query(dbs.Instrument).filter(dbs.Instrument.site == 'bpl')
        telescopes = query.filter(dbs.Instrument.camera == 'kb101').all()
        assert len(telescopes) == 1

        # Clean up for other methods
        db_session.delete(telescope)
        db_session.commit()

def get_instrument_ids(db_address, names):
    with get_session(db_address) as db_session:
        instruments = []
        for name in names:
            criteria = dbs.Instrument.name == name
            instruments.extend(db_session.query(dbs.Instrument).filter(criteria).all())
        return [instrument.id for instrument in instruments]

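# A minimal usage sketch for get_instrument_ids (assumptions: the function above is importable
# and the database already contains matching Instrument rows; the address and names below are
# illustrative only, reusing identifiers that appear elsewhere in this listing):
#
#     instrument_ids = get_instrument_ids('sqlite:///test.db', names=['kb101', 'kb95'])
#     # -> list of Instrument primary keys, e.g. [1, 2]
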
def test_add_or_update():
    with dbs.get_session(db_address='sqlite:///test.db') as db_session:
        # Add a fake telescope
        dbs.add_or_update_record(db_session, dbs.Instrument,
                                 {'site': 'bpl', 'camera': 'kb101', 'enclosure': 'doma', 'telescope': '1m0a'},
                                 {'site': 'bpl', 'camera': 'kb101', 'enclosure': 'doma', 'telescope': '1m0a',
                                  'type': 'SBig', 'schedulable': False})
        db_session.commit()

        # Make sure it got added
        query = db_session.query(dbs.Instrument).filter(dbs.Instrument.site == 'bpl')
        telescope = query.filter(dbs.Instrument.camera == 'kb101').first()
        assert telescope is not None

        # Update the fake telescope
        dbs.add_or_update_record(db_session, dbs.Instrument,
                                 {'site': 'bpl', 'camera': 'kb101', 'enclosure': 'doma', 'telescope': '1m0a'},
                                 {'site': 'bpl', 'camera': 'kb101', 'enclosure': 'doma', 'telescope': '1m0a',
                                  'type': 'SBig', 'schedulable': True})
        db_session.commit()

        # Make sure the update took
        query = db_session.query(dbs.Instrument).filter(dbs.Instrument.site == 'bpl')
        telescope = query.filter(dbs.Instrument.camera == 'kb101').first()
        assert telescope is not None
        assert telescope.schedulable

        # Make sure there is only one new telescope in the table
        query = db_session.query(dbs.Instrument).filter(dbs.Instrument.site == 'bpl')
        telescopes = query.filter(dbs.Instrument.camera == 'kb101').all()
        assert len(telescopes) == 1

        # Clean up for other methods
        db_session.delete(telescope)
        db_session.commit()

def run_check_if_stacked_calibrations_are_in_db(raw_filenames, calibration_type):
    number_of_stacks_that_should_have_been_created = get_expected_number_of_calibrations(raw_filenames, calibration_type)
    with get_session(os.environ['DB_ADDRESS']) as db_session:
        calibrations_in_db = db_session.query(CalibrationImage).filter(CalibrationImage.type == calibration_type)
        calibrations_in_db = calibrations_in_db.filter(CalibrationImage.is_master).all()
    assert number_of_stacks_that_should_have_been_created > 0
    assert len(calibrations_in_db) == number_of_stacks_that_should_have_been_created

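# Usage sketch (assumptions: DB_ADDRESS points at a pipeline database that already holds the
# stacked master calibrations; the filenames and calibration type below are illustrative only):
#
#     os.environ['DB_ADDRESS'] = 'sqlite:///test.db'
#     run_check_if_stacked_calibrations_are_in_db(['bias_0001.fits', 'bias_0002.fits'], 'BIAS')
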
def get_calibration_filename(self, image):
    # Build up the selection criteria: same calibration type and same telescope as the image
    calibration_criteria = dbs.CalibrationImage.type == self.calibration_type.upper()
    calibration_criteria &= dbs.CalibrationImage.telescope_id == image.telescope_id

    for criterion in self.group_by_keywords:
        # The filter keyword is stored as filter_name in the CalibrationImage table
        if criterion == 'filter':
            calibration_criteria &= dbs.CalibrationImage.filter_name == getattr(image, criterion)
        else:
            calibration_criteria &= getattr(dbs.CalibrationImage, criterion) == getattr(image, criterion)

    db_session = dbs.get_session(db_address=self.pipeline_context.db_address)
    calibration_query = db_session.query(dbs.CalibrationImage).filter(calibration_criteria)

    # Order the candidates by how close their observation day is to the image's epoch
    epoch_datetime = date_utils.epoch_string_to_date(image.epoch)
    find_closest = func.DATEDIFF(epoch_datetime, dbs.CalibrationImage.dayobs)
    find_closest = func.ABS(find_closest)
    calibration_query = calibration_query.order_by(find_closest.asc())

    calibration_image = calibration_query.first()
    if calibration_image is None:
        calibration_file = None
    else:
        calibration_file = os.path.join(calibration_image.filepath, calibration_image.filename)

    db_session.close()
    return calibration_file

def mock_phoenix_models_in_db(db_address):
    with open(PHOENIX_FILENAME) as f:
        phoenix_data = json.load(f)
    with dbs.get_session(db_address) as db_session:
        db_session.bulk_insert_mappings(banzai_nres.dbs.PhoenixModel, phoenix_data)
        dbs.add_or_update_record(db_session, banzai_nres.dbs.ResourceFile,
                                 {'key': 'phoenix_wavelengths'},
                                 {'filename': 'phoenix_wavelength.fits',
                                  'location': 's3://banzai-nres-phoenix-models-lco-global',
                                  'key': 'phoenix_wavelengths'})

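# Usage sketch (assumptions: PHOENIX_FILENAME names a JSON file whose records map onto the
# PhoenixModel columns, and the tables already exist at db_address; the address is illustrative):
#
#     mock_phoenix_models_in_db('sqlite:///test.db')
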
def test_add_or_update():
    db_session = dbs.get_session(db_address='sqlite:///test.db')
    # Add a fake telescope
    dbs.add_or_update_record(db_session, dbs.Telescope,
                             {'site': 'bpl', 'instrument': 'kb101'},
                             {'site': 'bpl', 'instrument': 'kb101', 'camera_type': 'SBig', 'schedulable': False})
    db_session.commit()

    # Make sure it got added
    query = db_session.query(dbs.Telescope).filter(dbs.Telescope.site == 'bpl')
    telescope = query.filter(dbs.Telescope.instrument == 'kb101').first()
    assert telescope is not None

    # Update the fake telescope
    dbs.add_or_update_record(db_session, dbs.Telescope,
                             {'site': 'bpl', 'instrument': 'kb101'},
                             {'site': 'bpl', 'instrument': 'kb101', 'camera_type': 'SBig', 'schedulable': True})
    db_session.commit()

    # Make sure the update took
    query = db_session.query(dbs.Telescope).filter(dbs.Telescope.site == 'bpl')
    telescope = query.filter(dbs.Telescope.instrument == 'kb101').first()
    assert telescope is not None
    assert telescope.schedulable

    # Make sure there is only one new telescope in the table
    query = db_session.query(dbs.Telescope).filter(dbs.Telescope.site == 'bpl')
    telescopes = query.filter(dbs.Telescope.instrument == 'kb101').all()
    assert len(telescopes) == 1

    # Clean up for other methods
    db_session.delete(telescope)
    db_session.commit()
    db_session.close()

def add_instrument():
    parser = argparse.ArgumentParser(description="Add a new instrument to the database")
    parser.add_argument("--site", help='Site code (e.g. ogg)', required=True)
    parser.add_argument('--enclosure', help='Enclosure code (e.g. clma)', required=True)
    parser.add_argument('--telescope', help='Telescope code (e.g. 0m4a)', required=True)
    parser.add_argument("--camera", help='Camera (e.g. kb95)', required=True)
    parser.add_argument("--camera-type", dest='camera_type',
                        help="Camera type (e.g. 1m0-SciCam-Sinistro)", required=True)
    parser.add_argument("--schedulable", help="Mark the instrument as schedulable",
                        action='store_true', dest='schedulable', default=False)
    parser.add_argument('--db-address', dest='db_address', default='sqlite:///test.db',
                        help='Database address: Should be in SQLAlchemy format')
    args = parser.parse_args()

    instrument = {'site': args.site,
                  'enclosure': args.enclosure,
                  'telescope': args.telescope,
                  'camera': args.camera,
                  'type': args.camera_type,
                  'schedulable': args.schedulable}

    with dbs.get_session(db_address=args.db_address) as db_session:
        dbs.add_instrument(instrument, db_session)

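# Example invocation (a sketch: the console-script name banzai_add_instrument is hypothetical;
# the argument values simply echo the examples given in the argparse help strings above):
#
#     banzai_add_instrument --site ogg --enclosure clma --telescope 0m4a \
#         --camera kb95 --camera-type 1m0-SciCam-Sinistro --schedulable \
#         --db-address sqlite:///test.db
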
def migrate_db():
    parser = argparse.ArgumentParser()
    parser.add_argument('old_db_address',
                        help='Old database address to be migrated: Should be in SQLAlchemy form')
    parser.add_argument('new_db_address',
                        help='New database address: Should be in SQLAlchemy form')
    parser.add_argument("--log-level", default='debug',
                        choices=['debug', 'info', 'warning', 'critical', 'fatal', 'error'])
    args = parser.parse_args()
    logs.set_log_level(args.log_level)

    logger.info("Creating new DB {new_db_address} from old DB {old_db_address}".format(
        new_db_address=args.new_db_address, old_db_address=args.old_db_address))
    create_new_db(args.new_db_address)

    with dbs.get_session(db_address=args.old_db_address) as old_db_session, \
            dbs.get_session(db_address=args.new_db_address) as new_db_session:

        # First copy the Site table
        logger.info("Querying and organizing the old Site table")
        sites = base_to_dict(old_db_session.query(Site).all())
        logger.info("Adding {n} rows from the old Site table to the new Site table".format(n=len(sites)))
        add_rows(new_db_session, dbs.Site, sites)

        # Move Telescope to Instrument with a couple of column renames
        logger.info("Querying and organizing the old Telescope table")
        telescopes = base_to_dict(old_db_session.query(Telescope).all())
        change_key_name(telescopes, 'instrument', 'camera')
        change_key_name(telescopes, 'camera_type', 'type')
        logger.info("Adding {n} rows from the old Telescope table to the new Instrument table".format(n=len(telescopes)))
        add_rows(new_db_session, dbs.Instrument, telescopes)

        # Move old BPMs to CalibrationImage
        logger.info("Querying and organizing the old BadPixelMask table")
        bpms = base_to_dict(old_db_session.query(BadPixelMask).all())
        for row in bpms:
            row['type'] = 'BPM'
            row['is_master'] = True
            row['attributes'] = {'ccdsum': row.pop('ccdsum')}
            del row['id']
        change_key_name(bpms, 'creation_date', 'dateobs')
        change_key_name(bpms, 'telescope_id', 'instrument_id')

        # BPMs have some duplicates; keep only the first row for each filename
        already_seen = []
        bpms_pruned = []
        for row in bpms:
            if row['filename'] not in already_seen:
                bpms_pruned.append(row)
                already_seen.append(row['filename'])
        logger.info("Adding {n} rows from the old BadPixelMask table to the new CalibrationImage table".format(n=len(bpms_pruned)))
        add_rows(new_db_session, dbs.CalibrationImage, bpms_pruned)

        # Convert the old CalibrationImage rows to the new schema
        logger.info("Querying and organizing the old CalibrationImage table")
        calibrations = base_to_dict(old_db_session.query(CalibrationImage).all())
        for row in calibrations:
            row['is_master'] = True
            row['attributes'] = {'filter': row.pop('filter_name'), 'ccdsum': row.pop('ccdsum')}
            del row['id']
        change_key_name(calibrations, 'dayobs', 'dateobs')
        change_key_name(calibrations, 'telescope_id', 'instrument_id')
        logger.info("Adding {n} rows from the old CalibrationImage table to the new CalibrationImage table".format(n=len(calibrations)))
        add_rows(new_db_session, dbs.CalibrationImage, calibrations)

        # Copy the PreviewImage table to ProcessedImage (attributes are all the same)
        logger.info("Querying and organizing the old PreviewImage table")
        preview_images = base_to_dict(old_db_session.query(PreviewImage).all())
        logger.info("Adding {n} rows from the old PreviewImage table to the new ProcessedImage table".format(n=len(preview_images)))
        add_rows(new_db_session, dbs.ProcessedImage, preview_images)

    logger.info("Finished")

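# Example invocation (a sketch: the console-script name banzai_migrate_db is hypothetical;
# the two positional database addresses are required and the SQLite paths are illustrative):
#
#     banzai_migrate_db sqlite:///old.db sqlite:///new.db --log-level info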