Example #1
    def set_centroids(self, source, slot, alg=8, apply_dt=True):
        """
        Assign centroids from ``source`` and ``slot`` to the objects centroid attributes
        (yag, zag, yag_times, zag_times)

        For the supported sources (ground, obc) the centroids are fetched from the mica L1
        archive or telemetry.

        yag, zag, yag_times an zag_times can also be set directly without use of this method.

        :param source: 'ground' | 'obc'
        :param slot: ACA slot
        :param alg: for ground processing, use centroids from this algorithm.
        :param apply_dt: apply centroid time offsets via 'set_offsets'
        """
        self.centroid_source = source
        self.centroid_dt = None
        start = self.start
        stop = self.stop
        # Get centroids from Ska eng archive or mica L1 archive
        if source == 'ground':
            acen_files = sorted(
                asp_l1.get_files(start=start, stop=stop, content=['ACACENT']))
            acen = vstack([Table.read(f) for f in acen_files],
                          metadata_conflicts='silent')
            ok = ((acen['slot'] == slot) & (acen['alg'] == alg) &
                  (acen['status'] == 0) &
                  (acen['time'] >= DateTime(start).secs) &
                  (acen['time'] <= DateTime(stop).secs))
            yags = np.array(acen[ok]['ang_y'] * 3600)
            zags = np.array(acen[ok]['ang_z'] * 3600)
            yag_times = np.array(acen[ok]['time'])
            zag_times = np.array(acen[ok]['time'])
        elif source == 'obc':
            yan_msid = 'AOACYAN{}'.format(slot)
            zan_msid = 'AOACZAN{}'.format(slot)
            telem = fetch.Msidset([yan_msid, zan_msid], start, stop)
            # Filter centroids for reasonableness
            yok = telem[yan_msid].vals > -3276
            zok = telem[zan_msid].vals > -3276
            yags = telem[yan_msid].vals[yok]
            yag_times = telem[yan_msid].times[yok]
            zags = telem[zan_msid].vals[zok]
            zag_times = telem[zan_msid].times[zok]
        else:
            raise ValueError("centroid_source must be 'obc' or 'ground'")
        self.yags = yags
        self.yag_times = yag_times
        self.zags = zags
        self.zag_times = zag_times
        if apply_dt:
            self.set_offsets()
        else:
            self.centroid_dt = 0
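
A minimal usage sketch for set_centroids. The host class is an assumption here
(a CentroidResiduals-style object carrying ``start``/``stop`` attributes and a
``set_offsets`` method); the class name, dates, and slot are illustrative, not
taken from the example:

# Hypothetical host object (name and dates illustrative)
cr = CentroidResiduals(start='2017:001:00:00:00', stop='2017:002:00:00:00')
cr.set_centroids('ground', slot=4)                # mica L1 centroids, alg=8
cr.set_centroids('obc', slot=4, apply_dt=False)   # telemetry; centroid_dt stays 0
print(cr.yags[:5], cr.yag_times[:5])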
Example #2
def test_get_l0_images():
    """
    Do a validation test of get_l0_images:
    - Get 20 mins of image data for slot 6 of obsid 8008 (very nice clean stars)
    - Do first moment centroids in row and col
    - Compare to aspect pipeline FM centroids for same slot data

    This is a deep test that all of the sign conventions are right.  If not,
    everything breaks badly because the star image doesn't move in sync with
    row0, col0.
    """
    start = '2007:002:06:00:00'
    stop = '2007:002:06:20:00'

    imgs = aca_l0.get_l0_images(start, stop, slot=6)

    files = asp_l1.get_files(8008, content=['ACACENT'])
    acen = Table.read(files[0])
    # Pick FM centroids for slot 6
    ok = (acen['alg'] == 1) & (acen['slot'] == 6)
    acen = acen[ok]

    # Row and col centroids
    rcs = []
    ccs = []
    times = [img.TIME for img in imgs]

    # Easy way to do FM centroids with mgrid
    rw, cw = np.mgrid[0:6, 0:6]
    # rw = [[0, 0, 0, 0, 0, 0],
    #       [1, 1, 1, 1, 1, 1],
    #       [2, 2, 2, 2, 2, 2],
    #       [3, 3, 3, 3, 3, 3],
    #       [4, 4, 4, 4, 4, 4],
    #       [5, 5, 5, 5, 5, 5]]

    for img in imgs:
        norm = np.sum(img)
        rcs.append(np.sum(img * rw) / norm + img.row0)
        ccs.append(np.sum(img * cw) / norm + img.col0)

    rcen = interpolate(acen['cent_i'], acen['time'], times)
    ccen = interpolate(acen['cent_j'], acen['time'], times)

    assert np.all(np.abs(rcen - rcs) < 0.05)
    assert np.all(np.abs(ccen - ccs) < 0.05)
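
The first-moment centroid construction with np.mgrid used above is easy to
sanity-check on synthetic data; a point source must come back at exactly its
pixel coordinates. A self-contained check (assumes only numpy):

import numpy as np

# Delta-function "star" at row=2, col=4 of a 6x6 image
img = np.zeros((6, 6))
img[2, 4] = 1.0
rw, cw = np.mgrid[0:6, 0:6]   # rw varies along rows, cw along columns
norm = np.sum(img)
assert np.sum(img * rw) / norm == 2.0   # row centroid
assert np.sum(img * cw) / norm == 4.0   # col centroid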
Example #3
def test_get_slot_data_8x8():
    """
    Do a validation test of get_l0_images:
    - Get 20 mins of image data for slot 6 of obsid 8008 (very nice clean stars)
    - Do first moment centroids in row and col
    - Compare to aspect pipeline FM centroids for same slot data

    This is a deep test that all the signs are right.  If not then everything
    breaks badly because the star image doesn't move in sync with row0, col0.
    """
    start = '2007:002:06:00:00'
    stop = '2007:002:06:20:00'

    slot_data = aca_l0.get_slot_data(start, stop, slot=6, centered_8x8=True)

    files = asp_l1.get_files(8008, content=['ACACENT'])
    acen = Table.read(files[0])
    # Pick FM centroids for slot 6
    ok = (acen['alg'] == 1) & (acen['slot'] == 6)
    acen = acen[ok]

    # Row and col centroids
    times = slot_data['TIME']

    # Easy way to do FM centroids with mgrid
    rw, cw = np.mgrid[0:8, 0:8]

    img_raw = slot_data['IMGRAW']  # np.round(slot_data['IMGRAW']).astype(int)
    norm = np.sum(img_raw, axis=(1, 2))
    rcs = np.sum(img_raw * rw, axis=(1, 2)) / norm + slot_data['IMGROW0'] - 1
    ccs = np.sum(img_raw * cw, axis=(1, 2)) / norm + slot_data['IMGCOL0'] - 1

    rcen = interpolate(acen['cent_i'], acen['time'], times)
    ccen = interpolate(acen['cent_j'], acen['time'], times)

    assert np.all(np.abs(rcen - rcs) < 0.05)
    assert np.all(np.abs(ccen - ccs) < 0.05)
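
The same sanity check works for the vectorized axis=(1, 2) form above, which
centroids a whole stack of 8x8 frames at once (synthetic data, numpy only):

import numpy as np

# Three 8x8 frames, each with one bright pixel
imgs = np.zeros((3, 8, 8))
imgs[0, 1, 2] = imgs[1, 4, 4] = imgs[2, 6, 3] = 1.0
rw, cw = np.mgrid[0:8, 0:8]
norm = np.sum(imgs, axis=(1, 2))
rows = np.sum(imgs * rw, axis=(1, 2)) / norm
cols = np.sum(imgs * cw, axis=(1, 2)) / norm
assert np.allclose(rows, [1, 4, 6])
assert np.allclose(cols, [2, 4, 3])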
Example #4
def update(obsids, config=None):
    """
    For a list of obsids, update the mica table of aspect 1 processing.
    """
    from mica.archive import asp_l1
    if config is None:
        config = DEFAULT_CONFIG
    logger = logging.getLogger('asp 1 proc table')
    logger.setLevel(logging.INFO)
    if not len(logger.handlers):
        logger.addHandler(logging.StreamHandler())

    apstat_db = dict(dbi='sybase',
                     server='sqlsao',
                     database='axafapstat')
    proc_db_file = os.path.join(MICA_ARCHIVE, 'asp1', 'processing_asp_l1.db3')
    if not os.path.exists(proc_db_file) or os.stat(proc_db_file).st_size == 0:
        if not os.path.exists(config['data_root']):
            os.makedirs(config['data_root'])
        logger.info("creating aspect_1_proc db from {}".format(
            config['sql_def']))
        db_sql = Path(__file__).parent / config['sql_def']
        db_init_cmds = open(db_sql).read()
        proc_db = Ska.DBI.DBI(dbi='sqlite', server=proc_db_file)
        proc_db.execute(db_init_cmds)
    else:
        proc_db = Ska.DBI.DBI(dbi='sqlite', server=proc_db_file)
    archdb = Ska.DBI.DBI(dbi='sqlite',
                         server=os.path.join(config['data_root'],
                                             'archfiles.db3'))
    for obs in obsids:
        logger.info("Adding asp1 processing for obs {}".format(obs))
        asols = asp_l1.get_files(obsid=obs, content='ASPSOL', revision='all')
        for sol in asols:
            logger.info("\tprocessing {}".format(sol))
            procdir = os.path.dirname(sol)

            # As of DS 10.8.3, there are both "com" logs and per-ai logs.
            # This glob should get the per-ai logs.  We use the first one
            # to get an obspar version.
            logfiles = get_globfiles(os.path.join(procdir, "asp_l1_f*log*"),
                                     minfiles=1, maxfiles=None)
            aspect_log = gzip.open(logfiles[0], 'rt').read()

            # read the obspar version with a regex from the log
            obspar_version = int(
                re.search(r"axaff\d{5}_\d{3}N(\d{3})_obs0a\.par",
                          aspect_log).group(1))
            hdus = fits.open(sol)
            obi = hdus[1].header['OBI_NUM']
            revision = hdus[1].header['REVISION']
            with Ska.DBI.DBI(**apstat_db) as db:
                aspect_1 = db.fetchall("""SELECT * FROM aspect_1
                                             WHERE obsid = {obsid}
                                             AND obi = {obi}
                                             AND revision = {revision}
                                          """.format(obsid=obs,
                                                     obi=obi,
                                                     revision=revision))
            if len(aspect_1) > 1:
                raise ValueError(
                    "multiple aspect_1 records for obsid {} obi {} revision {}"
                    .format(obs, obi, revision))
            indb = proc_db.fetchall("""SELECT * from aspect_1_proc
                                       WHERE aspect_1_id = {}
                                    """.format(aspect_1[0]['aspect_1_id']))
            archrec = archdb.fetchall("""select * from archfiles
                                     where obsid = {obsid}
                                     and revision = {revision}
                                     and content = 'ASPSOL'
                                  """.format(obsid=obs,
                                             revision=revision))
            # if already in the database, reset the V&V
            # it doesn't really need to be redone (could just be relinked)
            # but this should at least make the bookkeeping consistent
            if len(indb):
                logger.info("Resetting vv_complete to 0 for obsid {obsid} rev {revision}".format(
                        obsid=obs, revision=revision))
                proc_db.execute("""UPDATE aspect_1_proc SET vv_complete = 0
                                   WHERE obsid = {obsid}
                                   AND revision = {revision}
                                """.format(obsid=obs,
                                           revision=revision))
            else:
                proc_db.insert(dict(aspect_1_id=aspect_1[0]['aspect_1_id'],
                                    obsid=obs,
                                    obi=obi,
                                    revision=revision,
                                    obspar_version=obspar_version,
                                    ap_date=str(aspect_1[0]['ap_date'])),
                               'aspect_1_proc')
            isdefault = 'NULL' if archrec[0]['isdefault'] is None else 1
            proc_db.execute(f"""UPDATE aspect_1_proc SET isdefault = {isdefault}
                                WHERE obsid = {obs}
                                AND revision = {revision}
                                """)
            logger.info("\tUpdated table for {}".format(obs))
Example #5
def update(obsids, config=None):
    """
    For a list of obsids, update the mica table of aspect 1 processing.
    """
    from mica.archive import asp_l1
    if config is None:
        config = DEFAULT_CONFIG
    logger = logging.getLogger('asp 1 proc table')
    logger.setLevel(logging.INFO)
    if not len(logger.handlers):
        logger.addHandler(logging.StreamHandler())

    apstat_db = dict(dbi='sybase',
                     server='sqlsao',
                     database='axafapstat')
    proc_db_file = os.path.join(MICA_ARCHIVE, 'asp1', 'processing_asp_l1.db3')
    if not os.path.exists(proc_db_file) or os.stat(proc_db_file).st_size == 0:
        if not os.path.exists(config['data_root']):
            os.makedirs(config['data_root'])
        logger.info("creating aspect_1_proc db from {}".format(
            config['sql_def']))
        db_sql = os.path.join(os.environ['SKA_DATA'], 'mica', config['sql_def'])
        db_init_cmds = open(db_sql).read()
        proc_db = Ska.DBI.DBI(dbi='sqlite', server=proc_db_file)
        proc_db.execute(db_init_cmds)
    else:
        proc_db = Ska.DBI.DBI(dbi='sqlite', server=proc_db_file)
    archdb = Ska.DBI.DBI(dbi='sqlite',
                         server=os.path.join(config['data_root'],
                                             'archfiles.db3'))
    for obs in obsids:
        logger.info("Adding asp1 processing for obs {}".format(obs))
        asols = asp_l1.get_files(obsid=obs, content='ASPSOL', revision='all')
        for sol in asols:
            logger.info("\tprocessing {}".format(sol))
            procdir = os.path.dirname(sol)
            logfile = glob(os.path.join(procdir, "*log*"))[0]
            aspect_log = gzip.open(logfile, 'rt').read()
            # read the obspar version with a regex from the log
            obspar_version = int(
                re.search(r"axaff\d{5}_\d{3}N(\d{3})_obs0a\.par",
                          aspect_log).group(1))
            hdus = fits.open(sol)
            obi = hdus[1].header['OBI_NUM']
            revision = hdus[1].header['REVISION']
            with Ska.DBI.DBI(**apstat_db) as db:
                aspect_1 = db.fetchall("""SELECT * FROM aspect_1
                                             WHERE obsid = {obsid}
                                             AND obi = {obi}
                                             AND revision = {revision}
                                          """.format(obsid=obs,
                                                     obi=obi,
                                                     revision=revision))
            if len(aspect_1) > 1:
                raise ValueError(
                    "multiple aspect_1 records for obsid {} obi {} revision {}"
                    .format(obs, obi, revision))
            indb = proc_db.fetchall("""SELECT * from aspect_1_proc
                                       WHERE aspect_1_id = {}
                                    """.format(aspect_1[0]['aspect_1_id']))
            archrec = archdb.fetchall("""select * from archfiles
                                     where obsid = {obsid}
                                     and revision = {revision}
                                     and content = 'ASPSOL'
                                  """.format(obsid=obs,
                                             revision=revision))
            # if already in the database, reset the V&V
            # it doesn't really need to be redone (could just be relinked)
            # but this should at least make the bookkeeping consistent
            if len(indb):
                logger.info("Resetting vv_complete to 0 for obsid {obsid} rev {revision}".format(
                        obsid=obs, revision=revision))
                proc_db.execute("""UPDATE aspect_1_proc SET vv_complete = 0
                                   WHERE obsid = {obsid}
                                   AND revision = {revision}
                                """.format(obsid=obs,
                                           revision=revision))
            else:
                proc_db.insert(dict(aspect_1_id=aspect_1[0]['aspect_1_id'],
                                    obsid=obs,
                                    obi=obi,
                                    revision=revision,
                                    obspar_version=obspar_version,
                                    ap_date=str(aspect_1[0]['ap_date'])),
                               'aspect_1_proc')
            isdefault = archrec[0]['isdefault']
            if isdefault is not None:
                proc_db.execute("""UPDATE aspect_1_proc SET isdefault = {isdefault}
                                   WHERE obsid = {obsid}
                                   AND revision = {revision}
                                """.format(isdefault=archrec[0]['isdefault'],
                                           obsid=obs,
                                           revision=revision))
            logger.info("\tUpdated table for {}".format(obs))
idx = np.unique(obs['obsid'], return_index=True)[1]
obs = Table(obs[idx])
obs.sort('tstart')
obs['datestart'] = Time(obs['tstart'], format='cxcsec').yday
obs.pprint(max_lines=-1)

obsid_index = shelve.open(obsid_file)

# Go through obsids and either process or skip
for obsid in obs['obsid']:
    if str(obsid) in obsid_index:
        logger.info('Skipping obsid {} - already in archive'.format(obsid))
        continue

    logger.info('Processing obsid {}'.format(obsid))
    asol_files = sorted(asp_l1.get_files(obsid=obsid, content='ASPSOL'))
    if not asol_files:
        logger.info('Skipping obsid {} - no asol files'.format(obsid))
        continue

    asol = get_asol(obsid, asol_files, opt.dt)
    add_asol_to_h5(h5_file, asol)
    obsid_index[str(obsid)] = asol_files

obsid_index.close()

logger.info('File {} updated'.format(h5_file))
logger.info('File {} updated'.format(obsid_file))

# Write out to FITS
fits_file = re.sub(r'\.h5$', '.fits', h5_file)
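
The obsid de-duplication idiom at the top of this script (np.unique with
return_index=True keeps the first row per obsid) can be verified on a tiny
synthetic table:

import numpy as np
from astropy.table import Table

obs = Table({'obsid': [3, 1, 3, 2], 'tstart': [30.0, 10.0, 31.0, 20.0]})
idx = np.unique(obs['obsid'], return_index=True)[1]
obs = Table(obs[idx])   # first occurrence of each obsid
obs.sort('tstart')
assert list(obs['obsid']) == [1, 2, 3]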