Example #1
def get_trak_cat_from_telem(start, stop, cmd_quat):
    start = DateTime(start)
    stop = DateTime(stop)
    msids = [
        "{}{}".format(m, i)
        for m in ['AOACYAN', 'AOACZAN', 'AOACFID', 'AOIMAGE', 'AOACFCT']
        for i in range(0, 8)
    ]
    telem = fetch.MSIDset(
        ['AOACASEQ', 'CORADMEN', 'AOPCADMD', 'AONSTARS', 'AOKALSTR'] + msids,
        start, stop)
    att = fetch.MSIDset(['AOATTQT{}'.format(i) for i in [1, 2, 3, 4]], start,
                        stop)
    cat = {}
    for slot in range(0, 8):
        track = telem['AOACFCT{}'.format(slot)].vals == 'TRAK'
        fid = telem['AOACFID{}'.format(slot)].vals == 'FID '
        star = telem['AOIMAGE{}'.format(slot)].vals == 'STAR'
        n = 30
        if np.count_nonzero(track) < n:
            continue
        if np.any(fid & track):
            cat[slot] = {
                'type': 'FID',
                'yag': telem['AOACYAN{}'.format(slot)].vals[fid & track][0],
                'zag': telem['AOACZAN{}'.format(slot)].vals[fid & track][0]
            }
        else:
            n_samples = np.count_nonzero(track & star)
            if n_samples < (n + 4):
                continue
            # If there is tracked data with a star, let's try to get our n samples from about
            # the middle of the range
            mid_point = int(n_samples / 2.)
            yags = []
            zags = []
            for sample in range(mid_point - int(n / 2.),
                                mid_point + int(n / 2.)):
                qref = Quat(
                    normalize([
                        att['AOATTQT{}'.format(i)].vals[track & star][sample]
                        for i in [1, 2, 3, 4]
                    ]))
                ra, dec = yagzag2radec(
                    telem['AOACYAN{}'.format(slot)].vals[track & star][sample]
                    / 3600.,
                    telem['AOACZAN{}'.format(slot)].vals[track & star][sample]
                    / 3600., qref)
                yag, zag = radec2yagzag(ra, dec, cmd_quat)
                yags.append(yag)
                zags.append(zag)
            # This doesn't detect MON just yet
            cat[slot] = {
                'type': 'STAR',
                'yag': np.median(yags) * 3600.,
                'zag': np.median(zags) * 3600.
            }
    return cat, telem
Example #2
def get_cmd_quat(date):
    date = DateTime(date)
    cmd_quats = fetch.MSIDset(['AOCMDQT{}'.format(i) for i in [1, 2, 3]],
                              date.secs, date.secs + 120)
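    # Only the first three commanded quaternion components are fetched;
    # reconstruct the fourth from the unit-norm constraint.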
    cmd_q4 = np.sqrt(
        np.abs(1 - cmd_quats['AOCMDQT1'].vals[0]**2 -
               cmd_quats['AOCMDQT2'].vals[0]**2 -
               cmd_quats['AOCMDQT3'].vals[0]**2))
    return Quat(
        normalize([
            cmd_quats['AOCMDQT1'].vals[0], cmd_quats['AOCMDQT2'].vals[0],
            cmd_quats['AOCMDQT3'].vals[0], cmd_q4
        ]))
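
# A minimal usage sketch (not part of the original examples; the date strings
# and interval are placeholders, and the Ska environment with Ska.engarchive,
# Quaternion, and Ska.quatutil is assumed).  The commanded quaternion from
# get_cmd_quat() feeds get_trak_cat_from_telem() from Example #1:
cmd_quat = get_cmd_quat('2015:100:12:00:00')
cat, telem = get_trak_cat_from_telem('2015:100:12:10:00', '2015:100:13:00:00',
                                     cmd_quat)
for slot in sorted(cat):
    print('slot {}: {}'.format(slot, cat[slot]))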
Example #3
def get_data(start, stop, obsid=None, starcheck=None):
    # Get telemetry
    msids = [
        'AOACASEQ', 'AOACQSUC', 'AOFREACQ', 'AOFWAIT', 'AOREPEAT', 'AOACSTAT',
        'AOACHIBK', 'AOFSTAR', 'AOFATTMD', 'AOACPRGS', 'AOATUPST', 'AONSTARS',
        'AOPCADMD', 'AORFSTR1', 'AORFSTR2', 'AOATTQT1', 'AOATTQT2', 'AOATTQT3',
        'AOATTQT4'
    ]
    per_slot = [
        'AOACQID', 'AOACFCT', 'AOIMAGE', 'AOACMAG', 'AOACYAN', 'AOACZAN',
        'AOACICC', 'AOACIDP', 'AOACIIR', 'AOACIMS', 'AOACIQB', 'AOACISP'
    ]
    slot_msids = [
        field + '%s' % slot for field in per_slot for slot in range(0, 8)
    ]

    start_time = DateTime(start).secs
    stop_time = DateTime(stop).secs

    dat = fetch.MSIDset(msids + slot_msids, start_time, stop_time)
    if len(dat['AOACASEQ']) == 0:
        raise ValueError("No telemetry for obsid {}".format(obsid))
    # Interpolate the MSIDset onto the original time grid (which shouldn't do much)
    # but also remove all rows where any one msid has a bad value
    dat.interpolate(times=dat['AOACASEQ'].times, bad_union=True)
    eng_data = Table([col.vals for col in dat.values()], names=dat.keys())
    eng_data['times'] = dat.times

    times = eng_data['times']
    if starcheck is None:
        return eng_data, times, None
    catalog = Table(starcheck['cat'])
    catalog.sort('idx')
    # Filter the catalog to be just guide stars
    catalog = catalog[(catalog['type'] == 'GUI') | (catalog['type'] == 'BOT')]
    # Get the position deltas relative to onboard solution
    dy, dz, star_info, yag, zag = _deltas_vs_obc_quat(eng_data, times, catalog)
    # And add the deltas to the table
    for slot in range(0, 8):
        if slot not in dy:
            continue
        eng_data['dy{}'.format(slot)] = dy[slot].data
        eng_data['dz{}'.format(slot)] = dz[slot].data
        eng_data['cat_yag{}'.format(slot)] = yag[slot]
        eng_data['cat_zag{}'.format(slot)] = zag[slot]
        cat_entry = catalog[catalog['slot'] == slot][0]
        dmag = eng_data['AOACMAG{}'.format(slot)] - cat_entry['mag']
        eng_data['dmag{}'.format(slot)] = dmag.data
    eng_data['time'] = times
    return eng_data, times, star_info
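
# Hypothetical usage sketch (the obsid and date range are placeholders; assumes
# mica.starcheck is available to look up the star catalog):
import mica.starcheck
sc = mica.starcheck.get_starcheck_catalog(17210)
eng_data, times, star_info = get_data('2015:200:00:00:00', '2015:200:08:00:00',
                                      obsid=17210, starcheck=sc)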
Example #4
def get_pcad(dwell):
    msids = [
        'AOACASEQ', 'AOPCADMD', 'AOKALSTR', 'AONSTARS', 'AOATTQT1', 'AOATTQT2',
        'AOATTQT3', 'AOATTQT4', 'AOACIMSS'
    ]
    slots = range(0, 8)
    slot_cols = [
        'AOACMAG', 'AOACYAN', 'AOACZAN', 'AOACFCT', 'AOIMAGE', 'AOACIDP',
        'AOACIIR', 'AOACIMS', 'AOACISP'
    ]
    slot_msids = [
        "{}{}".format(field, slot) for field in slot_cols for slot in slots
    ]
    msids.extend(slot_msids)
    return fetch.MSIDset(msids, dwell.start, dwell.stop)
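
# A short usage sketch (not part of the original example): get_pcad() only
# needs an object with .start and .stop attributes, e.g. a kadi dwell event.
# The date range below is a placeholder.
from kadi import events
dwell = events.dwells.filter('2020:001:00:00:00', '2020:002:00:00:00')[0]
pcad = get_pcad(dwell)
print(pcad['AOACASEQ'].vals[:5])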
Example #5
def get_modern_data(manvr, dwell, starcheck):
    catalog = Table(starcheck['cat'])
    catalog.sort('idx')
    # Filter the catalog to be just acquisition stars
    catalog = catalog[(catalog['type'] == 'ACQ') | (catalog['type'] == 'BOT')]
    slot_for_pos = [cat_row['slot'] for cat_row in catalog]
    pos_for_slot = dict([(slot, idx) for idx, slot in enumerate(slot_for_pos)])
    # Also, save out the starcheck index for each slot for later
    index_for_slot = dict([(cat_row['slot'], cat_row['idx'])
                           for cat_row in catalog])

    # Get telemetry
    msids = [
        'AOACASEQ', 'AOACQSUC', 'AOFREACQ', 'AOFWAIT', 'AOREPEAT', 'AOACSTAT',
        'AOACHIBK', 'AOFSTAR', 'AOFATTMD', 'AOACPRGS', 'AOATUPST', 'AONSTARS',
        'AOPCADMD', 'AORFSTR1', 'AORFSTR2', 'AOATTQT1', 'AOATTQT2', 'AOATTQT3',
        'AOATTQT4'
    ]
    per_slot = [
        'AOACQID', 'AOACFCT', 'AOIMAGE', 'AOACMAG', 'AOACYAN', 'AOACZAN',
        'AOACICC', 'AOACIDP', 'AOACIIR', 'AOACIMS', 'AOACIQB', 'AOACISP'
    ]
    slot_msids = [
        field + '%s' % slot for field in per_slot for slot in range(0, 8)
    ]

    start_time = DateTime(manvr.acq_start).secs
    stop_time = DateTime(dwell.start).secs + 100
    raw_eng_data = fetch.MSIDset(msids + slot_msids,
                                 start_time,
                                 stop_time,
                                 filter_bad=True)
    eng_data = Table([raw_eng_data[col].vals for col in msids], names=msids)
    for field in slot_msids:
        eng_data.add_column(Column(name=field, data=raw_eng_data[field].vals))
    times = Table([raw_eng_data['AOACASEQ'].times], names=['time'])
    if not len(eng_data['AOACASEQ']):
        raise ValueError("No telemetry for obsid {}".format(manvr.get_obsid()))

    # Estimate the offsets from the expected catalog positions
    dy, dz, star_info = _deltas_vs_obc_quat(eng_data, times['time'], catalog)
    # And add the deltas to the table
    for slot in range(0, 8):
        if slot not in dy:
            continue
        eng_data.add_column(
            Column(name='dy{}'.format(slot), data=dy[slot].data))
        eng_data.add_column(
            Column(name='dz{}'.format(slot), data=dz[slot].data))
        cat_entry = catalog[catalog['slot'] == slot][0]
        dmag = eng_data['AOACMAG{}'.format(slot)] - cat_entry['mag']
        eng_data.add_column(Column(name='dmag{}'.format(slot), data=dmag.data))

    # Get the one-shot delta quaternion and the dot product of the deltas
    delta_quat, dot_q = get_delta_quat(eng_data, times['time'], manvr)
    one_shot_length = np.degrees(2 * np.arccos(dot_q))
    one_shot_length = np.min([one_shot_length, 360 - one_shot_length])
    one_shot_length = one_shot_length * 3600

    # Update a copy of the telemetry structure with quaternions
    # corrected by the one-shot delta
    corr_eng_data = eng_data.copy()
    uncorr_times = (times['time'] < DateTime(manvr.guide_start).secs + 1.0)
    q_orig = Quat(q=np.array([
        eng_data[uncorr_times]['AOATTQT1'], eng_data[uncorr_times]['AOATTQT2'],
        eng_data[uncorr_times]['AOATTQT3'], eng_data[uncorr_times]['AOATTQT4']
    ]).transpose())
    q_corr = q_mult(delta_quat.q, q_orig.q)
    corr_eng_data['AOATTQT1'][uncorr_times] = q_corr.q.transpose()[0]
    corr_eng_data['AOATTQT2'][uncorr_times] = q_corr.q.transpose()[1]
    corr_eng_data['AOATTQT3'][uncorr_times] = q_corr.q.transpose()[2]
    corr_eng_data['AOATTQT4'][uncorr_times] = q_corr.q.transpose()[3]
    corr_dy, corr_dz, si = _deltas_vs_obc_quat(corr_eng_data, times['time'],
                                               catalog)
    # delete the now-extra copy of the data
    del corr_eng_data
    # And add the corrected deltas to the table
    for slot in range(0, 8):
        if slot not in corr_dy:
            continue
        eng_data.add_column(
            Column(name='corr_dy{}'.format(slot), data=corr_dy[slot].data))
        eng_data.add_column(
            Column(name='corr_dz{}'.format(slot), data=corr_dz[slot].data))

    # Also add the acquisition id in a useful way
    for slot in range(0, 8):
        if slot not in pos_for_slot:
            continue
        eng_data.add_column(
            Column(name='POS_ACQID{}'.format(slot),
                   data=eng_data['AOACQID{}'.format(pos_for_slot[slot])]))

    return eng_data, times['time'], one_shot_length, star_info
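
# Hypothetical usage sketch (the obsid is a placeholder; assumes kadi events
# and mica.starcheck are available, and that dwell events can be filtered by
# obsid in the same way as the maneuver events in Example #7):
from kadi import events
import mica.starcheck
manvr = events.manvrs.filter(obsid=17210)[0]
dwell = events.dwells.filter(obsid=17210)[0]
starcheck = mica.starcheck.get_starcheck_catalog(17210)
eng_data, times, one_shot, star_info = get_modern_data(manvr, dwell, starcheck)
print('one-shot length: {:.1f} arcsec'.format(one_shot))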
Example #6
    start, DROP_PAD)

# Make a dictionary to store fid drops; use dwell start time as key
all_drop = {}
for dwell in dwells:

    if (dwell.start == '2015:076:03:53:33.193'):
        print """
SCS107 for 16344, which isn't caught by this script
because the fids are all lost within the last {} secs
of the observation""".format(DROP_PAD)
        continue

    actual_fid_slots = []
    pcad_data = fetch.MSIDset(ALL_COLS,
                              dwell.tstart,
                              dwell.stop,
                              filter_bad=True)
    # Use the first value of AOIMAGE to determine if the slot was ever a FID
    for fid_slot in FID_SLOTS:
        if pcad_data['AOIMAGE{}'.format(fid_slot)].vals[0] == 'FID ':
            actual_fid_slots.append(fid_slot)

    # For each fid slot, check to see if we're still tracking the fid up to
    # the end of the dwell.  Record loss times if fid lost more than DROP_PAD
    # before the end of the dwell.
    drop_slot_time = {}
    drop_slot_dtime = {}
    for slot in actual_fid_slots:
        if pcad_data['AOIMAGE{}'.format(slot)].vals[-1] == 'FID ':
            continue
        if pcad_data['AOACFCT{}'.format(slot)].vals[-1] == 'TRAK':
Example #7
File: pcad_table.py  Project: sot/mica
def get_acq_table(obsid):

    manvrs = events.manvrs.filter(obsid=obsid)
    if not len(manvrs):
        return None
    manvr = manvrs[0]

    start_time = DateTime(manvr.acq_start).secs
    stop_time = start_time + (60 * 5)
    acq_data = fetch.MSIDset(msids + slot_msids, start_time, stop_time)

    vals = Table([acq_data[col].vals for col in msids], names=msids)
    for field in slot_msids:
        vals.add_column(Column(name=field, data=acq_data[field].vals))
    times = Table([acq_data['AOACASEQ'].times], names=['time'])

    def compress_data(data, dtime):
        return data[data['AOREPEAT'] == '0 '], dtime[data['AOREPEAT'] == '0 ']

    vals, times = compress_data(vals, times)

    # Get the catalog for the stars
    # This is used both to map ACQID to the right slot and
    # to get the star positions to estimate deltas later
    timeline_at_acq = mica.starcheck.starcheck.get_timeline_at_date(manvr.start)
    mp_dir = None if (timeline_at_acq is None) else timeline_at_acq['mp_dir']
    starcheck = mica.starcheck.get_starcheck_catalog(int(obsid), mp_dir=mp_dir)
    if 'cat' not in starcheck:
        raise ValueError('No starcheck catalog found for {}'.format(obsid))
    catalog = Table(starcheck['cat'])
    catalog.sort('idx')
    # Filter the catalog to be just acquisition stars
    catalog = catalog[(catalog['type'] == 'ACQ') | (catalog['type'] == 'BOT')]
    slot_for_pos = [cat_row['slot'] for cat_row in catalog]
    pos_for_slot = dict([(slot, idx) for idx, slot in enumerate(slot_for_pos)])
    # Also, save out the starcheck index for each slot for later
    index_for_slot = dict([(cat_row['slot'], cat_row['idx']) for cat_row in catalog])

    # Estimate the offsets from the expected catalog positions
    dy, dz = deltas_vs_obc_quat(vals, times['time'], catalog)
    for slot in range(0, 8):
        vals.add_column(Column(name='dy{}'.format(slot), data=dy[slot].data))
        vals.add_column(Column(name='dz{}'.format(slot), data=dz[slot].data))
        cat_entry = catalog[catalog['slot'] == slot][0]
        dmag = vals['AOACMAG{}'.format(slot)] - cat_entry['mag']
        vals.add_column(Column(name='dmag{}'.format(slot), data=dmag.data))

    # make a list of dicts of the table
    simple_data = []
    kalm_start = None
    for drow, trow in zip(vals, times):
        if (kalm_start is None) and (drow['AOACASEQ'] == 'KALM'):
            kalm_start = trow['time']
        if (kalm_start is not None) and (trow['time'] > kalm_start + 5):
            continue
        slot_data = {'slots': [],
                     'time': trow['time'],
                     'aorfstr1_slot': slot_for_pos[int(drow['AORFSTR1'])],
                     'aorfstr2_slot': slot_for_pos[int(drow['AORFSTR2'])],
                     }
        for m in msids:
            slot_data[m] = drow[m]
        for slot in range(0, 8):
            row_dict = {'slot': slot,
                        'catpos': pos_for_slot[slot],
                        'index': index_for_slot[slot]}
            for col in per_slot:
                if col not in ['AOACQID']:
                    row_dict[col] = drow['{}{}'.format(col, slot)]
            for col in ['dy', 'dz', 'dmag']:
                row_dict[col] = drow['{}{}'.format(col, slot)]
            row_dict['POS_ACQID'] = drow['AOACQID{}'.format(pos_for_slot[slot])]
            slot_data['slots'].append(row_dict)
        simple_data.append(slot_data)

    return simple_data
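
# Minimal usage sketch (hypothetical obsid): get_acq_table() returns a list of
# per-sample dicts, each carrying the mnemonic values plus a 'slots' list of
# per-slot entries, or None if no maneuver is found for the obsid.
acq_data = get_acq_table(17210)
if acq_data:
    print('{} samples, {} slots in first sample'.format(
        len(acq_data), len(acq_data[0]['slots'])))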
Example #8
# import Chandra.Time
# datetime = Chandra.Time.DateTime(126446464.184)
# print datetime.date
# print datetime.greta
# print Chandra.Time.DateTime('2009:235:12:13:14').secs

# <demo> --- stop ---

## **Exporting to CSV for local access**

## If you want to move the fetch data to your local machine, an ``MSID`` or
## ``MSIDset`` can be exported as ASCII data table(s) in CSV format.  This can
## easily be imported into Excel or other PC applications::

biases = fetch.MSIDset(['aogbias1', 'aogbias2', 'aogbias3'], '2002:001', stat='daily')
biases.write_zip('biases.zip')
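
## The archive can also be inspected programmatically; a minimal sketch using
## the standard-library ``zipfile`` module (assuming the export produces one
## CSV file per MSID in the set)::

import zipfile
with zipfile.ZipFile('biases.zip') as zf:
    for info in zf.infolist():
        print('{} {}'.format(info.filename, info.file_size))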

# <demo> --- stop ---

## To suspend the ipython shell and look at the newly created file do::

##   <Ctrl>-z

##   % ls -l biases.zip
##   -rw-rw-r-- 1 aldcroft aldcroft 366924 Dec  4 17:07 biases.zip

##   % unzip -l biases.zip
##   Archive:  biases.zip
##     Length     Date   Time    Name
##    --------    ----   ----    ----
Example #9
def get_xray_data(obsids):

    for obsid in obsids:

        obsdir = "%s/auto/obs%05d" % (projdir, obsid)
        if not os.path.exists(obsdir):
            os.makedirs(obsdir)

        src_file = os.path.join(obsdir, 'picked_src.dat')
        obs_info = sqlaca.fetchone(
            "select * from observations where obsid = %d" % obsid)
        if obs_info is None:
            continue
        if not os.path.exists(src_file):
            src = find_obsid_src(obsid, obs_info)
            if src is None:
                continue
            else:
                src.write(src_file, format='ascii.tab')
        else:
            src = Table.read(src_file, format='ascii.tab')

        # cut-out region with a point source for this obsid
        point = '%s/point_source.fits' % obsdir
        #print point
        if (not os.path.exists(point) or ((os.stat(point).st_mtime < MTIME)
                                          and obs_info['instrume'] == 'HRC')
                or REDO):
            extract_point(obs_info, src, obsdir, point)
        obsid_src = point

        # periscope tilt telemetry
        if not os.path.exists(os.path.join(obsdir, 'tilt.pkl')) or REDO:
            obs = obs_info
            msids = [
                'OOBAGRD3', 'OOBAGRD6', 'OHRTHR42', 'OHRTHR43', 'OOBTHR39',
                'OHRTHR24', '4RT702T', 'OHRTHR24', 'AACBPPT', 'AACH1T',
                'AACCCDPT', 'AACBPRT'
            ]
            telem = fetch.MSIDset(msids, obs['tstart'] - 1000,
                                  obs['tstop'] + 1000)
            telemtime = telem['OOBAGRD3'].times
            tilt = dict(telem)
            tilt.update(
                dict(time=telemtime,
                     tilt_axial=telem['OOBAGRD3'].vals,
                     tilt_diam=telem['OOBAGRD6'].vals))
            tilt_pick = open(os.path.join(obsdir, 'tilt.pkl'), 'w')
            cPickle.dump(tilt, tilt_pick)
            tilt_pick.close()
            #print os.path.join(obsdir, 'tilt.pkl')

        # position data
        if (not os.path.exists(os.path.join(obsdir, 'released_pos.pkl'))
                or (os.stat(os.path.join(obsdir, 'released_pos.pkl')).st_mtime
                    < os.stat(point).st_mtime) or REDO):
            obs = obs_info
            print "making released_pos.pkl for {}".format(obs['obsid'])
            print obs
            evts = Table.read(obsid_src)
            q = Quaternion.Quat(
                [obs['ra_nom'], obs['dec_nom'], obs['roll_nom']])

            # only use the first aspect interval of obsid 14457
            if obsid == 14457:
                evts = evts[evts['time'] < 490232479.878]
            y, z = Ska.quatutil.radec2yagzag(evts['RA'], evts['DEC'], q)
            pos = dict(time=np.array(evts['time']),
                       yag=np.array(y * 3600),
                       zag=np.array(z * 3600))
            pos_pick = open(os.path.join(obsdir, 'released_pos.pkl'), 'w')
            cPickle.dump(pos, pos_pick)
            pos_pick.close()

        GRADIENTS = dict(
            OOBAGRD3=dict(yag=6.98145650e-04, zag=9.51578351e-05),
            OOBAGRD6=dict(yag=-1.67009240e-03, zag=-2.79084775e-03),
        )

        # position data
        if (not os.path.exists(os.path.join(obsdir, 'pos.pkl'))
                or (os.stat(os.path.join(obsdir, 'pos.pkl')).st_mtime <
                    os.stat(point).st_mtime) or REDO):
            obs = obs_info
            print "making pos.pkl for {}".format(obs['obsid'])
            evts = Table.read(obsid_src)
            q = Quaternion.Quat(
                [obs['ra_nom'], obs['dec_nom'], obs['roll_nom']])

            # only use the first aspect interval of obsid 14457
            if obsid == 14457:
                evts = evts[evts['time'] < 490232479.878]
            y, z = Ska.quatutil.radec2yagzag(evts['RA'], evts['DEC'], q)
            # retrieve gradient telemetry
            tstart = evts['time'][0]
            tstop = evts['time'][-1]
            gradients = fetch.MSIDset(GRADIENTS.keys(), tstart - 100,
                                      tstop + 100)
            for msid in gradients:
                # filter bad telemetry in place
                filter_bad_telem(gradients[msid])
                times = gradients[msid].times
                evt_idx = np.searchsorted(times, evts['time'])
                # find a mean gradient, because this calibration is relative to mean
                mean_gradient = np.mean(gradients[msid].vals[evt_idx])
                # and smooth the telemetry to deal with slow changes and large step sizes..
                smooth_gradient = smooth(gradients[msid].vals)
                y += (smooth_gradient[evt_idx] -
                      mean_gradient) * GRADIENTS[msid]['yag']
                z += (smooth_gradient[evt_idx] -
                      mean_gradient) * GRADIENTS[msid]['zag']

            pos = dict(time=np.array(evts['time']),
                       yag=np.array(y * 3600),
                       zag=np.array(z * 3600))

            pos_pick = open(os.path.join(obsdir, 'pos.pkl'), 'w')
            cPickle.dump(pos, pos_pick)
            pos_pick.close()

    pos_files = glob("auto/obs*/released_pos.pkl")
    print "Retrieved sources for {} observations".format(len(pos_files))
Example #10
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
import Ska.engarchive.fetch as fetch
from Ska.Matplotlib import plot_cxctime
import Ska.Numpy

tstart = '2009:313:16:00:00'
tstop = '2009:313:17:00:00'

# Get OBC rates and gyro counts
obc = fetch.MSIDset(['aorate1', 'aorate2', 'aorate3'],
                    tstart, tstop,
                    filter_bad=True)
gyr = fetch.MSIDset(['aogyrct1', 'aogyrct2', 'aogyrct3', 'aogyrct4'],
                    tstart, tstop,
                    filter_bad=True)

# Transform delta gyro counts (4 channels) to a body rate (3 axes)
cts2rate = np.array([[-0.5, 0.5, 0.5, -0.5],
                     [-0.25623091, 0.60975037, -0.25623091, 0.60975037],
                     [-0.55615682, -0.05620959, -0.55615682, -0.05620959]])

# Calculate raw spacecraft rate directly from gyro data
cts = np.array([
    gyr['aogyrct1'].vals, gyr['aogyrct2'].vals, gyr['aogyrct3'].vals,
    gyr['aogyrct4'].vals
])
raw_times = (gyr['aogyrct1'].times[1:] + gyr['aogyrct1'].times[:-1]) / 2
delta_times = gyr['aogyrct1'].times[1:] - gyr['aogyrct1'].times[:-1]
delta_cts = cts[:, 1:] - cts[:, :-1]
raw_rates = np.dot(cts2rate, delta_cts) * 0.02 / delta_times
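
# A possible continuation (not part of the original snippet): interpolate the
# fetched OBC roll rate onto the gyro-derived time stamps and overplot both
# with the helpers imported above.  This assumes raw_rates[0] corresponds to
# the roll axis and that an interactive matplotlib backend is available.
obc_roll = Ska.Numpy.interpolate(obc['aorate1'].vals, obc['aorate1'].times,
                                 raw_times)
plot_cxctime(raw_times, raw_rates[0], fmt='-b')
plot_cxctime(raw_times, obc_roll, fmt='-r')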
Example #11

opt, args = get_options()

# If opt.stop is None then DateTime() returns the current time
tstop = DateTime(opt.stop).secs
if opt.start is None:
    tstart = tstop - opt.duration
else:
    tstart = DateTime(opt.start).secs

if 'msids' not in globals():
    dwells = events.dwells.filter(tstart, tstop)
    tstop = np.min([dwells[len(dwells) - 1].tstop, tstop])
    msids = fetch.MSIDset(['aorate1', 'aorate2', 'aorate3'],
                          tstart,
                          tstop,
                          filter_bad=True)

# Filter to times within Kalman interval with no momentum dumps or SIM moves nearby
if 'times' not in globals():
    print('Calculating and final filtering')
    events.dwells.interval_pad = (-1000, -1000)
    events.tsc_moves.interval_pad = (1000, 1000)
    events.dumps.interval_pad = (1000, 1000)
    for msid in msids:
        msids[msid].select_intervals(events.dwells)
        msids[msid].remove_intervals(events.tsc_moves)
        msids[msid].remove_intervals(events.dumps)
    roll_rates = msids['aorate1'].vals * 206264
    pitch_rates = msids['aorate2'].vals * 206264
    yaw_rates = msids['aorate3'].vals * 206264
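
# A short follow-on summary (not part of the original script).  The factor
# 206264 above converts radians to arcsec, so the filtered rates are in
# arcsec/sec.
for name, rates in [('roll', roll_rates), ('pitch', pitch_rates),
                    ('yaw', yaw_rates)]:
    print('{}: mean = {:.3f}, stddev = {:.3f} arcsec/sec'.format(
        name, np.mean(rates), np.std(rates)))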