def get_average_tel_power(t1, t2):
    """Return the mean total telescope power (DP_TOTAL pseudo-MSID) for a period.

    :param t1: start of period (any DateTime-compatible value)
    :param t2: end of period (any DateTime-compatible value)
    :return: mean of the daily-stat DP_PTOTAL values over the period
    """
    start = DateTime(t1).date
    stop = DateTime(t2).date
    power = fetch.Msid('DP_PTOTAL', start, stop, stat='daily')
    return np.mean(power.vals)
# Example #2
def plot_housing_temperature():
    """Plot the long-term AACH1T (aspect camera housing temperature) trend.

    Fetches daily-stat telemetry from 2000:001 onward and writes the plot
    to ``<opt.data_root>/aca_housing_temperature.png`` with a transparent
    figure background.
    """
    telem = fetch.Msid('aach1t', '2000:001', stat='daily')
    plt.close(1)
    fig = plt.figure(figsize=(8, 4))

    # Convert CXC seconds to decimal year for a human-friendly x axis.
    decyear = Time(telem.times, format='cxcsec').decimalyear
    plt.plot(decyear, telem.vals)
    plt.grid()
    plt.xlabel('Year')
    plt.ylabel('Temperature (degF)')
    plt.title('Aspect Camera housing temperature trend')

    outroot = os.path.join(opt.data_root, 'aca_housing_temperature')
    logger.info('Writing plot files {}.png'.format(outroot))
    fig.patch.set_visible(False)
    plt.savefig(outroot + '.png', facecolor="none")
def gen_thermal_checklist(filename='thermlist.csv'):
    """Build a thermal MSID checklist dict and pickle it to disk.

    Reads a CSV checklist (columns: greta_msid, ska_msid, owner,
    description), verifies each MSID exists in the local engineering
    archive, and writes ``(thermdict, missing, notinarchive)`` to
    'thermalmsiddata.pkl'.

    :param filename: name of the CSV checklist file to read
    """
    def read_therm_checklist(filename):
        # Parse the CSV into a header row plus a list of row-lists.
        def splitline(line):
            return line.strip().split(',')

        with open(filename, 'r') as fid:
            lines = fid.readlines()

        header = splitline(lines.pop(0))
        thermlist = [splitline(line) for line in lines]

        return thermlist, header

    thermlist, header = read_therm_checklist(filename)

    thermdict = {}
    notinarchive = []
    missing = []
    for i, line in enumerate(thermlist):
        ok = False
        greta_msid = line[0].strip().lower()
        ska_msid = line[1].strip().lower()
        try:
            # Assume the local archive is not more than 30 days out of date
            data = fetch.Msid(ska_msid,
                              DateTime().secs - 3600 * 24 * 30,
                              stat='daily')
            ok = True
        except IOError:
            # MSID content file absent from the archive entirely.
            missing.append(thermlist[i])
        except ValueError:
            # MSID name not recognized by the archive.
            notinarchive.append(thermlist[i])

        if ok:
            thermdict[ska_msid] = {
                'greta_msid': greta_msid,
                'owner': line[2].strip(),
                'description': line[3].strip()
            }
            # MSIDs with state codes are checked against expected states;
            # all others against numeric limits.
            if data.state_codes:
                thermdict[ska_msid]['type'] = 'expst'
            else:
                thermdict[ska_msid]['type'] = 'limit'

    # BUG FIX: pickle protocol 2 is a binary protocol, so the output file
    # must be opened in binary mode; text mode ('w') raises TypeError on
    # Python 3.
    with open('thermalmsiddata.pkl', 'wb') as fid:
        pickle.dump((thermdict, missing, notinarchive), fid, protocol=2)
# Example #4
def plot_dropouts(msid, thresh=-20):
    """Plot temperature dropouts for an MSID since 2012:300.

    A "dropout" is any 5-minute sample followed by a drop of more than
    ``|thresh|`` deg F.  Produces a two-panel figure (time history plus a
    histogram) and saves it as '<msid>_dropouts.png'.

    :param msid: MSID to examine
    :param thresh: drop threshold in deg F (negative; default -20)
    """
    close('all')
    x = fetch.Msid(msid, '2012:300', stat='5min')
    d_temp = diff(x.vals)
    drop = d_temp < thresh
    figure()
    subplot(2, 1, 1)
    title(msid + ' Dropouts w.r.t. Time \n Dropout defined as:  T2 - T1 < ' +
          str(thresh) + ' deg F')
    plot_cxctime(x.times, x.vals, 'b', label='All Temps')
    # `diff` shortens the array by one, hence the [:-1] alignment: these are
    # the samples immediately preceding a dropout.
    plot_cxctime(x.times[:-1][drop],
                 x.vals[:-1][drop],
                 'r.',
                 alpha=.5,
                 label='Temp Prior to a Dropout')
    ylabel('deg F')
    legend(loc=3)
    subplot(2, 1, 2)
    # BUG FIX: the `normed` keyword was removed from matplotlib's hist()
    # in version 3.1; `density` is the supported equivalent.
    hist(x.vals,
         bins=20,
         range=[min(x.vals), max(x.vals)],
         density=True,
         color='b',
         label='All Temps')
    hist(x.vals[:-1][drop],
         bins=20,
         range=[min(x.vals), max(x.vals)],
         density=True,
         color='r',
         alpha=.5,
         label='Temp Prior to a Dropout')
    legend(loc=3)
    ylabel('Fraction of 5-min Data Points')
    xlabel('deg F')
    tight_layout()
    savefig(msid + '_dropouts.png')
def check_limit_changes(t1, t2):
    """Find all limit-set changes between two times.

    Queries the limit-monitoring sqlite database for every (msid, setkey)
    whose limits changed in [t1, t2] and returns, for each, the last limit
    record before t1 ('before'), the latest record within [t1, t2]
    ('after'), and a technical description when one is available.

    :param t1: start of period (DateTime-compatible)
    :param t2: end of period (DateTime-compatible)
    :return: dict keyed by (msid, setkey) with 'before', 'after' and
        'description' entries
    """

    db = pylimmon.open_sqlite_file()
    cursor = db.cursor()
    cursor.execute(
        '''SELECT a.msid, a.setkey FROM limits AS a WHERE a.datesec>=? 
                      AND a.datesec <=? ''',
        [DateTime(t1).secs, DateTime(t2).secs])
    allchanges = cursor.fetchall()

    # De-duplicate to the unique (msid, setkey) combinations.
    msid_sets = [(d[0], d[1]) for d in allchanges]
    msid_sets = set(msid_sets)

    changes = {}

    for msid, setval in list(msid_sets):

        try:
            # "_WIDE" variants share the base MSID's archive entry.
            if 'wide' in msid.lower():
                skamsid = msid[:-5]
            else:
                skamsid = msid

            data = fetch.Msid(skamsid,
                              t1,
                              DateTime(t1).secs + 3600,
                              stat='5min')
            desc = data.tdb.technical_name
        # BUG FIX: narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit).  The description lookup is
        # best-effort; any failure just leaves desc as None.
        except Exception:
            desc = None

        # Most recent limit record strictly before t1.
        cursor.execute(
            '''SELECT a.msid, a.setkey, a.default_set, a.warning_low, 
                              a.caution_low, a.caution_high, a.warning_high, a.date, a.mlmenable, 
                              a.switchstate, a.mlimsw FROM limits AS a 
                              WHERE a.setkey = ? AND a.msid = ? AND a.datesec < ?
                              AND a.modversion = (SELECT MAX(b.modversion) FROM limits AS b
                              WHERE a.msid = b.msid and a.setkey = b.setkey and b.datesec < ?)''',
            [setval, msid, DateTime(t1).secs,
             DateTime(t1).secs])
        b = cursor.fetchone()
        if not b:
            b = []

        # Latest limit record within [t1, t2].
        cursor.execute(
            '''SELECT a.msid, a.setkey, a.default_set, a.warning_low, 
                              a.caution_low, a.caution_high, a.warning_high, a.date, a.mlmenable, 
                              a.switchstate, a.mlimsw FROM limits AS a 
                              WHERE a.setkey = ? AND a.msid = ? AND a.datesec >= ? AND 
                              a.datesec <= ? AND a.modversion = (SELECT MAX(b.modversion) 
                              FROM limits AS b WHERE a.msid = b.msid AND a.setkey = b.setkey 
                              AND b.datesec >= ? AND b.datesec <= ?)''', [
                setval, msid,
                DateTime(t1).secs,
                DateTime(t2).secs,
                DateTime(t1).secs,
                DateTime(t2).secs
            ])
        a = cursor.fetchone()

        changes[(msid, setval)] = {
            'before': b,
            'after': a,
            'description': desc
        }

    return changes
def process_violations(msid, violations):
    """Add contextual information for any limit/expected state violations.

    Each violation is a tuple ``(times, values, limits, setids, ...,
    limtype)`` where ``limtype`` is one of 'warning_low', 'caution_low',
    'caution_high', 'warning_high' or 'state'.  Excursions of the same
    limit type are aggregated into a single summary entry.

    :param msid: Current mnemonic
    :param violations: List of individual violations (list of tuples)
    :return: dict keyed by limit type with aggregated excursion info
        (start/stop times and dates, extrema, excursion count, total
        duration in hours, limit, set id, description)
    """
    data = fetch.Msid(msid,
                      violations[0][0][0],
                      violations[0][0][-1],
                      stat='5min')
    try:
        desc = data.tdb.technical_name
    # Narrowed from a bare `except:`; description is best-effort only.
    except Exception:
        desc = 'No Description in TDB'

    violation_dict = {}

    def _merge(limtype, v, extrema_func):
        # Fold one excursion into the aggregate entry for its limit type.
        # ``extrema_func`` is np.max for high limits, np.min for low limits,
        # or None for state violations (whose 'extrema' is the first value
        # seen and is never updated afterwards, as in the original code).
        entry = violation_dict.get(limtype)
        if entry is None:
            violation_dict[limtype] = {
                'starttime': v[0][0],
                'stoptime': v[0][-1],
                'num_excursions': 1,
                'extrema': extrema_func(v[1]) if extrema_func else v[1][0],
                'limit': v[2][0],
                'setid': v[3][0],
                'duration': v[0][-1] - v[0][0]
            }
        else:
            if extrema_func:
                entry['extrema'] = extrema_func(
                    (extrema_func(v[1]), entry['extrema']))
            entry['starttime'] = np.min((v[0][0], entry['starttime']))
            # BUG FIX: this excursion ends at v[0][-1]; the original
            # compared against v[0][0] (the start time), which could leave
            # 'stoptime' stale when later excursions were merged in.
            entry['stoptime'] = np.max((v[0][-1], entry['stoptime']))
            entry['num_excursions'] = entry['num_excursions'] + 1
            entry['duration'] = entry['duration'] + v[0][-1] - v[0][0]

    for v in violations:
        limtype = v[-1]
        if 'high' in limtype.lower():
            _merge(limtype, v, np.max)
        elif 'low' in limtype.lower():
            _merge(limtype, v, np.min)
        elif 'state' in limtype.lower():
            _merge(limtype, v, None)

    for limittype in [
            'warning_low', 'caution_low', 'caution_high', 'warning_high',
            'state'
    ]:
        if limittype in violation_dict.keys():
            # Convert the accumulated duration from seconds to hours and
            # attach human-readable dates plus the description.
            violation_dict[limittype][
                'duration'] = violation_dict[limittype]['duration'] / 3600.
            violation_dict[limittype]['description'] = desc
            violation_dict[limittype]['startdate'] = DateTime(
                violation_dict[limittype]['starttime']).date
            violation_dict[limittype]['stopdate'] = DateTime(
                violation_dict[limittype]['stoptime']).date

    return violation_dict
# Example #7
def remote_fetch(*args, **kwargs):
    """Fetch an MSID from the engineering archive (engineering units).

    Thin pass-through wrapper around ``Ska.engarchive.fetch_eng.Msid``;
    all positional and keyword arguments are forwarded unchanged.
    """
    from Ska.engarchive import fetch_eng as fetch
    return fetch.Msid(*args, **kwargs)
# Example #8
# Ephemeris values: position (meters) of Chandra relative to Earth center in
# ECI coordinate frame.
model.comp['orbitephem0_x'].set_data(25000e3)  # 25000 km
model.comp['orbitephem0_y'].set_data(25000e3)  # 25000 km
model.comp['orbitephem0_z'].set_data(25000e3)  # 25000 km

# Normalized attitude quaternions
model.comp['aoattqt1'].set_data(0.0)
model.comp['aoattqt2'].set_data(0.0)
model.comp['aoattqt3'].set_data(0.0)
model.comp['aoattqt4'].set_data(1.0)

# All the usual values here
model.comp['pitch'].set_data(130)
model.comp['eclipse'].set_data(False)
model.comp['sim_z'].set_data(75000)
model.comp['ccd_count'].set_data(6)
model.comp['fep_count'].set_data(6)
model.comp['vid_board'].set_data(1)
model.comp['clocking'].set_data(1)
model.comp['dpa_power'].set_data(0.0)

# Build and evaluate the model over its configured time span.
# NOTE(review): `model`, `start`, and `stop` are defined outside this
# snippet — presumably a xija thermal model and a DateTime-compatible
# time range; confirm against the surrounding script.
model.make()
model.calc()

# Note the telemetry MSID is fptemp_11 but the Node name is fptemp
fptemp_11 = fetch_eng.Msid('fptemp_11', start, stop)  # DEGC

# Overlay model prediction (red) on telemetry (blue).
plot_cxctime(model.times, model.comp['fptemp'].mvals, 'r-')
plot_cxctime(fptemp_11.times, fptemp_11.vals, 'b-')
# Example #9
            os.path.join('/proj/sot/ska/data/eng_archive', 'data', content,
                         'colnames.pickle')))
    print('New {}'.format(content))
    new = new_colnames - cur_colnames
    print(', '.join(sorted(new)))
    lost = cur_colnames - new_colnames
    if lost:
        print('LOST: ', lost)

# Plot representative new vals
d1 = '2016:001'
d2 = '2016:002'

# MSIDs newly added to the archive; grouped by the update that added them.
msids = set(['1AHIRADF'])
msids.update(['POLAEV2BT', 'POLINE07T', 'POM2THV1T'])
msids.update([
    'OHRTHR35_WIDE', '1OLORADF', '1OHIRADF', '2OLORADF', '2OHIRADF',
    'OOBTHR30_WIDE'
])
msids.update(['AOACIIRS', 'AOACISPX', 'AOACIDPX', 'AOACIMSS'])
msids.update(['4OAVOBAT_WIDE', '4OAVHRMT_WIDE'])
msids.update(['TFSSHDT1', 'TFSSHDT2'])

# For each MSID: print its TDB entry, fetch one day of data, and plot it
# in its own figure titled with the MSID name.
for msid in msids:
    m = Ska.tdb.msids[msid]
    print(m)
    dat = fetch.Msid(msid, d1, d2)
    plt.figure()
    dat.plot()
    plt.title(msid)
# Example #10
def plot_msid_interactive(msid='aacccdpt',
                          group='sc',
                          tstart='2001:001',
                          tstop=None,
                          stat='daily',
                          plot_warning_low=False,
                          plot_caution_low=False,
                          plot_caution_high=True,
                          plot_warning_high=True,
                          remove_bads=True,
                          maudefill=False):
    """Plot trending data for one MSID with optional limit lines.

    :param msid: MSID to plot (case-insensitive)
    :param group: group name used for bad-data filtering (passed to ``keepind``)
    :param tstart: start of plot range (DateTime-compatible)
    :param tstop: end of plot range; None means "now"
    :param stat: fetch statistic ('daily', '5min', or 'none'/None for full resolution)
    :param plot_warning_low: if True, draw the warning-low limit line
    :param plot_caution_low: if True, draw the caution-low limit line
    :param plot_caution_high: if True, draw the caution-high limit line
    :param plot_warning_high: if True, draw the warning-high limit line
    :param remove_bads: if True, filter out known-bad data points
    :param maudefill: if True, append recent MAUDE data after the archive ends
    """
    def add_limit_lines():
        # Draw each requested limit as a stepped line.  wL/cL/ch/wh/dates
        # and limitquery are closed over from the enclosing scope and are
        # populated by the limit query further below.
        if limitquery:
            plotdate_dates = cxctime2plotdate(dates)
            if plot_warning_low:
                _ = ax.step(plotdate_dates,
                            wL,
                            where='post',
                            color='r',
                            zorder=4)
            if plot_caution_low:
                _ = ax.step(plotdate_dates,
                            cL,
                            where='post',
                            color='#FFA500',
                            zorder=6)
            if plot_caution_high:
                _ = ax.step(plotdate_dates,
                            ch,
                            where='post',
                            color='#FFA500',
                            zorder=5)
            if plot_warning_high:
                _ = ax.step(plotdate_dates,
                            wh,
                            where='post',
                            color='r',
                            zorder=3)

    def update_plot_data(fig, ax):
        # Plot the mean/value trace, then (for stat data) a shaded band
        # between daily/5-min mins and maxes, then any limit lines.
        ticklocs, _, _ = plot_cxctime(data.times[good],
                                      data.vals[good],
                                      fmt='-',
                                      fig=fig,
                                      ax=ax,
                                      color='#555555',
                                      zorder=2)

        if 'none' not in statstr:

            # For large datasets, thin the min/max envelope to keep the
            # plot responsive; thin_dataset picks block extrema.
            if len(data.vals) > 4000:
                indmax, indmaxstart, blocklen = thin_dataset(data.maxes[good],
                                                             kind='max')
                indmin, indminstart, blocklen = thin_dataset(data.mins[good],
                                                             kind='min')
                times_max = data.cxctimes[good][indmaxstart]
                maxvals = data.maxes[good][indmax]
                times_min = data.cxctimes[good][indminstart]
                minvals = data.mins[good][indmin]

                # Re-sample both envelopes onto a common time grid.
                times = np.sort(np.concatenate((times_max, times_min)))
                maxvals = np.interp(times, times_max, maxvals)
                minvals = np.interp(times, times_min, minvals)

            else:
                times = data.cxctimes[good]
                maxvals = data.maxes[good]
                minvals = data.mins[good]

            # Repeat points to draw a step-style filled band.
            times = np.repeat(times, 2)[1:]
            maxes = np.repeat(maxvals, 2)[:-1]
            mins = np.repeat(minvals, 2)[:-1]

            _ = ax.fill_between(times, mins, maxes, color='#aaaaaa', zorder=1)
            #             ax.plot(times_max, maxvals, 'r')
            #             ax.plot(times_min, minvals, 'r')
            #             ax.plot(data.cxctimes[good], data.maxes[good], 'b')
            #             ax.plot(data.cxctimes[good], data.mins[good], 'b')

            # Added to fill in MAUDE data
            if maudefill and maudedata is not None:
                _ = ax.plot(maudedata['data'][0]['cxctimes'],
                            maudedata['data'][0]['values'],
                            color='blue',
                            zorder=2)

        add_limit_lines()

    def get_msid_limits(msid):
        # Return (warning_low, caution_low, caution_high, warning_high,
        # times) arrays restricted to entries where limit monitoring was
        # enabled; the entry following the last enabled one is kept so the
        # stepped limit line extends to the disable time.
        limdict = pylimmon.get_limits(msid)
        enabled = np.array(limdict['limsets'][0]['mlmenable']) == 1
        last_enabled = np.where(enabled)[0]
        if len(last_enabled) > 0:
            if last_enabled[-1] < (len(enabled) - 1):
                enabled[last_enabled[-1] + 1] = True

        wL = np.array(limdict['limsets'][0]['warning_low'])[enabled]
        cL = np.array(limdict['limsets'][0]['caution_low'])[enabled]
        ch = np.array(limdict['limsets'][0]['caution_high'])[enabled]
        wh = np.array(limdict['limsets'][0]['warning_high'])[enabled]
        dates = np.array(limdict['limsets'][0]['times'])[enabled]
        return wL, cL, ch, wh, dates

    # Normalize the stat argument: the string 'none' (any case) means
    # full-resolution data (stat=None for fetch).
    if 'none' in str(stat).lower():
        stat = None

    statstr = str(stat).lower()
    msid = msid.lower()

    tstart = DateTime(tstart).secs
    if 'none' in str(tstop).lower():
        tstop = DateTime().secs
    else:
        tstop = DateTime(tstop).secs
    data = fetch.Msid(msid, tstart, tstop, stat=stat)
    data.cxctimes = cxctime2plotdate(data.times)

    if maudefill:
        # This may fail for a large number of reasons, so just use a blanket except.
        # This last bit of data is often not critical any way.
        # try:
        maudedata = maude.get_msids(msid, data.times[-1])
        maudedata['data'][0]['cxctimes'] = cxctime2plotdate(
            maudedata['data'][0]['times'])

    if remove_bads:
        good = keepind(data.times, group, msid, stat)
    else:
        good = np.array([True] * len(data.times))
    good = np.where(good)[0]

    try:
        # The data object has the `tdb` attribute, however calling that attribute will result
        # in an error if there is no entry in the TDB. Simply using the hasattr command will
        # trigger the error if the entry is missing.
        hasattr(data, 'tdb')
        title = '{}: {}'.format(msid.upper(), data.tdb.technical_name)
    except:
        title = msid.upper()

    if data.unit:
        units = data.unit
    else:
        units = ''
    # MSIDs that also have a "_WIDE" variant whose limits must be merged in.
    wide = [
        'OOBTHR02', 'OOBTHR03', 'OOBTHR04', 'OOBTHR05', 'OOBTHR06', 'OOBTHR07',
        'OOBTHR08', 'OOBTHR09', 'OOBTHR10', 'OOBTHR11', 'OOBTHR12', 'OOBTHR13',
        'OOBTHR14', 'OOBTHR15', 'OOBTHR17', 'OOBTHR18', 'OOBTHR19', 'OOBTHR20',
        'OOBTHR21', 'OOBTHR22', 'OOBTHR23', 'OOBTHR24', 'OOBTHR25', 'OOBTHR26',
        'OOBTHR27', 'OOBTHR28', 'OOBTHR29', 'OOBTHR30', 'OOBTHR31', 'OOBTHR33',
        'OOBTHR34', 'OOBTHR35', 'OOBTHR36', 'OOBTHR37', 'OOBTHR38', 'OOBTHR39',
        'OOBTHR40', 'OOBTHR41', 'OOBTHR44', 'OOBTHR45', 'OOBTHR46', 'OOBTHR49',
        'OOBTHR50', 'OOBTHR51', 'OOBTHR52', 'OOBTHR53', 'OOBTHR54', '4OAVHRMT',
        '4OAVOBAT'
    ]
    limitquery = False
    if plot_warning_low or plot_caution_low or plot_caution_high or plot_warning_high:
        try:

            wL, cL, ch, wh, dates = get_msid_limits(msid)

            if msid.upper() in wide:
                wL2, cL2, ch2, wh2, dates2 = get_msid_limits(msid.upper() +
                                                             '_WIDE')
                wL = np.concatenate((wL, wL2))
                cL = np.concatenate((cL, cL2))
                ch = np.concatenate((ch, ch2))
                wh = np.concatenate((wh, wh2))
                dates = np.concatenate((dates, dates2))

            limitquery = True
        except IndexError:
            # No limits defined for this MSID; plot without limit lines.
            pass

    # Layout tuning: daily plots get larger tick labels and less bottom margin.
    if statstr == 'daily':
        xtickfontsize = 20
        fig_h_start = 0.23
        fig_w_start = 0.15
    else:
        xtickfontsize = 14
        fig_h_start = 0.3
        fig_w_start = 0.15
    fig_height = 1. - fig_h_start - 0.1
    fig_width = 1. - fig_w_start - 0.1

    fig = plt.gcf()
    fig.clf()
    fig.set_label(msid.upper())
    ax = fig.add_axes([fig_w_start, fig_h_start, fig_width, fig_height])
    _ = ax.set_xlim(cxctime2plotdate(DateTime([tstart, tstop]).secs))

    _ = ax.grid(True)
    _ = ax.set_ylabel(units, fontsize=22)
    _ = ax.set_xlabel('Time (Seconds)', fontsize=22)
    _ = ax.set_title(title, fontsize=30, y=1.03)
    _ = ax.tick_params(axis='both', which='major', labelsize=20)
    plt.setp(ax.xaxis.get_majorticklabels(),
             rotation_mode='anchor',
             ha='right')

    #     ax.callbacks.connect('xlim_changed', update_plot_data)

    update_plot_data(fig, ax)
# Example #11
# Use MSID values from telemetry to initialize if available
model.comp['1pdeaat'].set_data(20.0)
model.comp['pin1at'].set_data(20.0)

## INPUT DATA COMPONENTS

# These initializations are used needed for predicting into the future.
# For analyzing back-orbit data, do not set any of these and let xija
# grab the right values from telemetry.

# All the usual values here
model.comp['pitch'].set_data(130)
model.comp['sim_z'].set_data(75000)
model.comp['ccd_count'].set_data(6)
model.comp['fep_count'].set_data(6)
model.comp['vid_board'].set_data(1)
model.comp['clocking'].set_data(1)
model.comp['dpa_power'].set_data(0.0)

# Detector housing heater.  Set to True for heater ON, False for heater OFF.
model.comp['dh_heater'].set_data(True)

# Build and evaluate the model over its configured time span.
model.make()
model.calc()

# Fetch 1PDEAAT telemetry for comparison with the model prediction.
# (An earlier copy-pasted comment here referred to fptemp_11, which
# belongs to a different example.)
pdeaat = fetch_eng.Msid('1pdeaat', start, stop)  # DEGC

# Overlay model prediction (red) on telemetry (blue).
plot_cxctime(model.times, model.comp['1pdeaat'].mvals, 'r-')
plot_cxctime(pdeaat.times, pdeaat.vals, 'b-')