def fetch(self, msid, attr='vals', method='linear'):
    """Get data from the Chandra engineering archive.

    Parameters
    ----------
    msid : str
        MSID to fetch from the engineering archive.
    attr : str, optional
        MSID attribute to return, e.g. 'vals' (default 'vals').
    method : str, optional
        Interpolation method passed to Ska.Numpy.interpolate (default 'linear').

    Returns
    -------
    numpy.ndarray
        MSID values interpolated onto the model times.
    """
    tpad = DEFAULT_DT * 5.0
    datestart = DateTime(self.tstart - tpad).date
    datestop = DateTime(self.tstop + tpad).date
    logger.info('Fetching msid: %s over %s to %s' % (msid, datestart, datestop))
    try:
        import Ska.engarchive.fetch_sci as fetch
        tlm = fetch.MSID(msid, datestart, datestop, stat='5min')
        tlm.filter_bad_times()
    except ImportError:
        raise ValueError('Ska.engarchive.fetch not available')
    if tlm.times[0] > self.tstart or tlm.times[-1] < self.tstop:
        raise ValueError('Fetched telemetry does not span model start and '
                         'stop times for {}'.format(msid))
    vals = Ska.Numpy.interpolate(getattr(tlm, attr), tlm.times, self.times,
                                 method=method)
    return vals
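# A minimal standalone sketch of the same fetch-and-interpolate pattern used by
# fetch() above.  Assumptions: a configured Ska environment with access to the
# engineering archive; the MSID ('tephin'), date range, and 328 s grid spacing
# are arbitrary choices for illustration only.
import numpy as np
import Ska.Numpy
import Ska.engarchive.fetch_sci as fetch
from Chandra.Time import DateTime

tstart = DateTime('2021:001:00:00:00').secs
tstop = DateTime('2021:003:00:00:00').secs

# Fetch 5-minute statistics, drop known bad intervals, and interpolate onto a
# uniform time grid (analogous to the model times used in fetch() above).
tlm = fetch.MSID('tephin', tstart, tstop, stat='5min')
tlm.filter_bad_times()
times = np.arange(tstart, tstop, 328.0)
vals = Ska.Numpy.interpolate(tlm.vals, tlm.times, times, method='linear')
print(times.size, vals.mean())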
def get_prediction_data(self, tstart, tstop, T_init, att_data, cmd_states):
    times = self._eng_match_times(tstart, tstop, 328.0)
    states = interpolate_states(cmd_states, times)
    if T_init is None:
        msid_vals = fetch.MSID(self.msid, tstart, tstop, stat='5min',
                               filter_bad=True)
        msid_vals.interpolate(times=times)
        msid_vals = msid_vals.vals
    else:
        msid_vals = T_init * np.ones_like(times)
    combined_dict = {'msid_times': times, 'msid_vals': msid_vals}
    att_times = att_data.pop("times")
    d_sun = Ska.Numpy.interpolate(att_data.pop("d_sun"), att_times, times,
                                  method="linear")
    combined_dict['d_sun'] = d_sun
    for input in self.inputs:
        if input in att_data:
            combined_dict[input] = Ska.Numpy.interpolate(att_data[input],
                                                         att_times, times,
                                                         method="linear")
        elif input == "sim_z":
            combined_dict["sim_z"] = -0.0025143153015598743 * states["simpos"]
        elif input in pwr_states:
            combined_dict[input] = states[input]
    return pd.DataFrame(combined_dict)
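# A self-contained sketch of the interpolate-and-assemble pattern used by
# get_prediction_data() above.  Assumptions: synthetic arrays stand in for the
# attitude data and command states; only numpy, pandas, and Ska.Numpy are
# required, and the 328 s spacing mirrors _eng_match_times().
import numpy as np
import pandas as pd
import Ska.Numpy

# Synthetic source data on an irregular time grid
src_times = np.cumsum(np.random.uniform(200.0, 400.0, size=50))
src_data = {
    'd_sun': 1.49e11 + 1.0e8 * np.sin(src_times / 1.0e4),
    'pitch': 90.0 + 30.0 * np.cos(src_times / 2.0e4),
}

# Common output grid and one column per interpolated input
times = np.arange(src_times[0], src_times[-1], 328.0)
combined = {'msid_times': times}
for name, vals in src_data.items():
    combined[name] = Ska.Numpy.interpolate(vals, src_times, times,
                                           method='linear')

frame = pd.DataFrame(combined)
print(frame.head())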
def get_obs_temps(obsid, outdir):
    try:
        manvrs = events.manvrs.filter(obsid=obsid)
        dwells = events.dwells.filter(obsid=obsid)
    except ValueError:
        return None
    if len(manvrs) and len(dwells):
        ccd_temp = fetch_sci.MSID('AACCCDPT', manvrs[0].stop, dwells[0].stop)
        if len(ccd_temp.vals) == 0:
            return None
        return {'max': ccd_temp.vals.max(), 'mean': ccd_temp.vals.mean()}
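# Hedged usage sketch for get_obs_temps(); the obsid is arbitrary and the call
# assumes kadi events and the engineering archive are available.
temps = get_obs_temps(8008, outdir='/tmp')
if temps is None:
    print('No maneuver/dwell events or no AACCCDPT samples for this obsid')
else:
    print('AACCCDPT max {:.2f}, mean {:.2f}'.format(temps['max'], temps['mean']))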
def calc_stats(obsid):
    obspar = mica.archive.obspar.get_obspar(obsid, version='last')
    if not obspar:
        raise ValueError("No obspar for {}".format(obsid))
    manvr = None
    dwell = None
    try:
        manvrs = events.manvrs.filter(obsid=obsid, n_dwell__gt=0)
        if len(manvrs) == 0:
            raise ValueError
        dwells = events.dwells.filter(obsid=obsid)
        # Use the last manvr and the first dwell
        manvr = manvrs[manvrs.count() - 1]
        dwell = dwells[0]
    except ValueError:
        multi_manvr = events.manvrs.filter(start=obspar['tstart'] - 10000,
                                           stop=obspar['tstart'] + 10000)
        multi = multi_manvr.select_overlapping(events.obsids(obsid=obsid))
        deltas = [np.abs(m.tstart - obspar['tstart']) for m in multi]
        manvr = multi[np.argmin(deltas)]
        dwell = manvr.dwell_set.first()
    if not manvr or not dwell:
        raise ValueError("No manvr or dwell for {}".format(obsid))
    logger.info("Found obsid manvr at {}".format(manvr.start))
    logger.info("Found dwell at {}".format(dwell.start))
    acq_start = manvr.acq_start
    guide_start = manvr.guide_start
    try:
        starcheck = get_starcheck_catalog_at_date(manvr.acq_start)
    except:
        # Bad timelines for these observations, so skip the tstart
        # input for get_starcheck_catalog
        if obsid in [1966]:
            starcheck = get_starcheck_catalog(obsid, mp_dir='/2002/JAN1202/oflsa')
        elif obsid in [3105, 2741, 61334, 61333, 61332, 61331, 3358, 3357]:
            starcheck = get_starcheck_catalog(obsid, mp_dir='/2002/JAN2802/oflsd/')
        elif obsid in [61261]:
            starcheck = get_starcheck_catalog(obsid, mp_dir='/2002/MAR1902/oflsa/')
        elif obsid in [3471, 3086, 61250, 61249, 3094, 3066, 3115, 2833, 3464, 3175]:
            starcheck = get_starcheck_catalog(obsid, mp_dir='/2002/MAR2502/oflsb/')
        elif obsid in [3663, 61185, 61184, 3392, 61183]:
            starcheck = get_starcheck_catalog(obsid, mp_dir='/2002/MAY2402/oflsa/')
        elif obsid in [60983]:
            starcheck = get_starcheck_catalog(obsid, mp_dir='/2002/OCT2102/oflsc/')
        elif obsid in [60640, 60639, 60638, 60637, 60636, 60635, 60634, 60633]:
            raise ValueError("Starcheck not available for PCHECK_JUL2003")
        elif obsid in [60616, 60615]:
            starcheck = get_starcheck_catalog(obsid, mp_dir='/2003/JUL2103/oflsc/')
        elif obsid in [3911]:
            starcheck = get_starcheck_catalog(obsid, mp_dir='/2003/JUL2803/oflsc/')
        elif obsid in [4162]:
            starcheck = get_starcheck_catalog(obsid, mp_dir='/2003/SEP2903/oflsa/')
        elif obsid in [60401]:
            starcheck = get_starcheck_catalog(obsid, mp_dir='/2004/JAN1904/oflsb/')
        elif obsid in [59921, 5035]:
            starcheck = get_starcheck_catalog(obsid, mp_dir='/2004/DEC1404/oflsc/')
        elif obsid in [58548, 58547, 58546, 7753]:
            starcheck = get_starcheck_catalog(obsid, mp_dir='/2007/JAN2907/oflsb/')
        elif obsid in [7936, 7463]:
            starcheck = get_starcheck_catalog(obsid, mp_dir='/2007/MAY2807/oflsb/')
        else:
            raise ValueError("Problem looking up starcheck for {}".format(obsid))
    if starcheck is None or 'cat' not in starcheck or not len(starcheck['cat']):
        raise ValueError('No starcheck catalog found for {}'.format(
            manvr.get_obsid()))
    starcat_time = DateTime(starcheck['cat']['mp_starcat_time'][0]).secs
    starcat_dtime = starcat_time - DateTime(manvr.start).secs
    # If it looks like the wrong starcheck by time, give up
    if abs(starcat_dtime) > 300:
        raise ValueError("Starcheck cat time delta is {}".format(starcat_dtime))
    if abs(starcat_dtime) > 30:
        logger.warning("Starcheck cat time delta of {} is > 30 sec".format(
            abs(starcat_dtime)))
    vals, times, one_shot, star_info = get_modern_data(manvr, dwell, starcheck)
    acq_stats = calc_acq_stats(manvr, vals, times)
    obsid_info = {
        'obsid': obsid,
        'obi': obspar['obi_num'],
        'acq_start': acq_start,
        'guide_start': guide_start,
        'guide_tstart': DateTime(guide_start).secs,
        'one_shot_length': one_shot,
        'revision': '1.0'
    }
    catalog = Table(starcheck['cat'])
    catalog.sort('idx')
    # Filter the catalog to be just acquisition stars
    catalog = catalog[(catalog['type'] == 'ACQ') | (catalog['type'] == 'BOT')]
    time = DateTime(guide_start).secs
    ccd_temp = np.mean(fetch_sci.MSID('AACCCDPT', time - 250, time + 250).vals)
    warm_threshold = 100.0
    warm_frac = dark_model.get_warm_fracs(warm_threshold, time, ccd_temp)
    temps = {'ccd_temp': ccd_temp, 'n100_warm_frac': warm_frac}
    return obsid_info, acq_stats, star_info, catalog, temps
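# Hedged driver sketch for calc_stats(); the obsids are arbitrary and the call
# assumes mica, kadi, and the engineering archive are configured.
for obsid in [2121, 5438]:
    try:
        obsid_info, acq_stats, star_info, catalog, temps = calc_stats(obsid)
    except ValueError as err:
        logger.warning("Skipping obsid %s: %s", obsid, err)
        continue
    logger.info("obsid %s: CCD temp %.2f, N100 warm frac %.4f",
                obsid, temps['ccd_temp'], temps['n100_warm_frac'])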
import os
import sys

from matplotlib.pyplot import *

import Ska.engarchive.fetch_sci as fetch
from Ska.Matplotlib import plot_cxctime

print('Fetch file is', fetch.__file__)
print('ENG_ARCHIVE is', os.environ['ENG_ARCHIVE'])

msids = ('1crat', 'fptemp_11', 'orbitephem0_x', 'sim_z', 'tephin')
rootdir = os.path.dirname(__file__)

for ifig, msid in enumerate(msids):
    figure(ifig + 1)
    clf()
    dat = fetch.MSID(msid, '2010:250', '2011:100', filter_bad=True)
    dat5 = fetch.MSID(msid, '2010:250', '2011:100', stat='5min')
    datday = fetch.MSID(msid, '2010:250', '2011:100', stat='daily')

    # Full-resolution values
    subplot(3, 1, 1)
    plot_cxctime(dat.times, dat.vals, '-b')
    grid()

    # 5-minute statistics
    subplot(3, 1, 2)
    plot_cxctime(dat5.times, dat5.means, '-r')
    grid()

    # Daily statistics
    subplot(3, 1, 3)
    plot_cxctime(datday.times, datday.means, '-c')
    grid()

    savefig(os.path.join(rootdir, 'plot_{0}.png'.format(msid)))
def main():
    global opt
    opt, args = get_options()
    tstart = DateTime(opt.tstart).secs
    tstop = DateTime(opt.tstop).secs

    # Get orbital ephemeris in requested time range
    print('Fetching ephemeris')
    ephem_x = fetch.MSID('orbitephem0_x', opt.tstart, opt.tstop)
    ephem_y = fetch.MSID('orbitephem0_y', opt.tstart, opt.tstop)
    ephem_z = fetch.MSID('orbitephem0_z', opt.tstart, opt.tstop)
    ephem_times = ephem_x.times.copy()

    # Get spacecraft attitude in requested time range at the same sampling as ephemeris
    print('Fetching attitude telemetry between {0} and {1}'.format(
        opt.tstart, opt.tstop))
    qatts = fetch.MSIDset(['aoattqt1', 'aoattqt2', 'aoattqt3', 'aoattqt4'],
                          opt.tstart, opt.tstop)
    # cols, atts = fetch(start=ephem.Time[0], stop=ephem.Time[-1], dt=dt,
    #                    time_format='secs', colspecs=)
    # atts = np.rec.fromrecords(atts, names=cols)
    q1s = qatts['aoattqt1'].vals[::opt.sample]
    q2s = qatts['aoattqt2'].vals[::opt.sample]
    q3s = qatts['aoattqt3'].vals[::opt.sample]
    q4s = qatts['aoattqt4'].vals[::opt.sample]
    q_times = qatts['aoattqt1'].times[::opt.sample]

    ephem_x_vals = Ska.Numpy.interpolate(ephem_x.vals, ephem_times, q_times)
    ephem_y_vals = Ska.Numpy.interpolate(ephem_y.vals, ephem_times, q_times)
    ephem_z_vals = Ska.Numpy.interpolate(ephem_z.vals, ephem_times, q_times)
    chandra_ecis = np.array([ephem_x_vals, ephem_y_vals,
                             ephem_z_vals]).copy().transpose()

    if opt.movie:
        if len(q1s) > 250:
            print("Error: movie option will produce more than 250 images. "
                  "Change code if needed.")
            sys.exit(0)
        if not os.path.exists(opt.out):
            os.makedirs(opt.out)

    # Divvy up calculations among the n processors
    i0s = list(range(0, len(q1s), len(q1s) // opt.nproc + 1))
    i1s = i0s[1:] + [len(q1s)]
    t0 = time.time()

    # Calculate illumination in a separate process over each sub-interval
    queues = []
    procs = []
    for iproc, i0, i1 in zip(itertools.count(), i0s, i1s):
        queue = Queue()
        proc = Process(target=calc_vis_values,
                       args=(queue, iproc, q_times[i0:i1], chandra_ecis[i0:i1],
                             q1s[i0:i1], q2s[i0:i1], q3s[i0:i1], q4s[i0:i1]))
        proc.start()
        procs.append(proc)
        queues.append(queue)

    # Join the results from each processor at the end
    outvals = []
    for proc, queue in zip(procs, queues):
        outvals.extend(queue.get())
        proc.join()
    print()
    print('calc_esa:', time.time() - t0)

    t0 = time.time()
    esa_directs = np.ndarray(len(q_times))
    esa_refls = np.ndarray(len(q_times))
    for i, t, q1, q2, q3, q4, x, y, z in zip(itertools.count(), q_times,
                                             q1s, q2s, q3s, q4s,
                                             ephem_x_vals, ephem_y_vals,
                                             ephem_z_vals):
        direct, refl, total = Chandra.acis_esa.earth_solid_angle(
            Quaternion.Quat([q1, q2, q3, q4]), np.array([x, y, z]))
        esa_directs[i] = direct
        esa_refls[i] = refl
    print('calc_esa:', time.time() - t0)

    # Plot illumination versus date
    fig = plt.figure(1, figsize=(6, 4))
    plt.clf()
    illum = np.rec.fromrecords(
        outvals,
        names=['time', 'direct', 'reflect', 'alt', 'q1', 'q2', 'q3', 'q4'])
    ticklocs, fig, ax = plot_cxctime(illum.time, illum.direct + illum.reflect, '-b')
    # plot_cxctime(illum.time, illum.reflect, '-r')
    # plot_cxctime(illum.time, illum.direct, '-r')
    # plot_cxctime(q_times, esa_directs, '-c')
    # plot_cxctime(q_times, esa_refls, '-m')
    plot_cxctime(q_times, esa_directs + esa_refls, '-r')
    ax.set_title('ACIS radiator illumination')
    ax.set_ylabel('Illumination (steradians)')
    filename = opt.out + '.png'
    fig.savefig(filename)
    print('Created image file', filename)

    # Write results to FITS table
    filename = opt.out + '.fits'
    Ska.Table.write_fits_table(filename, illum)
    print('Created FITS table', filename)

    if opt.movie:
        print('To make a movie run the following command:')
        print('convert -delay 30 %s/*.png -loop 0 %s.gif' % (opt.out, opt.out))
def calc_stats(obsid):
    obspar = mica.archive.obspar.get_obspar(obsid)
    if not obspar:
        raise ValueError("No obspar for {}".format(obsid))
    manvr = None
    dwell = None
    try:
        manvrs = events.manvrs.filter(obsid=obsid, n_dwell__gt=0)
        dwells = events.dwells.filter(obsid=obsid)
        if dwells.count() == 1 and manvrs.count() == 0:
            # There is a single dwell for this obsid but no maneuver matched by
            # obsid, usually because the maneuver has more than one dwell with
            # different obsids (unusual).  Don't throw an overlapping-interval
            # kadi error; just use the maneuver associated with this dwell.
            dwell = dwells[0]
            manvr = dwell.manvr
        elif dwells.count() == 0:
            # If there's just nothing, that doesn't need an error here
            # and gets caught outside the try/except
            pass
        else:
            # Else just take the first match from each
            manvr = manvrs[0]
            dwell = dwells[0]
    except ValueError:
        multi_manvr = events.manvrs.filter(start=obspar['tstart'] - 100000,
                                           stop=obspar['tstart'] + 100000)
        multi = multi_manvr.select_overlapping(events.obsids(obsid=obsid))
        deltas = [np.abs(m.tstart - obspar['tstart']) for m in multi]
        manvr = multi[np.argmin(deltas)]
        dwell = manvr.dwell_set.first()
    if not manvr or not dwell:
        raise ValueError("No manvr or dwell for {}".format(obsid))
    if not manvr.get_next():
        raise ValueError("No *next* manvr so can't calculate dwell")
    if not manvr.guide_start:
        raise ValueError("No guide transition for {}".format(obsid))
    if not manvr.kalman_start:
        raise ValueError("No Kalman transition for {}".format(obsid))
    logger.info("Found obsid manvr at {}".format(manvr.start))
    logger.info("Found dwell at {}".format(dwell.start))
    starcheck = get_starcheck_catalog_at_date(manvr.guide_start)
    if starcheck is None or 'cat' not in starcheck or not len(starcheck['cat']):
        raise ValueError('No starcheck catalog found for {}'.format(
            manvr.get_obsid()))
    starcat_time = DateTime(starcheck['cat']['mp_starcat_time'][0]).secs
    starcat_dtime = starcat_time - DateTime(manvr.start).secs
    # If it looks like the wrong starcheck by time, give up
    if abs(starcat_dtime) > 300:
        raise ValueError("Starcheck cat time delta is {}".format(starcat_dtime))
    if abs(starcat_dtime) > 30:
        logger.warning("Starcheck cat time delta of {} is > 30 sec".format(
            abs(starcat_dtime)))
    # The NPNT dwell should end when the next maneuver starts, but explicitly
    # confirm via AOPCADMD telemetry
    pcadmd = fetch.Msid('AOPCADMD', manvr.kalman_start,
                        manvr.get_next().tstart + 20)
    next_nman_start = pcadmd.times[pcadmd.vals != 'NPNT'][0]
    vals, star_info = get_data(start=manvr.kalman_start, stop=next_nman_start,
                               obsid=obsid, starcheck=starcheck)
    gui_stats = calc_gui_stats(vals, star_info)
    obsid_info = {
        'obsid': obsid,
        'obi': obspar['obi_num'],
        'kalman_datestart': manvr.kalman_start,
        'kalman_tstart': DateTime(manvr.kalman_start).secs,
        'npnt_tstop': DateTime(next_nman_start).secs,
        'npnt_datestop': DateTime(next_nman_start).date,
        'revision': STAT_VERSION
    }
    catalog = Table(starcheck['cat'])
    catalog.sort('idx')
    guide_catalog = catalog[(catalog['type'] == 'GUI') | (catalog['type'] == 'BOT')]
    aacccdpt = fetch_sci.MSID('AACCCDPT', manvr.kalman_start,
                              manvr.get_next().start)
    warm_threshold = 100.0
    tccd_mean = np.mean(aacccdpt.vals)
    tccd_max = np.max(aacccdpt.vals)
    warm_frac = dark_model.get_warm_fracs(warm_threshold, manvr.start, tccd_mean)
    temps = {
        'tccd_mean': tccd_mean,
        'n100_warm_frac': warm_frac,
        'tccd_max': tccd_max
    }
    return obsid_info, gui_stats, star_info, guide_catalog, temps
def __init__(self, name, tstart, tstop, states=None, T_init=None,
             get_msids=True, dt=328.0, model_spec=None, mask_bad_times=False,
             ephem_file=None, evolve_method=None, rk4=None, tl_file=None,
             no_eclipse=False, compute_model=None):
    self.name = name.lower()
    self.sname = short_name[name]
    if self.sname in short_name_rev:
        self.model_check = importlib.import_module(f"{self.sname}_check")
    else:
        self.model_check = None
    self.model_spec = find_json(name, model_spec)
    self.ephem_file = ephem_file
    tstart = get_time(tstart)
    tstop = get_time(tstop)
    tstart_secs = DateTime(tstart).secs
    self.no_earth_heat = getattr(self, "no_earth_heat", False)
    if states is not None:
        if isinstance(states, States):
            states_obj = states
            states = states.as_array()
        else:
            if "tstart" not in states:
                states["tstart"] = DateTime(states["datestart"]).secs
            if "tstop" not in states:
                states["tstop"] = DateTime(states["datestop"]).secs
            num_states = states["tstart"].size
            if "letg" not in states:
                states["letg"] = np.array(["RETR"] * num_states)
            if "hetg" not in states:
                states["hetg"] = np.array(["RETR"] * num_states)
            states_obj = States(states)
    else:
        states_obj = EmptyTimeSeries()
    if T_init is None:
        T_init = fetch.MSID(self.name, tstart_secs - 700.0,
                            tstart_secs + 700.0).vals.mean()
    if compute_model is not None:
        self.xija_model = compute_model(self.name, tstart, tstop, states, dt,
                                        T_init, model_spec, evolve_method, rk4)
    elif self.name in short_name and states is not None:
        self.xija_model = self._compute_acis_model(self.name, tstart, tstop,
                                                   states, dt, T_init, rk4=rk4,
                                                   no_eclipse=no_eclipse,
                                                   evolve_method=evolve_method)
    else:
        self.xija_model = self._compute_model(name, tstart, tstop, dt, T_init,
                                              evolve_method=evolve_method,
                                              rk4=rk4)
    self.bad_times = getattr(self.xija_model, "bad_times", None)
    self.bad_times_indices = getattr(self.xija_model, "bad_times_indices", None)
    if isinstance(states, dict):
        states.pop("dh_heater", None)
    components = [self.name]
    if 'dpa_power' in self.xija_model.comp:
        components.append('dpa_power')
    if 'earthheat__fptemp' in self.xija_model.comp:
        components.append('earthheat__fptemp')
    if states is None:
        components += ["pitch", "roll", "fep_count", "vid_board", "clocking",
                       "ccd_count", "sim_z"]
    masks = {}
    if mask_bad_times and self.bad_times is not None:
        masks[self.name] = np.ones(self.xija_model.times.shape, dtype='bool')
        for (left, right) in self.bad_times_indices:
            masks[self.name][left:right] = False
    model_obj = Model.from_xija(self.xija_model, components, masks=masks)
    if get_msids:
        msids_obj = self._get_msids(model_obj, [self.name], tl_file)
    else:
        msids_obj = EmptyTimeSeries()
    super(ThermalModelRunner, self).__init__(msids_obj, states_obj, model_obj)
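# Hedged construction sketch for ThermalModelRunner; the model name and date
# range are arbitrary, and running it requires a Ska environment with the
# engineering archive and the corresponding xija model specification available.
runner = ThermalModelRunner('1deamzt', '2021:150:00:00:00',
                            '2021:152:00:00:00', get_msids=True)
# The underlying xija model and any bad-time intervals are exposed as attributes.
print(runner.xija_model.times[0], runner.xija_model.times[-1])
print(runner.bad_times)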