def __init__(self, tstart, tstop, msids, recent_source="maude",
             filter_bad=False, stat='5min', user=None, password=None,
             get_states=True, state_keys=None):
    msids = ensure_list(msids)
    tstart = get_time(tstart, fmt='secs')
    tstop = get_time(tstop, fmt='secs')
    # Find the earliest time at which any of the requested MSIDs
    # stops being available from the engineering archive.
    tmid = 1.0e99
    for msid in msids:
        tm = fetch.get_time_range(msid, format="secs")[-1]
        tmid = min(tmid, tm)
    tmid = get_time(tmid, fmt='secs')
    if tmid < tstop:
        # The archive does not cover the full interval: fetch what it
        # has, then fill in the recent data from MAUDE or tracelogs.
        msids1 = MSIDs.from_database(msids, tstart, tstop=tmid,
                                     filter_bad=filter_bad, stat=stat)
        if recent_source == "maude":
            msids2 = MSIDs.from_maude(msids, tmid, tstop=tstop, user=user,
                                      password=password)
        elif recent_source == "tracelog":
            msids2 = _parse_tracelogs(
                tmid, tstop,
                ["/data/acis/eng_plots/acis_eng_10day.tl",
                 "/data/acis/eng_plots/acis_dea_10day.tl"], None)
        else:
            raise RuntimeError("'recent_source' should be either 'maude' "
                               "or 'tracelog', but got "
                               "'%s'!" % recent_source)
        msids = ConcatenatedMSIDs(msids1, msids2)
    else:
        msids = MSIDs.from_database(msids, tstart, tstop=tstop,
                                    filter_bad=filter_bad, stat=stat)
    if get_states:
        states = States.from_kadi_states(tstart, tstop,
                                         state_keys=state_keys)
    else:
        states = EmptyTimeSeries()
    model = EmptyTimeSeries()
    super(TelemData, self).__init__(msids, states, model)
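# Usage sketch (not part of this module): fetching an interval of
# telemetry that may extend past the end of the engineering archive,
# so the tail comes from MAUDE. The dates are illustrative, and the
# MSID names are taken from the defaults elsewhere in this file.
#
#     telem = TelemData("2021:100:00:00:00", "2021:107:00:00:00",
#                       ["1dpamzt", "1deamzt"], recent_source="maude",
#                       stat="5min", get_states=True)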
def write_msids(self, filename, fields, mask=None, overwrite=False):
    """
    Write MSIDs (or MSID-like quantities such as model values) to
    an ASCII table file. This assumes that all of the quantities
    have been interpolated to a common set of times.

    Parameters
    ----------
    filename : string
        The filename to write the quantities to.
    fields : list of (type, name) field specifications
        The quantities to be written to the ASCII table.
    mask : NumPy boolean array, optional
        A mask which selects the rows to be written. If not supplied,
        all rows are written.
    overwrite : boolean, optional
        If True, an existing file with the same name will be
        overwritten.
    """
    from astropy.table import Table
    fields = ensure_list(fields)
    base_times = self.dates(*fields[0])
    if mask is None:
        mask = slice(None, None, None)
    if len(fields) > 1:
        # All fields are written as columns of a single table, so
        # they must share the same set of times.
        for field in fields[1:]:
            if not np.all(base_times == self.dates(*field)):
                raise RuntimeError("To write MSIDs, all of the times "
                                   "should be the same, but '%s', '%s' "
                                   "does not have the same set of times "
                                   "as '%s', '%s'!" % (field[0], field[1],
                                                       fields[0][0],
                                                       fields[0][1]))
    data = {"_".join(k): self[k].value[mask] for k in fields}
    data["times"] = self.times(*fields[0]).value[mask]
    data["dates"] = self.dates(*fields[0])[mask]
    Table(data).write(filename, format='ascii', overwrite=overwrite)
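# Usage sketch: writing two interpolated MSIDs to an ASCII table,
# keeping only the rows where a hypothetical boolean array `hot` is
# True. `ds` stands in for an instance of this class; the first
# element of each field tuple is the quantity type (e.g. "msids"
# or "model").
#
#     fields = [("msids", "1dpamzt"), ("msids", "1deamzt")]
#     ds.write_msids("temps.dat", fields, mask=hot, overwrite=True)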
@classmethod
def from_database(cls, tstart, tstop, state_keys=None, server=None):
    from Chandra.cmd_states import fetch_states
    tstart = get_time(tstart)
    tstop = get_time(tstop)
    if state_keys is not None:
        state_keys = ensure_list(state_keys)
    t = fetch_states(tstart, tstop, vals=state_keys, server=server)
    return cls(t)
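# Usage sketch: pulling commanded states from the legacy
# Chandra.cmd_states database for a short interval. The state keys
# shown are common ones but are illustrative here.
#
#     states = States.from_database("2020:001", "2020:002",
#                                   state_keys=["pitch", "ccd_count"])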
@classmethod
def from_kadi_states(cls, tstart, tstop, state_keys=None):
    from kadi.commands import states
    tstart = get_time(tstart)
    tstop = get_time(tstop)
    if state_keys is not None:
        state_keys = ensure_list(state_keys)
    t = states.get_states(tstart, tstop, state_keys=state_keys,
                          merge_identical=True).as_array()
    return cls(t)
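# Usage sketch: the kadi-based equivalent of the above, which is what
# TelemData uses by default. The interval and state keys are
# illustrative.
#
#     states = States.from_kadi_states("2020:001", "2020:002",
#                                      state_keys=["pitch", "ccd_count"])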
def __init__(self, load, comps=None, get_msids=False, tl_file=None,
             states_comp="DPA"):
    if comps is None:
        comps = ["1deamzt", "1dpamzt", "1pdeaat", "fptemp_11",
                 "tmp_fep1_mong", "tmp_fep1_actel", "tmp_bep_pcb"]
    comps = ensure_list(comps)
    model = Model.from_load_page(load, comps)
    states = States.from_load_page(load, comp=states_comp)
    if get_msids:
        msids = self._get_msids(model, comps, tl_file)
    else:
        msids = EmptyTimeSeries()
    super(ThermalModelFromLoad, self).__init__(msids, states, model)
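# Usage sketch: loading model components and commanded states for a
# load review, with the corresponding MSIDs pulled for comparison.
# "MAY2721A" is an illustrative load name.
#
#     dset = ThermalModelFromLoad("MAY2721A", comps=["1dpamzt"],
#                                 get_msids=True)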
@classmethod
def from_maude(cls, msids, tstart, tstop=None, user=None, password=None):
    import maude
    tstart = get_time(tstart)
    tstop = get_time(tstop)
    msids = ensure_list(msids)
    msids, derived_msids = check_depends(msids)
    table = {}
    times = {}
    state_codes = {}
    out = maude.get_msids(msids, start=tstart, stop=tstop, user=user,
                          password=password)
    for msid in out["data"]:
        k = msid["msid"].lower()
        table[k] = msid["values"]
        times[k] = msid["times"]
        state_codes[k] = get_state_codes(k)
    return cls(table, times, state_codes=state_codes,
               derived_msids=derived_msids)
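# Usage sketch: fetching recent samples directly from MAUDE. Passing
# user/password here is optional if the maude package is already
# configured with credentials; the MSID and dates are illustrative.
#
#     msids = MSIDs.from_maude(["1dpamzt"], "2021:200:00:00:00",
#                              tstop="2021:201:00:00:00")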
@classmethod
def from_database(cls, msids, tstart, tstop=None, filter_bad=False,
                  stat='5min', interpolate=None, interpolate_times=None):
    tstart = get_time(tstart)
    tstop = get_time(tstop)
    msids = ensure_list(msids)
    msids, derived_msids = check_depends(msids)
    msids = [msid.lower() for msid in msids]
    data = fetch.MSIDset(msids, tstart, stop=tstop, filter_bad=filter_bad,
                         stat=stat)
    table = {}
    times = {}
    state_codes = {}
    masks = {}
    if interpolate is not None:
        if interpolate_times is None:
            # Get the nominal tstart / tstop range
            max_fetch_tstart = max(msid.times[0] for msid in data.values())
            min_fetch_tstop = min(msid.times[-1] for msid in data.values())
            # Default grid spacing of 328 s (ten 32.8-s major frames)
            dt = 328.0
            start = DateTime(tstart).secs if tstart else data.tstart
            stop = DateTime(tstop).secs if tstop else data.tstop
            # Clip the grid to the interval covered by every MSID
            start = max(start, max_fetch_tstart)
            stop = min(stop, min_fetch_tstop)
            interpolate_times = np.arange((stop - start) // dt + 1) * dt + start
        else:
            interpolate_times = DateTime(interpolate_times).secs
    for k, msid in data.items():
        if interpolate is not None:
            # Interpolate the sample indices onto the common time grid
            indexes = Ska.Numpy.interpolate(np.arange(len(msid.times)),
                                            msid.times, interpolate_times,
                                            method=interpolate, sorted=True)
            times[k.lower()] = interpolate_times
        else:
            indexes = slice(None, None, None)
            times[k.lower()] = data[k].times
        if msid.state_codes:
            # Map state code -> raw value; key on the lowercased MSID
            # name for consistency with the other dictionaries here.
            state_codes[k.lower()] = {code: raw
                                      for raw, code in msid.state_codes}
        table[k.lower()] = msid.vals[indexes]
        if msid.bads is not None:
            masks[k.lower()] = (~msid.bads)[indexes]
    return cls(table, times, state_codes=state_codes, masks=masks,
               derived_msids=derived_msids)
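# Usage sketch: fetching full-resolution data and interpolating both
# MSIDs onto a common 328-s grid so they can be compared sample by
# sample. method="nearest" keeps the interpolated sample indices
# integral; the MSIDs and dates are illustrative.
#
#     msids = MSIDs.from_database(["1dpamzt", "1deamzt"], "2020:150",
#                                 tstop="2020:155", stat=None,
#                                 interpolate="nearest")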
def _parse_tracelogs(tbegin, tend, filenames, other_msids):
    filenames = ensure_list(filenames)
    if tbegin is not None:
        tbegin = get_time(tbegin)
    if tend is not None:
        tend = get_time(tend)
    msid_objs = []
    for filename in filenames:
        # Figure out what kind of file this is from its first line
        with open(filename, "r") as f:
            line = f.readline()
        if line.startswith("TIME"):
            msids = MSIDs.from_tracelog(filename, tbegin=tbegin, tend=tend)
        elif line.startswith("#YEAR") or line.startswith("YEAR"):
            msids = MSIDs.from_mit_file(filename, tbegin=tbegin, tend=tend)
        else:
            raise RuntimeError("Cannot determine the format of tracelog "
                               "file '%s'!" % filename)
        msid_objs.append(msids)
    if other_msids is not None:
        msid_objs.append(MSIDs.from_database(other_msids, tbegin, tend))
    return CombinedMSIDs(msid_objs)
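# Usage sketch: combining the standard engineering and DEA 10-day
# tracelogs (the same paths hard-coded above), restricted to a single
# day, with no extra archive MSIDs appended.
#
#     msids = _parse_tracelogs("2021:010:00:00:00", "2021:011:00:00:00",
#                              ["/data/acis/eng_plots/acis_eng_10day.tl",
#                               "/data/acis/eng_plots/acis_dea_10day.tl"],
#                              None)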
@classmethod
def from_load_page(cls, load, components, time_range=None):
    components = [comp.lower() for comp in ensure_list(components)]
    load = find_load(load)
    mylog.info("Reading model data from the %s load." % load)
    if "fptemp_11" in components:
        # The focal-plane model also needs the Earth solid angle table
        components.append("earth_solid_angle")
    data = {}
    for comp in components:
        if comp == "earth_solid_angle":
            url = "http://cxc.cfa.harvard.edu/acis/FP_thermPredic/"
            url += "%s/ofls%s/earth_solid_angles.dat" % (load[:-1].upper(),
                                                         load[-1].lower())
            table_key = comp
        else:
            c = comp_map[comp].upper()
            table_key = "fptemp" if comp == "fptemp_11" else comp
            url = "http://cxc.cfa.harvard.edu/acis/%s_thermPredic/" % c
            url += "%s/ofls%s/temperatures.dat" % (load[:-1].upper(),
                                                   load[-1].lower())
        u = requests.get(url)
        if not u.ok:
            if table_key == "earth_solid_angle":
                mylog.warning("Could not find the earth solid angles "
                              "file. Skipping.")
            else:
                mylog.warning("Could not find the model page for "
                              "'%s'. Skipping." % comp)
            continue
        table = ascii.read(u.text)
        if time_range is None:
            idxs = np.ones(table["time"].size, dtype='bool')
        else:
            idxs = np.logical_and(table["time"] >= time_range[0],
                                  table["time"] <= time_range[1])
        times = Quantity(table["time"][idxs], 's')
        data[comp] = APQuantity(table[table_key].data[idxs], times,
                                get_units("model", comp),
                                dtype=table[table_key].data.dtype)
    return cls(table=data)
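# Usage sketch: reading the focal-plane model for a load review page.
# Since "fptemp_11" is requested, the Earth solid-angle table is
# fetched as well; the load name is illustrative.
#
#     model = Model.from_load_page("MAY2721A", ["fptemp_11"])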