from typing import List, Union

from obspy import Inventory, read_inventory


def flex_read_stations(filenames: Union[str, List[str]]):
    """
    Read one or more station files into a single Inventory.

    Networks and stations are merged by code, so the result is a single
    inventory rather than an aggregate of inventories.

    :param filenames: station file(s); wildcards permitted.
    :return: :class:`obspy.Inventory`
    """
    if isinstance(filenames, str):
        filenames = [filenames]

    inv = Inventory()
    for _file in filenames:
        try:
            add_inv = read_inventory(_file)
            for network in add_inv:
                if len(inv.select(network=network.code)) == 0:
                    # Network not seen before: append it wholesale
                    inv.networks.append(network)
                else:
                    # Merge new stations into the existing network of the
                    # same code, then swap the merged network back in
                    new_network = inv.select(network=network.code)[0]
                    for station in network:
                        if len(new_network.select(station=station.code)) == 0:
                            new_network.stations.append(station)
                    inv = inv.remove(network=network.code)
                    inv.networks.append(new_network)
        except Exception as e:
            print("%s could not be read. Error: %s" % (_file, e))
    return inv
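# A minimal usage sketch for flex_read_stations. The paths below are
# placeholders; read_inventory itself accepts Unix-style wildcards, so a
# single pattern and an explicit list both work.
merged = flex_read_stations(["stations/NZ.xml", "stations/XX.xml"])
print(merged)  # one Inventory, duplicate network codes merged

# Equivalent call with a wildcard pattern:
merged = flex_read_stations("stations/*.xml")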
def test_result(self):
    exp_inv = read_inventory()
    inv0 = Inventory([exp_inv[0]])
    inv1 = Inventory([exp_inv[1]])
    invl = [inv0, inv1]
    self.assertEqual(exp_inv, pu.join_inv(invl))
    # Just testing the test
    self.assertNotEqual(inv0, inv1)
def files2inv(directory, source=''):
    """Read individual StationXML files into a single Inventory."""
    from glob import glob

    from obspy import Inventory, read_inventory

    files = glob(directory)
    inv = Inventory(networks=[], source=source)
    for filename in files:
        sing_sta_inv = read_inventory(filename)
        if sing_sta_inv[0].code in inv.get_contents()['networks']:
            # NOTE: assumes the matching network is the first one in `inv`
            inv[0].stations += sing_sta_inv[0].stations
        else:
            inv += sing_sta_inv
    return inv
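# Hypothetical usage of files2inv -- the glob pattern and source string are
# placeholders, not paths from the original project.
inv = files2inv("responses/*.xml", source="my survey")
inv.write("combined.xml", format="STATIONXML")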
def __init__(
    self,
    event_id: str = "2019p922847",
    client: Client = Client("GEONET"),
    n_stations: int = 5,
    excluded_channels: Iterable = (
        "BNZ", "BNN", "BNE", "BN1", "BN2",
        "HNZ", "HNN", "HNE", "HN1", "HN2",
    ),
    excluded_stations: Iterable = ("TUWZ",),  # trailing comma: a 1-tuple, not a str
    all_components: bool = True,
    length: float = 60.,
    pre_pick: float = 10.,
    wood_anderson: bool = True,
):
    self.event_id = event_id
    self.client = client
    self.excluded_channels = excluded_channels
    self.excluded_stations = excluded_stations
    self.st, self._raw = Stream(), Stream()
    self.inventory = Inventory()
    self._get_st(
        n_stations=n_stations, all_components=all_components,
        length=length, pre_pick=pre_pick)
    self._get_inventory()
    self.amplitude_units = "counts"
    if wood_anderson:
        self.wood_anderson()
        self.amplitude_units = "m"
def get_stations(inv: obspy.Inventory, **kwargs) -> obspy.Inventory:
    """
    Return a new inventory whose channels meet the filter parameters.

    See obspy.clients.fdsn.Client for supported parameters.
    """
    if not kwargs:  # no filter requested, return original stations
        return inv
    unsupported = set(kwargs) - SUPPORTED_ARGS
    if unsupported:
        msg = f"{unsupported} are not supported by get_stations"
        raise TypeError(msg)
    inv = copy.deepcopy(inv)
    keep_ids = _get_keep_ids(inv, **kwargs)
    # iterate over inv and remove channels/stations that don't meet reqs.
    for net in inv:
        for sta in net:
            # only keep channels that meet reqs.
            sta.channels = [x for x in sta.channels if id(x) in keep_ids]
        # only keep stations that meet reqs or have channels that do
        net.stations = [
            x for x in net.stations if id(x) in keep_ids or len(x.channels)
        ]
    # only keep networks that have some stations
    inv.networks = [x for x in inv.networks if len(x.stations)]
    return inv
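# A sketch of filtering with get_stations, assuming SUPPORTED_ARGS contains
# the usual FDSN station-query keys (network, station, channel, ...) and
# _get_keep_ids resolves them against the inventory.
inv = obspy.read_inventory()  # ObsPy's bundled example inventory
bh_only = get_stations(inv, channel="BH?")
print(bh_only.get_contents()["channels"])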
from datetime import datetime

import obspy
from obspy import Inventory
from obspy.core.inventory import Channel, Network, Site, Station


def create_inv(network_code, station_code, location_code, channel_code,
               isr, sf, u):
    writethisinv = Inventory(
        networks=[
            Network(
                code=network_code,
                start_date=obspy.UTCDateTime('2007-01-01'),
                stations=[
                    Station(
                        code=station_code,
                        latitude=1,
                        longitude=2,
                        elevation=3,
                        creation_date=obspy.UTCDateTime('2007-01-01'),
                        site=Site(name='site'),
                        channels=[
                            Channel(
                                code=channel_code,
                                location_code=location_code,
                                start_date=obspy.UTCDateTime('2007-01-01'),
                                latitude=1,
                                longitude=2,
                                elevation=3,
                                depth=4,
                                response=create_response(
                                    inputsamplerate=isr,
                                    scaling_factor=sf,
                                    units=u))
                        ])
                ])
        ],
        # The source should be the ID of whoever created the file.
        source='Joseph Farrugia, Ocean Networks Canada',
        created=obspy.UTCDateTime(datetime.today()))
    return writethisinv
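# Hypothetical call -- create_response is a project helper (not shown here)
# that builds an obspy Response from a sample rate, gain and unit string;
# the codes and values below are placeholders.
inv = create_inv('ON', 'STA01', '', 'HHZ', isr=100.0, sf=1.0e9, u='M/S')
inv.write('STA01.xml', format='STATIONXML')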
def test_nice_ringlaser_metadata_error_msg(self):
    with self.assertRaises(TypeError) as e:
        PPSD(stats=Stats(), metadata=Inventory(networks=[], source=""),
             special_handling='ringlaser')
    expected = ("When using `special_handling='ringlaser'`, `metadata` "
                "must be a plain dictionary with key 'sensitivity' "
                "stating the overall sensitivity`.")
    self.assertEqual(str(e.exception), expected)
def test_nice_ringlaser_metadata_error_msg(self):
    expected = ("When using `special_handling='ringlaser'`, `metadata` "
                "must be a plain dictionary with key 'sensitivity' "
                "stating the overall sensitivity`.")
    with pytest.raises(TypeError, match=re.escape(expected)):
        PPSD(stats=Stats(), metadata=Inventory(networks=[], source=""),
             special_handling='ringlaser')
import numpy as np
from obspy import UTCDateTime
from obspy.core.inventory import Inventory, Network, Station


def read_stations(path_to_stations):
    """
    Convert a Specfem3D STATIONS file into an ObsPy Inventory object.

    Specfem3D STATIONS files contain no channel or location information, so
    the inventory can only go down to the station level.

    Note:
        This assumes the row structure of the station file is:
        STA, NET, LAT [deg], LON [deg], ELEVATION [m], BURIAL [m]

    :type path_to_stations: str
    :param path_to_stations: the path to the STATIONS file that is associated
        with the Specfem3D DATA directory
    :rtype: obspy.core.inventory.Inventory
    :return: a station-level Inventory object
    """
    stations = np.loadtxt(path_to_stations, dtype="str")

    # Get all the unique network names; try-except catches the case where
    # there is only one station in the file (1D array rather than 2D)
    try:
        networks = {_: [] for _ in np.unique(stations[:, 1])}
    except IndexError:
        networks = {stations[1]: []}
        stations = [stations]

    for sta in stations:
        # Parse the station information
        station_ = sta[0]
        network_ = sta[1]
        latitude_ = float(sta[2])
        longitude_ = float(sta[3])
        elevation_ = float(sta[4])
        burial_ = float(sta[5])  # burial isn't an option in ObsPy, not used

        # Create the station object, temporarily stored under its network
        station = Station(code=station_,
                          latitude=latitude_,
                          longitude=longitude_,
                          elevation=elevation_,
                          creation_date=UTCDateTime()
                          )
        networks[network_].append(station)

    # Create the network objects
    list_of_networks = []
    for network, stations in networks.items():
        list_of_networks.append(Network(code=network, stations=stations))

    return Inventory(networks=list_of_networks, source="PYATOA")
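# Usage sketch, assuming a hypothetical STATIONS file with rows like:
#   BFZ NZ -40.6796 176.2462 283.0 0.0
inv = read_stations("DATA/STATIONS")
print(inv)  # one Network per unique network code, stations beneath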
from obspy import Inventory, Stream
from pyasdf import ASDFDataSet


def load_asdf(filename: str, no_event=False):
    """Read an ASDF file and return its event, inventory, and waveform stream.

    Note that this is only suitable for ASDF files holding a single set of
    traces, event and stations, since the function uses the first (and
    assumed only) waveform tag in the dataset.

    Args:
        filename: ASDF filename, e.g. "somethingsomething.h5"
        no_event: if True, skip loading an event from the file

    Returns:
        (Event, Inventory, Stream), or (Inventory, Stream) if no_event is True
    """
    ds = ASDFDataSet(filename)

    # Create empty stream and inventory to accumulate into
    inv = Inventory()
    st = Stream()

    # Get waveform tag
    tag = list(ds.waveform_tags)[0]

    for station in ds.waveforms.list():
        try:
            st += getattr(ds.waveforms[station], tag)
            inv += ds.waveforms[station].StationXML
        except Exception as e:
            print(e)

    # Choose not to load an event from the asdf file (pycmt3d's event doesn't
    # output an event...)
    if not no_event:
        ev = ds.events[0]
        del ds
        return ev, inv, st
    else:
        del ds
        return inv, st
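# Usage sketch -- "example.h5" is a placeholder filename.
ev, inv, st = load_asdf("example.h5")
inv_only, st_only = load_asdf("example.h5", no_event=True)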
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 10 19:26:01 2019

@author: jpeacock
"""
from obspy import Inventory
from obspy.core.inventory import Network
from obspy.core.util import AttribDict

ns = 'http://some-page.de/xmlns/1.0'

Channel = AttribDict()
Channel.namespace = ns
Channel.value = AttribDict()

Channel.value.my_nested_tag1 = AttribDict()
Channel.value.my_nested_tag1.namespace = ns
Channel.value.my_nested_tag1.value = 1.23E+10

Channel.value.my_nested_tag2 = AttribDict()
Channel.value.my_nested_tag2.namespace = ns
Channel.value.my_nested_tag2.value = True

inv = Inventory([Network('XX')], 'XX')
inv[0].extra = AttribDict()
inv[0].extra.Channel = Channel

inv.write('my_inventory.xml', format='STATIONXML',
          nsmap={'somepage_ns': 'http://some-page.de/xmlns/1.0'})
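# Reading the file back should round-trip the custom-namespace tags into
# `.extra`; this check is a sketch, not part of the original script.
from obspy import read_inventory

inv2 = read_inventory('my_inventory.xml')
print(inv2[0].extra)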
def gather_waveforms(source, network, station, location, channel, starttime,
                     endtime, time_buffer=0, merge_fill_value=0,
                     trim_fill_value=0, remove_response=False,
                     return_failed_stations=False, watc_url=None,
                     watc_username=None, watc_password=None):
    """
    Gather seismic/infrasound waveforms from IRIS or WATC FDSN, or AVO
    Winston, and output a :class:`~obspy.core.stream.Stream` with
    station/element coordinates attached. Optionally remove the sensitivity.

    **NOTE**

    Usual RTM usage is to specify a starttime/endtime that brackets the
    estimated source origin time. Then time_buffer is used to download enough
    extra data to account for the time required for an infrasound signal to
    propagate to the farthest station.

    Args:
        source (str): Which source to gather waveforms from. Options are:

            * `'IRIS'` – IRIS FDSN
            * `'WATC'` – WATC FDSN
            * `'AVO'` – AVO Winston

        network (str): SEED network code [wildcards (``*``, ``?``) accepted]
        station (str): SEED station code [wildcards (``*``, ``?``) accepted]
        location (str): SEED location code [wildcards (``*``, ``?``) accepted]
        channel (str): SEED channel code [wildcards (``*``, ``?``) accepted]
        starttime (:class:`~obspy.core.utcdatetime.UTCDateTime`): Start time
            for data request
        endtime (:class:`~obspy.core.utcdatetime.UTCDateTime`): End time for
            data request
        time_buffer (int or float): Extra amount of data to download after
            `endtime` [s]
        merge_fill_value (bool, int, float, str, or None): Controls merging of
            :class:`~obspy.core.trace.Trace` objects with identical IDs. If
            `False`, no merging is performed. Otherwise, a merge is performed
            with the ``fill_value`` provided to this parameter. For details,
            see the docstring of :meth:`obspy.core.stream.Stream.merge`
        trim_fill_value (bool, int, float, or None): Controls trimming of the
            output :class:`~obspy.core.stream.Stream`, useful if precisely
            uniform start and end times are desired. If `False`, no trimming
            is performed. Otherwise, a trim is performed with the
            ``fill_value`` provided to this parameter. For details, see the
            docstring of :meth:`obspy.core.stream.Stream.trim`
        remove_response (bool): Toggle response removal via
            :meth:`~obspy.core.trace.Trace.remove_sensitivity` or a simple
            scalar multiplication
        return_failed_stations (bool): If `True`, returns a list of station
            codes that were requested but not downloaded. This disables the
            standard failed station warning message
        watc_url (str): URL for WATC FDSN server
        watc_username (str): Username for WATC FDSN server
        watc_password (str): Password for WATC FDSN server

    Returns:
        :class:`~obspy.core.stream.Stream` containing gathered waveforms. If
        `return_failed_stations` is `True`, additionally returns a list
        containing station codes that were requested but not downloaded
    """

    # Check for issues with fill value args
    if merge_fill_value is True or trim_fill_value is True:
        raise ValueError('Cannot provide True to fill value parameters.')

    print('--------------')
    print('GATHERING DATA')
    print('--------------')

    # IRIS FDSN
    if source == 'IRIS':
        client = FDSN_Client('IRIS')
        print('Reading data from IRIS FDSN...')
        try:
            st_out = client.get_waveforms(network, station, location, channel,
                                          starttime, endtime + time_buffer,
                                          attach_response=True)
        except FDSNNoDataException:
            st_out = Stream()  # Just create an empty Stream object

    # WATC FDSN
    elif source == 'WATC':
        print('Connecting to WATC FDSN...')
        client = FDSN_Client(base_url=watc_url, user=watc_username,
                             password=watc_password)
        print('Successfully connected. Reading data from WATC FDSN...')
        try:
            st_out = client.get_waveforms(network, station, location, channel,
                                          starttime, endtime + time_buffer,
                                          attach_response=True)
        except FDSNNoDataException:
            st_out = Stream()  # Just create an empty Stream object

    # AVO Winston
    elif source == 'AVO':
        client = EW_Client('pubavo1.wr.usgs.gov',
                           port=16023)  # 16023 is long-term
        print('Reading data from AVO Winston...')
        st_out = Stream()  # Make empty Stream object to populate

        # Brute-force "dynamic grid search" over
        # network/station/channel/location codes
        for nw in _restricted_matching('network', network, client):
            for sta in _restricted_matching('station', station, client,
                                            network=nw):
                for cha in _restricted_matching('channel', channel, client,
                                                network=nw, station=sta):
                    for loc in _restricted_matching('location', location,
                                                    client, network=nw,
                                                    station=sta, channel=cha):
                        try:
                            st_out += client.get_waveforms(
                                nw, sta, loc, cha, starttime,
                                endtime + time_buffer)
                        except KeyError:
                            pass

    else:
        raise ValueError('Unrecognized source. Valid options are \'IRIS\', '
                         '\'WATC\', or \'AVO\'.')

    # Merge, if specified
    if merge_fill_value is not False:
        st_out.merge(fill_value=merge_fill_value)  # Merge Traces with same ID
        warnings.warn(f'Merging with "fill_value={merge_fill_value}"',
                      CollectionWarning)

    st_out.sort()

    # Check that all requested stations are present in Stream
    requested_stations = station.split(',')
    downloaded_stations = [tr.stats.station for tr in st_out]
    failed_stations = []
    for sta in requested_stations:
        # The below check works with wildcards, but obviously cannot detect if
        # ALL stations corresponding to a given wildcard (e.g., O??K) were
        # downloaded. Thus, if careful station selection is desired, specify
        # each station explicitly and the below check will then be effective.
        if not fnmatch.filter(downloaded_stations, sta):
            if not return_failed_stations:
                # If we're not returning the failed stations, then show this
                # warning message to alert the user
                warnings.warn(f'Station {sta} not downloaded from {source} '
                              'server for this time period.',
                              CollectionWarning)
            failed_stations.append(sta)

    # If the Stream is empty, then we can stop here
    if st_out.count() == 0:
        print('No data downloaded.')
        if return_failed_stations:
            return st_out, failed_stations
        else:
            return st_out

    # Otherwise, show what the Stream contains
    print(st_out.__str__(extended=True))  # This prints the WHOLE Stream

    # Trim, if specified
    if trim_fill_value is not False:
        st_out.trim(starttime, endtime + time_buffer, pad=True,
                    fill_value=trim_fill_value)
        warnings.warn(f'Trimming with "fill_value={trim_fill_value}"',
                      CollectionWarning)

    print('Assigning coordinates...')

    # Use IRIS inventory info for AVO data source
    if source == 'AVO':
        client = FDSN_Client('IRIS')

    try:
        inv = client.get_stations(network=network, station=station,
                                  location=location, channel=channel,
                                  starttime=starttime,
                                  endtime=endtime + time_buffer,
                                  level='channel')
    except FDSNNoDataException:
        inv = Inventory()  # Make an empty inv
        warnings.warn('Creating empty inventory.', CollectionWarning)

    for tr in st_out:
        try:
            coords = inv.get_coordinates(tr.id)
            tr.stats.longitude = coords['longitude']
            tr.stats.latitude = coords['latitude']
            tr.stats.elevation = coords['elevation']
        except Exception as e:
            if str(e) == 'No matching channel metadata found.':
                warnings.warn(f'No metadata for {tr.id} found in inventory.',
                              CollectionWarning)
            else:
                raise

    # Check if any Trace did NOT get coordinates assigned, and try to use JSON
    # coordinates if available
    for tr in st_out:
        try:
            # Accessing these raises AttributeError if no coordinates were
            # assigned above
            tr.stats.longitude, tr.stats.latitude, tr.stats.elevation
        except AttributeError:
            try:
                tr.stats.latitude, tr.stats.longitude,\
                    tr.stats.elevation = AVO_COORDS[tr.id]
                warnings.warn(f'Using coordinates from JSON file for {tr.id}.',
                              CollectionWarning)
            except KeyError:
                print(f'No coordinates available for {tr.id}. Stopping.')
                raise

    # Remove sensitivity
    if remove_response:
        print('Removing sensitivity...')
        for tr in st_out:
            try:
                # Just removing sensitivity for now. remove_response() can
                # lead to errors. This should be sufficient for now. Plus some
                # IRIS-AVO responses are wonky.
                tr.remove_sensitivity()
            except ValueError:  # No response information found
                # This is only set up for infrasound calibration values
                try:
                    calib = AVO_INFRA_CALIBS[tr.id]
                    tr.data = tr.data * calib
                    warnings.warn('Using calibration value from JSON file '
                                  f'for {tr.id}.', CollectionWarning)
                except KeyError:
                    print(f'No calibration value available for {tr.id}. '
                          'Stopping.')
                    raise

    print('Done')

    # Return the Stream with coordinates attached (and responses removed if
    # specified)
    if return_failed_stations:
        return st_out, failed_stations
    else:
        return st_out
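# Usage sketch for gather_waveforms; the network/station/channel codes and
# times below are placeholders, not values from the original project.
from obspy import UTCDateTime

t0 = UTCDateTime('2019-06-20T23:10:00')
st = gather_waveforms(source='IRIS', network='AV', station='*', location='*',
                      channel='BDF', starttime=t0, endtime=t0 + 600,
                      time_buffer=120)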
if __name__ == "__main__":
    # Parameters
    level = "channel"  # channel, station
    write_to = "2P_BEACON_Dataless.xml"
    export_to_specfem = False
    export_to_seed_fmt = False
    plot = False

    # Create the Inventory
    master_inventory = Inventory(
        networks=[
            # geonet_south_island(level=level),
            # geonet_north_island(level=level),
            # hobitss(level=level),
            # sahke(level=level),
            # bannister(network_code="ZX", level=level),
            # bannister(network_code="Z8", level=level),
            beacon(level=level)
        ],
        source="PYATOA")

    # Export to various output formats
    if write_to:
        master_inventory.write(write_to, format="STATIONXML")
    if export_to_specfem:
        export_specfem(master_inventory)
    if export_to_seed_fmt:
        export_seed_fmt(master_inventory)
    if plot:
        # The source snippet is truncated mid-call here; any further plot
        # kwargs are unknown, so the call is closed with the visible ones
        master_inventory.plot(projection="local", resolution="l")
from obspy import Inventory, UTCDateTime
from obspy.clients.fdsn import Client

# NOTE: station_file, data_path and read_stations come from earlier in this
# script (not shown in this snippet)
response_file = "./inputs/Z7_dataless.xml"

# --- Set network code & client ---
network = "Z7"
datacentre = "IRIS"
client = Client(datacentre)

# --- Set time period over which to download data ---
starttime = UTCDateTime("2014-236T00:00:00")
endtime = UTCDateTime("2014-236T00:15:00")

# --- Read in station file ---
stations = read_stations(station_file)

# --- Download instrument response inventory ---
inv = Inventory()
for station in stations["Name"]:
    inv += client.get_stations(network=network, station=station,
                               starttime=starttime, endtime=endtime,
                               level="response")
inv.write(response_file, format="STATIONXML")

# --- Make directories to store waveform data ---
waveform_path = data_path / str(starttime.year) / f"{starttime.julday:03d}"
waveform_path.mkdir(parents=True, exist_ok=True)

# --- Download waveform data ---
for station in stations["Name"]:
    print(f"Downloading waveform data for station {station} from {datacentre}")
def obspyck_from_local(config_file, inv_paths, location, wav_dir=None,
                       catalog=None, wav_file=None, cassm=False, rotate=False,
                       length=0.03, prepick=0.003, pick_error=0.0001):
    """
    Take a local catalog, inventory and waveforms for picking.

    This has been gutted from scripts.python.workflow.obspyck_util for use
    with SURF/FS-B networks.

    :param config_file: path to the obspyck config file
    :param inv_paths: list of paths to StationXML files
    :param location: network identifier used to select the SEED network code
    :param wav_dir: directory of mseeds named according to timestamp eid
        convention
    :param catalog: catalog of events to pick (optional)
    :param wav_file: if not passing a directory, pass a single waveform file
        path
    :param cassm: bool for string parsing of cassm event files
    :param rotate: if orientation information is saved in the inventory,
        rotate the channels into ZNE. Defaults to False.
    :param length: length (seconds) of wave to plot
    :param prepick: seconds before pick of wav to plot
    :param pick_error: default pick error to assign if none exists
    :return:
    """
    # Sort network name
    if location == 'cascadia':
        net = 'UW'
    elif location == '4100':
        net = 'CB'
    else:
        net = 'SV'
    # Grab all stationxml files
    inv = Inventory()
    for inv_f in inv_paths:
        inv += read_inventory(inv_f)
    # For the case of a single wav file with no catalog (probably a stack)
    if not catalog and wav_file:
        st = read(wav_file)
        st.traces.sort(key=lambda x: x.stats.starttime)  # sort first
        utcdto = st[0].stats.starttime
        root = ['obspyck -c {} -t {} -d {} -s {}'.format(
            config_file, utcdto - prepick, length, net)]
        cmd = ' '.join(root + [wav_file] + inv_paths)
        print(cmd)
        call(cmd, shell=True)
        return
    all_wavs = glob('{}/*'.format(wav_dir))
    # Sort events, although they should already be sorted and it doesn't
    # matter anyway
    catalog.events.sort(key=lambda x: x.origins[-1].time)
    if len(catalog) == 0:
        print('No events in catalog')
        return
    ## Old workflow for SURF 4850
    # eids = [parse_resource_id_to_eid(ev, method=location)
    #         for ev in catalog]
    # wav_files = [
    #     p for p in all_wavs
    #     if parse_filenames_to_eid(p, method=location, cassm=cassm) in eids]
    eids = [ev.resource_id.id.split('/')[-1] for ev in catalog]
    wav_files = [f for f in all_wavs
                 if f.split('/')[-1].rstrip('.ms') in eids]
    if not os.path.isdir('tmp'):
        os.mkdir('tmp')
    for ev in catalog:
        pk1 = min([pk.time for pk in ev.picks])
        # eid = parse_resource_id_to_eid(ev, method=location)
        # wav_file = [
        #     f for f in wav_files
        #     if parse_filenames_to_eid(f, method=location,
        #                               cassm=cassm) == eid]
        eid = ev.resource_id.id.split('/')[-1]
        wav_file = [f for f in wav_files
                    if f.split('/')[-1].rstrip('.ms') == eid]
        # Create temporary mseed without the superfluous non-seis traces
        try:
            st = read(wav_file[0])
        except IndexError:
            print('No waveform for this event')
            continue
        # Vibbox specific channels, not for picking
        rms = [tr for tr in st
               if tr.stats.station in ['CMon', 'CTrig', 'CEnc', 'PPS']]
        for rm in rms:
            st.traces.remove(rm)
        tmp_wav_file = ['tmp/tmp_wav.mseed']
        if rotate:
            # Rotation to ZNE is not available in obspyck, so do it here
            rotated_st = rotate_channels(st, inv)
            rotated_st.write(tmp_wav_file[0], format="MSEED")
        else:
            st.write(tmp_wav_file[0], format="MSEED")
        # If no pick uncertainties, assign some arbitrary ones
        for pk in ev.picks:
            if not pk.time_errors:
                pk.time_errors.uncertainty = pick_error
        tmp_name = 'tmp/{}_repicked.xml'.format(eid)
        # tmp_name = 'tmp/{}.xml'.format(
        #     parse_resource_id_to_eid(ev, method=location))
        ev.write(tmp_name, format='QUAKEML')
        print('Launching obspyck for ev: {}'.format(
            str(ev.resource_id).split('/')[-1]))
        root = ['obspyck -c {} -t {} -d {} -s {} --event {}'.format(
            config_file, pk1 - prepick, length, net, tmp_name)]
        cmd = ' '.join(root + tmp_wav_file + inv_paths)
        print(cmd)
        call(cmd, shell=True)
    return
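# Hypothetical invocation; all paths are placeholders and my_catalog is an
# obspy Catalog loaded elsewhere.
obspyck_from_local(config_file='obspyck.cfg',
                   inv_paths=['stations/FSB.xml'],
                   location='4100',
                   wav_dir='wavs',
                   catalog=my_catalog)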
def get_inventory(
    client,
    tribe: Union[RealTimeTribe, Tribe],
    triggering_event: Event = None,
    location: dict = None,
    starttime: UTCDateTime = None,
    max_distance: float = 1000.,
    n_stations: int = 10,
    duration: float = 10,
    level: str = "channel",
    channel_list: Union[list, tuple] = ("EH?", "HH?"),
) -> Inventory:
    """
    Get a suitable inventory for a tribe - selects the most used, closest
    stations.

    Parameters
    ----------
    client:
        Obspy client with a get_stations service.
    tribe:
        Tribe or RealTimeTribe of templates to query for stations.
    triggering_event:
        Event with at least an origin to calculate distances from - if not
        specified will use `location`
    location:
        Dictionary with "latitude" and "longitude" keys - only used if
        `triggering_event` is not specified.
    starttime:
        Start-time for station search - only used if `triggering_event` is
        not specified.
    max_distance:
        Maximum distance from `triggering_event.preferred_origin` or
        `location` to find stations. Units: km
    n_stations:
        Maximum number of stations to return
    duration:
        Duration stations must be active for. Units: days
    level:
        Level for inventory parsable by `client.get_stations`.
    channel_list:
        List of channel-codes to be acquired. If `None` then all channels
        will be searched.

    Returns
    -------
    Inventory of the most used, closest stations.
    """
    inv = Inventory(networks=[], source=None)
    if triggering_event is not None:
        try:
            origin = (triggering_event.preferred_origin() or
                      triggering_event.origins[0])
        except IndexError:
            Logger.error("Triggering event has no origin")
            return inv
        lat = origin.latitude
        lon = origin.longitude
        _starttime = origin.time
    else:
        lat = location["latitude"]
        lon = location["longitude"]
        _starttime = starttime

    for channel_str in channel_list or ["*"]:
        try:
            inv += client.get_stations(
                startbefore=_starttime,
                endafter=_starttime + (duration * 86400),
                channel=channel_str, latitude=lat, longitude=lon,
                maxradius=kilometer2degrees(max_distance),
                level=level)
        except FDSNNoDataException:
            continue

    if len(inv) == 0:
        return inv
    # Count how often each station is picked across the tribe's templates
    station_count = Counter(
        [pick.waveform_id.station_code for template in tribe
         for pick in template.event.picks])
    # Rank stations: most picked first, then closest to the origin/location
    sta_dist = []
    for net in inv:
        for sta in net:
            dist = locations2degrees(
                lat1=lat, long1=lon, lat2=sta.latitude, long2=sta.longitude)
            sta_dist.append((sta.code, dist, station_count[sta.code]))
    sta_dist.sort(key=lambda _: (-_[2], _[1]))
    inv_out = inv.select(station=sta_dist[0][0])
    for sta in sta_dist[1:n_stations]:
        inv_out += inv.select(station=sta[0])
    return inv_out
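# Usage sketch, assuming an FDSN client and a previously built Tribe; the
# triggering event here is taken from the first template for illustration.
from obspy.clients.fdsn import Client

inv = get_inventory(Client("GEONET"), tribe,
                    triggering_event=tribe[0].event, n_stations=5)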
import os
import sys
from glob import glob

import matplotlib.pyplot as plt
from eqcorrscan.core.match_filter import Tribe
from obspy import Catalog, Inventory, Stream, read, read_inventory
from obspy.geodetics.base import degrees2kilometers, gps2dist_azimuth
from obspy.taup.taup_geo import calc_dist

inv = Inventory()
for f in glob("/home/genevieve.savard/hypoDDpy/station_files/*.xml"):
    inv += read_inventory(f)


def plot_3cols(stream, event):
    stations = list(set([tr.stats.station for tr in stream]))
    nsta = len(stations)
    plt.rcParams['figure.figsize'] = [15, 15]
    fig, axs = plt.subplots(nsta, 3, sharex=True, figsize=(24, nsta * 5))
    for i, sta in enumerate(stations):
        trn = stream.select(station=sta, channel="DPN")[0]
        tre = stream.select(station=sta, channel="DPE")[0]
        trz = stream.select(station=sta, channel="DPZ")[0]
        tplt = trn.times("matplotlib")
        axs[i][0].plot_date(tplt, trn.data, "k")
        # The source snippet ends after the first column; plotting the E and
        # Z components the same way is an assumed completion
        axs[i][1].plot_date(tplt, tre.data, "k")
        axs[i][2].plot_date(tplt, trz.data, "k")