import numpy as np
from obspy.core.utcdatetime import UTCDateTime

# Import path assumed from the gmprocess v1 layout; get_inventory() is a
# helper defined elsewhere in this test module.
from gmprocess.stationtrace import StationTrace


def test_trace():
    data = np.random.rand(1000)
    header = {
        'sampling_rate': 1,
        'npts': len(data),
        'network': 'US',
        'location': '11',
        'station': 'ABCD',
        'channel': 'HN1',
        'starttime': UTCDateTime(2010, 1, 1, 0, 0, 0)
    }
    inventory = get_inventory()
    invtrace = StationTrace(data=data, header=header, inventory=inventory)
    invtrace.setProvenance('detrend', {'detrending_method': 'demean'})
    invtrace.setParameter('failed', True)
    invtrace.setParameter('corner_frequencies', [1, 2, 3])
    invtrace.setParameter('metadata', {'name': 'Fred'})

    assert invtrace.getProvenance('detrend')[0] == {
        'detrending_method': 'demean'
    }
    assert invtrace.getParameter('failed')
    assert invtrace.getParameter('corner_frequencies') == [1, 2, 3]
    assert invtrace.getParameter('metadata') == {'name': 'Fred'}

    prov = invtrace.getProvSeries()
    assert prov[0] == 'demean'

def getStreams(self, eventid, stations=None, labels=None, config=None):
    """Get Streams from the ASDF file for a given event id and input tags.

    Args:
        eventid (str):
            Event ID corresponding to an Event in the workspace.
        stations (list):
            List of stations to search for.
        labels (list):
            List of processing labels to search for.
        config (dict):
            Configuration options.

    Returns:
        StreamCollection: Object containing a list of organized
        StationStreams.
    """
    trace_auxholder = []
    stream_auxholder = []
    auxdata = self.dataset.auxiliary_data
    if 'TraceProcessingParameters' in auxdata:
        trace_auxholder = auxdata.TraceProcessingParameters
    if 'StreamProcessingParameters' in auxdata:
        stream_auxholder = auxdata.StreamProcessingParameters
    streams = []

    if stations is None:
        stations = self.getStations(eventid)
    if labels is None:
        labels = self.getLabels()

    for waveform in self.dataset.ifilter(
            self.dataset.q.station == stations,
            self.dataset.q.tag == ['%s_%s' % (eventid, label)
                                   for label in labels]):
        tags = waveform.get_waveform_tags()
        for tag in tags:
            tstream = waveform[tag]
            inventory = waveform['StationXML']
            for ttrace in tstream:
                # Cast the data back to an explicit numpy dtype with the
                # same byte width as the stored samples.
                if isinstance(ttrace.data[0], np.floating):
                    if ttrace.data[0].nbytes == 4:
                        ttrace.data = ttrace.data.astype('float32')
                    else:
                        ttrace.data = ttrace.data.astype('float64')
                else:
                    if ttrace.data[0].nbytes == 2:
                        ttrace.data = ttrace.data.astype('int16')
                    elif ttrace.data[0].nbytes == 4:
                        ttrace.data = ttrace.data.astype('int32')
                    else:
                        ttrace.data = ttrace.data.astype('int64')
                trace = StationTrace(data=ttrace.data,
                                     header=ttrace.stats,
                                     inventory=inventory,
                                     config=config)

                # get the provenance information
                provname = format_nslct(trace.stats, tag)
                if provname in self.dataset.provenance.list():
                    provdoc = self.dataset.provenance[provname]
                    trace.setProvenanceDocument(provdoc)

                # get the trace processing parameters, which are stored
                # as JSON-encoded byte arrays in the auxiliary data
                top = format_netsta(trace.stats)
                trace_path = format_nslct(trace.stats, tag)
                if top in trace_auxholder:
                    root_auxholder = trace_auxholder[top]
                    if trace_path in root_auxholder:
                        bytelist = root_auxholder[
                            trace_path].data[:].tolist()
                        jsonstr = ''.join([chr(b) for b in bytelist])
                        jdict = json.loads(jsonstr)
                        for key, value in jdict.items():
                            trace.setParameter(key, value)

                # get the trace spectra arrays from auxiliary,
                # repack into stationtrace object
                spectra = {}
                if 'Cache' in auxdata:
                    for aux in auxdata['Cache'].list():
                        auxarray = auxdata['Cache'][aux]
                        if top not in auxarray.list():
                            continue
                        auxarray_top = auxarray[top]
                        if trace_path in auxarray_top:
                            specparts = camel_case_split(aux)
                            array_name = specparts[-1].lower()
                            specname = '_'.join(specparts[:-1]).lower()
                            specarray = auxarray_top[trace_path].data[()]
                            if specname in spectra:
                                spectra[specname][array_name] = specarray
                            else:
                                spectra[specname] = {array_name: specarray}

                    for key, value in spectra.items():
                        trace.setCached(key, value)

                stream = StationStream(traces=[trace])
                stream.tag = tag  # testing this out

                # get the stream processing parameters
                stream_path = format_nslit(
                    trace.stats, stream.get_inst(), tag)
                if top in stream_auxholder:
                    top_auxholder = stream_auxholder[top]
                    if stream_path in top_auxholder:
                        auxarray = top_auxholder[stream_path]
                        bytelist = auxarray.data[:].tolist()
                        jsonstr = ''.join([chr(b) for b in bytelist])
                        jdict = json.loads(jsonstr)
                        for key, value in jdict.items():
                            stream.setStreamParam(key, value)

                streams.append(stream)

    # No need to handle duplicates when retrieving streams from the
    # workspace file: duplicates are resolved before streams are added
    # to the workspace file.
    streams = StreamCollection(streams, handle_duplicates=False,
                               config=config)
    return streams
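
# Usage sketch (an assumption, not part of the original source): how
# getStreams() might be called to read processed records back out of an
# existing workspace file. The import path follows the gmprocess v1
# layout; the file path, event id, and label below are hypothetical
# placeholders.
from gmprocess.io.asdf.stream_workspace import StreamWorkspace

workspace = StreamWorkspace.open('/path/to/workspace.h5')
streams = workspace.getStreams('us1000abcd', labels=['default'])
for stream in streams:
    # each StationStream carries the tag it was stored under
    print(stream.tag, len(stream))
workspace.close()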