# Requires numpy and obspy's UTCDateTime; StationTrace comes from gmprocess,
# and get_inventory() is a test helper that returns an obspy Inventory for
# the channel described in the header below.
import numpy as np
from obspy import UTCDateTime


def test_trace():
    data = np.random.rand(1000)
    header = {'sampling_rate': 1,
              'npts': len(data),
              'network': 'US',
              'location': '11',
              'station': 'ABCD',
              'channel': 'HN1',
              'starttime': UTCDateTime(2010, 1, 1, 0, 0, 0)}
    inventory = get_inventory()
    invtrace = StationTrace(data=data,
                            header=header, inventory=inventory)
    invtrace.setProvenance('detrend', {'detrending_method': 'demean'})
    invtrace.setParameter('failed', True)
    invtrace.setParameter('corner_frequencies', [1, 2, 3])
    invtrace.setParameter('metadata', {'name': 'Fred'})

    assert invtrace.getProvenance('detrend')[0] == {
        'detrending_method': 'demean'}
    assert invtrace.getParameter('failed')
    assert invtrace.getParameter('corner_frequencies') == [1, 2, 3]
    assert invtrace.getParameter('metadata') == {'name': 'Fred'}
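
For context, a minimal sketch of what a get_inventory() helper like the one used above might build, assuming it returns an obspy Inventory describing the US.ABCD.11.HN1 channel from the test header; the coordinates, depth, and site name are placeholders.

from obspy import UTCDateTime
from obspy.core.inventory import Channel, Inventory, Network, Site, Station


def get_inventory():
    # Placeholder metadata matching the NET.STA.LOC.CHA codes in the test
    # header above; a real helper would also attach an instrument response.
    channel = Channel(code='HN1', location_code='11',
                      latitude=35.0, longitude=-118.0,
                      elevation=100.0, depth=0.0,
                      sample_rate=1.0,
                      start_date=UTCDateTime(2009, 1, 1))
    station = Station(code='ABCD', latitude=35.0, longitude=-118.0,
                      elevation=100.0, channels=[channel],
                      site=Site(name='Placeholder site'))
    network = Network(code='US', stations=[station])
    return Inventory(networks=[network], source='sketch')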
    def getStreams(self, eventid, stations=None, labels=None):
        """Get Stream from ASDF file given event id and input tags.

        Args:
            eventid (str):
                Event ID corresponding to an Event in the workspace.
            stations (list):
                List of stations to search for.
            labels (list):
                List of processing labels to search for.

        Returns:
            StreamCollection: Object containing list of organized
            StationStreams.
        """
        trace_auxholder = []
        stream_auxholder = []
        if 'TraceProcessingParameters' in self.dataset.auxiliary_data:
            trace_auxholder = self.dataset.auxiliary_data.TraceProcessingParameters
        if 'StreamProcessingParameters' in self.dataset.auxiliary_data:
            stream_auxholder = self.dataset.auxiliary_data.StreamProcessingParameters
        streams = []
        all_tags = []

        if stations is None:
            stations = self.getStations(eventid)
        if labels is None:
            labels = self.getLabels()
        for station in stations:
            for label in labels:
                all_tags.append('%s_%s_%s' % (eventid, station.lower(), label))

        for waveform in self.dataset.waveforms:
            ttags = waveform.get_waveform_tags()
            wtags = []
            if not len(all_tags):
                wtags = ttags
            else:
                wtags = list(set(all_tags).intersection(set(ttags)))
            for tag in wtags:
                if eventid in waveform[tag][0].stats.asdf.event_ids:
                    tstream = waveform[tag].copy()
                    inventory = waveform['StationXML']
                    for ttrace in tstream:
                        trace = StationTrace(data=ttrace.data,
                                             header=ttrace.stats,
                                             inventory=inventory)
                        tpl = (trace.stats.network.lower(),
                               trace.stats.station.lower(),
                               trace.stats.channel.lower())
                        channel = '%s_%s_%s' % tpl
                        channel_tag = '%s_%s' % (tag, channel)
                        if channel_tag in self.dataset.provenance.list():
                            provdoc = self.dataset.provenance[channel_tag]
                            trace.setProvenanceDocument(provdoc)
                        trace_path = '%s_%s' % (
                            tag, trace.stats.channel)
                        # look for trace-based processing parameters
                        if trace_path in trace_auxholder:
                            bytelist = trace_auxholder[
                                trace_path].data[:].tolist()
                            jsonstr = ''.join([chr(b) for b in bytelist])
                            jdict = json.loads(jsonstr)
                            for key, value in jdict.items():
                                trace.setParameter(key, value)

                        stream = StationStream(traces=[trace])
                        stream.tag = tag  # testing this out

                        # look for stream-based metadata
                        if tag in stream_auxholder:
                            bytelist = stream_auxholder[
                                tag].data[:].tolist()
                            jsonstr = ''.join([chr(b) for b in bytelist])
                            jdict = json.loads(jsonstr)
                            for key, value in jdict.items():
                                stream.setStreamParam(key, value)

                        streams.append(stream)
        streams = StreamCollection(streams)
        return streams
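
A hedged usage sketch for the method above. It assumes the enclosing class is gmprocess's StreamWorkspace, that StreamWorkspace.open() accepts a path to an existing ASDF (HDF5) workspace file, and that getStations()/getLabels() are the same helpers called inside the method; the import path, file name, and event ID are placeholders that vary by gmprocess version.

from gmprocess.io.asdf.stream_workspace import StreamWorkspace  # path may differ by version

workspace = StreamWorkspace.open('event_workspace.h5')  # placeholder file
labels = workspace.getLabels()                  # used above when labels is None
stations = workspace.getStations('us1000abcd')  # placeholder event ID
streams = workspace.getStreams('us1000abcd')    # all stations, all labels
for stream in streams:
    print(stream)                               # summary of each StationStream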
Example #4
    def getStreams(self, eventid, stations=None, labels=None):
        """Get Stream from ASDF file given event id and input tags.

        Args:
            eventid (str):
                Event ID corresponding to an Event in the workspace.
            stations (list):
                List of stations to search for.
            labels (list):
                List of processing labels to search for.

        Returns:
            StreamCollection: Object containing list of organized
            StationStreams.
        """
        trace_auxholder = []
        stream_auxholder = []
        if 'TraceProcessingParameters' in self.dataset.auxiliary_data:
            trace_auxholder = self.dataset.auxiliary_data.TraceProcessingParameters
        if 'StreamProcessingParameters' in self.dataset.auxiliary_data:
            stream_auxholder = self.dataset.auxiliary_data.StreamProcessingParameters
        streams = []

        if stations is None:
            stations = self.getStations(eventid)
        if labels is None:
            labels = self.getLabels()

        # tried doing a query here using dataset ifilter on event,
        # but it didn't work...
        for waveform in self.dataset.waveforms:
            tags = waveform.get_waveform_tags()
            for tag in tags:
                parts = tag.split('_')
                if len(parts) > 2:
                    tlabel = parts[-1]
                    teventid = '_'.join(parts[0:-1])
                else:
                    teventid, tlabel = tag.split('_')
                if eventid != teventid:
                    continue
                if tlabel not in labels:
                    continue
                stream_name = list(
                    waveform.get_waveform_attributes().keys())[0]
                parts = stream_name.split('.')
                tstation = parts[1]
                if tstation not in stations:
                    continue

                tstream = waveform[tag]

                inventory = waveform['StationXML']
                for ttrace in tstream:
                    trace = StationTrace(data=ttrace.data,
                                         header=ttrace.stats,
                                         inventory=inventory)

                    # get the provenance information
                    provname = format_nslct(trace.stats, tag)
                    if provname in self.dataset.provenance.list():
                        provdoc = self.dataset.provenance[provname]
                        trace.setProvenanceDocument(provdoc)

                    # get the trace processing parameters
                    top = format_netsta(trace.stats)
                    trace_path = format_nslct(trace.stats, tag)
                    if top in trace_auxholder:
                        root_auxholder = trace_auxholder[top]
                        if trace_path in root_auxholder:
                            bytelist = root_auxholder[
                                trace_path].data[:].tolist()
                            jsonstr = ''.join([chr(b) for b in bytelist])
                            jdict = json.loads(jsonstr)
                            for key, value in jdict.items():
                                trace.setParameter(key, value)

                    # get the trace spectra arrays from auxiliary,
                    # repack into stationtrace object
                    spectra = {}
                    if 'Cache' in self.dataset.auxiliary_data:
                        for aux in self.dataset.auxiliary_data['Cache'].list():
                            auxarray = self.dataset.auxiliary_data['Cache'][aux]
                            if top not in auxarray.list():
                                continue
                            auxarray_top = auxarray[top]
                            if trace_path in auxarray_top:
                                specparts = camel_case_split(aux)
                                array_name = specparts[-1].lower()
                                specname = '_'.join(specparts[:-1]).lower()
                                specarray = auxarray_top[trace_path].data[:]
                                if specname in spectra:
                                    spectra[specname][array_name] = specarray
                                else:
                                    spectra[specname] = {array_name: specarray}
                        for key, value in spectra.items():
                            trace.setCached(key, value)

                    stream = StationStream(traces=[trace])
                    stream.tag = tag  # testing this out

                    # get the stream processing parameters
                    stream_path = format_nslit(trace.stats, stream.get_inst(), tag)
                    if top in stream_auxholder:
                        top_auxholder = stream_auxholder[top]
                        if stream_path in top_auxholder:
                            auxarray = top_auxholder[stream_path]
                            bytelist = auxarray.data[:].tolist()
                            jsonstr = ''.join([chr(b) for b in bytelist])
                            jdict = json.loads(jsonstr)
                            for key, value in jdict.items():
                                stream.setStreamParam(key, value)

                    streams.append(stream)
        streams = StreamCollection(streams)
        return streams
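
The parameter blocks above recover dictionaries that were stored as byte arrays of JSON text in the ASDF auxiliary data. A minimal sketch of that round trip, using a placeholder dictionary:

import json

import numpy as np

params = {'corner_frequencies': [0.05, 20.0], 'failed': False}

# encode: what a writer would store as the auxiliary-data array
jsonstr = json.dumps(params)
byte_array = np.frombuffer(jsonstr.encode('utf-8'), dtype=np.uint8)

# decode: mirrors the bytelist -> chr() -> json.loads logic in getStreams()
bytelist = byte_array.tolist()
decoded = json.loads(''.join(chr(b) for b in bytelist))
assert decoded == params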
    def getStreams(self, eventid, stations=None, labels=None):
        """Get Stream from ASDF file given event id and input tags.

        Args:
            eventid (str):
                Event ID corresponding to an Event in the workspace.
            stations (list):
                List of stations to search for.
            labels (list):
                List of processing labels to search for.

        Returns:
            StreamCollection: Object containing list of organized
            StationStreams.
        """
        trace_auxholder = []
        stream_auxholder = []
        if 'TraceProcessingParameters' in self.dataset.auxiliary_data:
            trace_auxholder = self.dataset.auxiliary_data.TraceProcessingParameters
        if 'StreamProcessingParameters' in self.dataset.auxiliary_data:
            stream_auxholder = self.dataset.auxiliary_data.StreamProcessingParameters
        streams = []
        all_tags = []

        if stations is None:
            stations = self.getStations(eventid)
        if labels is None:
            labels = self.getLabels()
        for station in stations:
            for label in labels:
                all_tags.append('%s_%s' % (station.lower(), label))

        for waveform in self.dataset.waveforms:
            ttags = waveform.get_waveform_tags()
            wtags = []
            if not len(all_tags):
                wtags = ttags
            else:
                wtags = list(set(all_tags).intersection(set(ttags)))
            for tag in wtags:
                if eventid in waveform[tag][0].stats.asdf.event_ids:
                    tstream = waveform[tag].copy()
                    inventory = waveform['StationXML']
                    traces = []
                    for ttrace in tstream:
                        trace = StationTrace(data=ttrace.data,
                                             header=ttrace.stats,
                                             inventory=inventory)
                        tpl = (trace.stats.network.lower(),
                               trace.stats.station.lower(),
                               trace.stats.channel.lower())
                        channel = '%s_%s_%s' % tpl
                        channel_tag = '%s_%s' % (tag, channel)
                        if channel_tag in self.dataset.provenance.list():
                            provdoc = self.dataset.provenance[channel_tag]
                            trace.setProvenanceDocument(provdoc)
                        trace_path = '%s_%s_%s' % (eventid,
                                                   tag,
                                                   trace.stats.channel)
                        if trace_path in trace_auxholder:
                            bytelist = trace_auxholder[
                                trace_path].data[:].tolist()
                            jsonstr = ''.join([chr(b) for b in bytelist])
                            jdict = json.loads(jsonstr)
                            # jdict = unstringify_dict(jdict)
                            for key, value in jdict.items():
                                trace.setParameter(key, value)

                        traces.append(trace)
                    stream = StationStream(traces=traces)
                    stream.tag = tag  # testing this out

                    # look for stream-based metadata
                    stream_path = '%s_%s' % (eventid, tag)
                    if stream_path in stream_auxholder:
                        bytelist = stream_auxholder[
                            stream_path].data[:].tolist()
                        jsonstr = ''.join([chr(b) for b in bytelist])
                        jdict = json.loads(jsonstr)
                        # jdict = unstringify_dict(jdict)
                        for key, value in jdict.items():
                            stream.setStreamParam(key, value)

                    streams.append(stream)
        streams = StreamCollection(streams)
        return streams
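
A hedged usage sketch of the stations/labels filters documented above, reusing the placeholder workspace object from the earlier sketch; the event ID, station code, and processing label are placeholders.

streams = workspace.getStreams('us1000abcd',
                               stations=['ABCD'],
                               labels=['default'])
for stream in streams:
    for trace in stream:
        print(trace.id)  # NET.STA.LOC.CHA of each returned trace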
Example #6
    def getStreams(self, eventid, stations=None, labels=None, get_raw=False):
        """Get Stream from ASDF file given event id and input tags.

        Args:
            eventid (str):
                Event ID corresponding to an Event in the workspace.
            stations (list):
                List of stations to search for.
            labels (list):
                List of processing labels to search for.
            get_raw (bool):
                If True, return only streams tagged 'raw_recording',
                ignoring the stations and labels filters.

        Returns:
            list: List of StationStream objects.
        """
        auxholder = []
        if 'ProcessingParameters' in self.dataset.auxiliary_data:
            auxholder = self.dataset.auxiliary_data.ProcessingParameters
        streams = []
        all_tags = []
        if not get_raw:
            if stations is None:
                stations = self.getStations(eventid)
            if labels is None:
                labels = self.getLabels()
            for station in stations:
                for label in labels:
                    all_tags.append('%s_%s' % (station.lower(), label))
        else:
            all_tags = ['raw_recording']
        for waveform in self.dataset.waveforms:
            ttags = waveform.get_waveform_tags()
            if not get_raw and 'raw_recording' in ttags:
                ttags.remove('raw_recording')
            wtags = []
            if not len(all_tags):
                wtags = ttags
            else:
                wtags = list(set(all_tags).intersection(set(ttags)))
            for tag in wtags:
                if eventid in waveform[tag][0].stats.asdf.event_ids:
                    tstream = waveform[tag].copy()
                    inventory = waveform['StationXML']
                    traces = []
                    for ttrace in tstream:
                        trace = StationTrace(data=ttrace.data,
                                             header=ttrace.stats,
                                             inventory=inventory)
                        tpl = (trace.stats.network.lower(),
                               trace.stats.station.lower(),
                               trace.stats.channel.lower())
                        channel = '%s_%s_%s' % tpl
                        channel_tag = '%s_%s' % (tag, channel)
                        if channel_tag in self.dataset.provenance.list():
                            provdoc = self.dataset.provenance[channel_tag]
                            trace.setProvenanceDocument(provdoc)
                        trace_path = '%s_%s' % (tag, trace.stats.channel)
                        if trace_path in auxholder:
                            bytelist = auxholder[trace_path].data[:].tolist()
                            jsonstr = ''.join([chr(b) for b in bytelist])
                            jdict = json.loads(jsonstr)
                            # jdict = unstringify_dict(jdict)
                            for key, value in jdict.items():
                                trace.setParameter(key, value)

                        traces.append(trace)
                    stream = StationStream(traces=traces)
                    stream.tag = tag  # testing this out
                    streams.append(stream)
        return streams
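
A hedged usage sketch of this variant's get_raw flag, again with the placeholder workspace object; get_raw=True returns only streams tagged 'raw_recording' and skips the station/label filtering, while the default returns processed streams matching the requested labels.

raw_streams = workspace.getStreams('us1000abcd', get_raw=True)
processed_streams = workspace.getStreams('us1000abcd', labels=['default'])
print(len(raw_streams), len(processed_streams))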