Example #1
    def getWaveform(self, network, station, location=None, channel=None,
                    starttime=None, endtime=None, apply_filter=None,
                    getPAZ=False, getCoordinates=False,
                    metadata_timecheck=True, **kwargs):
        """
        Gets an ObsPy Stream object.

        :type network: str
        :param network: Network code, e.g. ``'BW'``.
        :type station: str
        :param station: Station code, e.g. ``'MANZ'``.
        :type location: str
        :param location: Location code, e.g. ``'00'``.
        :type channel: str
        :param channel: Channel code, supporting wildcards for the component,
            e.g. ``'EHE'`` or ``'EH*'``.
        :type starttime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param starttime: Start date and time.
        :type endtime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param endtime: End date and time.
        :type apply_filter: bool, optional
        :param apply_filter: Apply filter (default is ``False``).
        :type getPAZ: bool, optional
        :param getPAZ: Fetch PAZ information and append to
            :class:`~obspy.core.trace.Stats` of all fetched traces. This
            considerably slows down the request (default is ``False``).
        :type getCoordinates: bool, optional
        :param getCoordinates: Fetch coordinate information and append to
            :class:`~obspy.core.trace.Stats` of all fetched traces. This
            considerably slows down the request (default is ``False``).
        :type metadata_timecheck: bool, optional
        :param metadata_timecheck: For ``getPAZ`` and ``getCoordinates``, check
            whether the metadata changes between start and end time and raise
            an Exception if it does. This check can be deactivated to save
            time.
        :rtype: :class:`~obspy.core.stream.Stream`
        :return: An ObsPy Stream object.
        """
        # NOTHING goes ABOVE this line!
        # append all args to kwargs, thus having everything in one dictionary
        for key, value in locals().items():
            if key not in ["self", "kwargs"]:
                kwargs[key] = value

        # allow time strings in arguments
        for time in ["starttime", "endtime"]:
            if isinstance(kwargs[time], (str, native_str)):
                kwargs[time] = UTCDateTime(kwargs[time])

        trim_start = kwargs['starttime']
        trim_end = kwargs['endtime']
        # we expand the requested timespan on both ends by two samples in
        # order to be able to make use of the nearest_sample option of
        # stream.trim(). (see trim() and tickets #95 and #105)
        # this is only possible if a channel is specified, otherwise delta = 0
        delta = 2 * guessDelta(kwargs['channel'])
        kwargs['starttime'] = trim_start - delta
        kwargs['endtime'] = trim_end + delta

        url = '/seismology/waveform/getWaveform'
        data = self.client._fetch(url, **kwargs)
        if not data:
            raise Exception("No waveform data available")
        # unpickle
        stream = _unpickle(data)
        if len(stream) == 0:
            raise Exception("No waveform data available")
        stream._cleanup()

        # trimming is only needed because we extended the time span above
        if channel:
            stream.trim(trim_start, trim_end)
        if getPAZ:
            for tr in stream:
                paz = self.client.station.getPAZ(seed_id=tr.id,
                                                 datetime=starttime)
                if metadata_timecheck:
                    paz_check = self.client.station.getPAZ(seed_id=tr.id,
                                                           datetime=endtime)
                    if paz != paz_check:
                        msg = "PAZ information changing from start time to" + \
                              " end time."
                        raise Exception(msg)
                tr.stats['paz'] = paz

        if getCoordinates:
            coords = self.client.station.getCoordinates(
                network=network, station=station, location=location,
                datetime=starttime)
            if metadata_timecheck:
                coords_check = self.client.station.getCoordinates(
                    network=network, station=station,
                    location=location, datetime=endtime)
                if coords != coords_check:
                    msg = "Coordinate information changing from start " + \
                          "time to end time."
                    raise Exception(msg)
            for tr in stream:
                tr.stats['coordinates'] = coords.copy()
        return stream
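
A minimal usage sketch for the method above. The import path (``obspy.seishub``), the server URL and the credentials are assumptions for illustration; only the call signature and the ``paz`` / ``coordinates`` entries attached to each trace's ``Stats`` follow from the code itself.

    # Hedged usage sketch; import path, server URL and credentials are
    # assumptions, only the getWaveform() signature comes from the code above.
    from obspy import UTCDateTime
    from obspy.seishub import Client  # assumed module for this waveform mapper

    client = Client("http://localhost:8080", user="admin", password="admin")
    t = UTCDateTime("2010-08-01T12:00:00")
    st = client.waveform.getWaveform("BW", "MANZ", "", "EH*", t, t + 30,
                                     getPAZ=True, getCoordinates=True)
    for tr in st:
        # getPAZ/getCoordinates attach dictionaries to each trace's Stats
        print("%s  paz=%s  coords=%s" % (tr.id, tr.stats.paz,
                                         tr.stats.coordinates))
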
Example #2
    def saveWaveform(self, filename, network, station, location, channel,
                     starttime, endtime, format="MSEED"):
        """
        Writes a retrieved waveform directly into a file.

        This method stores the unmodified waveform data exactly as delivered
        by the NERIES Web service, e.g. preserving the record-based quality
        flags of Mini-SEED files, which would be lost when reading the data
        with obspy.mseed.

        :type filename: str
        :param filename: Name of the output file.
        :type network: str
        :param network: Network code, e.g. ``'BW'``.
        :type station: str
        :param station: Station code, e.g. ``'MANZ'``.
        :type location: str
        :param location: Location code, e.g. ``'01'``. The location code may
            contain wildcards.
        :type channel: str
        :param channel: Channel code, e.g. ``'EHE'``. The channel code may
            contain wildcards.
        :type starttime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param starttime: Start date and time.
        :type endtime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param endtime: End date and time.
        :type format: ``'FSEED'`` or ``'MSEED'``, optional
        :param format: Output format. Either as full SEED (``'FSEED'``) or
            Mini-SEED (``'MSEED'``) volume. Defaults to ``'MSEED'``.
        :return: None

        .. seealso:: http://www.orfeus-eu.org/wsdl/seismolink/seismolink.wsdl

        .. rubric:: Example

        >>> from obspy.neries import Client
        >>> c = Client(user='******')
        >>> dt = UTCDateTime("2009-04-01T00:00:00")
        >>> st = c.saveWaveform("outfile.fseed", "NL", "WIT", "", "BH*",
        ...                     dt, dt+30, format="FSEED")  #doctest: +SKIP
        """
        # enable logging if debug option is set
        if self.debug:
            import logging
            logging.basicConfig(level=logging.INFO)
            logging.getLogger('suds.client').setLevel(logging.DEBUG)
        # initialize client
        client = SudsClient(SEISMOLINK_WSDL)
        # set cache of 5 days
        cache = client.options.cache
        cache.setduration(days=5)
        # create user token
        usertoken = client.factory.create('UserTokenType')
        usertoken.email = self.user
        usertoken.password = self.password
        usertoken.label = self.user_agent.replace(' ', '_')
        usertoken.locale = ""
        # create station filter
        stationid = client.factory.create('StationIdentifierType')
        stationid.NetworkCode = network
        stationid.StationCode = station
        stationid.ChannelCode = channel
        stationid.LocId = location
        # adding default record length (4096) * delta to start and end time to
        # ensure right date times
        # XXX: 4096 may be overkill
        delta = guessDelta(channel) * 4096
        stationid.TimeSpan.TimePeriod.beginPosition = \
            (UTCDateTime(starttime) - delta).strftime("%Y-%m-%dT%H:%M:%S")
        stationid.TimeSpan.TimePeriod.endPosition = \
            (UTCDateTime(endtime) + delta).strftime("%Y-%m-%dT%H:%M:%S")
        # request data
        if format == 'MSEED':
            client.options.plugins = \
                [_AttributePlugin({'DataFormat': 'MSEED'})]
        # start data request
        response = client.service.dataRequest(usertoken, stationid)
        client.options.plugins = []
        # filter for request ids
        request_ids = [r._Id for r in response.RoutedRequest]
        if not request_ids:
            return
        # check status using request ids
        _loops = 0
        while True:
            response = client.service.checkStatus(usertoken, request_ids)
            status = [r.ReadyFlag for r in response.RoutedRequest]
            # if we hit MAX_REQUESTS break the loop
            if _loops > MAX_REQUESTS:
                msg = 'MAX_REQUESTS exceeded - breaking current request loop'
                warnings.warn(msg, UserWarning)
                break
            if "false" in status:
                # retry until all are set to 'true'
                _loops += 1
                continue
            break
        # keep only request ids which are fulfilled and have 'status = OK'
        request_ids = [r._Id for r in response.RoutedRequest
                       if 'Status: OK' in r.StatusDescription
                       and r.Fulfillment == 100]
        if not request_ids:
            return
        # retrieve download URLs using request ids
        response = client.service.dataRetrieve(usertoken, request_ids)
        urls = [r.DownloadToken.DownloadURL for r in response.DataItem]
        # create file handler if a file name is given
        if isinstance(filename, basestring):
            fh = open(filename, "wb")
        elif isinstance(filename, file):
            fh = filename
        else:
            msg = "Parameter filename must be either string or file handler."
            raise TypeError(msg)
        for url in urls:
            fh.write(urllib2.urlopen(url).read())
        if isinstance(filename, basestring):
            fh.close()
        # clean up
        response = client.service.purgeData(usertoken, request_ids)
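
Because the ``isinstance`` checks near the end accept either a file name or an already opened file object, a caller can also stream the downloaded volume into a handle it manages itself. A small sketch, assuming the ``obspy.neries`` ``Client`` from the docstring example and made-up credentials:

    # Hedged usage sketch; credentials are placeholders. Passing an open file
    # handle takes the second isinstance() branch above, so saveWaveform()
    # writes each downloaded chunk into fh and leaves closing it to the caller.
    from obspy import UTCDateTime
    from obspy.neries import Client

    client = Client(user="test@example.com", password="secret")
    t = UTCDateTime("2009-04-01T00:00:00")
    with open("wit_bh.mseed", "wb") as fh:
        client.saveWaveform(fh, "NL", "WIT", "", "BH*", t, t + 30,
                            format="MSEED")
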
Example #3
    def saveWaveform(self,
                     filename,
                     network,
                     station,
                     location,
                     channel,
                     starttime,
                     endtime,
                     format="MSEED"):
        """
        Writes a retrieved waveform directly into a file.

        This method stores the unmodified waveform data exactly as delivered
        by the NERIES Web service, e.g. preserving the record-based quality
        flags of Mini-SEED files, which would be lost when reading the data
        with obspy.mseed.

        :type filename: str
        :param filename: Name of the output file.
        :type network: str
        :param network: Network code, e.g. ``'BW'``.
        :type station: str
        :param station: Station code, e.g. ``'MANZ'``.
        :type location: str
        :param location: Location code, e.g. ``'01'``. The location code may
            contain wildcards.
        :type channel: str
        :param channel: Channel code, e.g. ``'EHE'``. The channel code may
            contain wildcards.
        :type starttime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param starttime: Start date and time.
        :type endtime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param endtime: End date and time.
        :type format: ``'FSEED'`` or ``'MSEED'``, optional
        :param format: Output format. Either as full SEED (``'FSEED'``) or
            Mini-SEED (``'MSEED'``) volume. Defaults to ``'MSEED'``.
        :return: None

        .. seealso:: http://www.orfeus-eu.org/wsdl/seismolink/seismolink.wsdl

        .. rubric:: Example

        >>> from obspy.neries import Client
        >>> c = Client(user='******')
        >>> dt = UTCDateTime("2009-04-01T00:00:00")
        >>> st = c.saveWaveform("outfile.fseed", "NL", "WIT", "", "BH*",
        ...                     dt, dt+30, format="FSEED")  #doctest: +SKIP
        """
        # enable logging if debug option is set
        if self.debug:
            import logging
            logging.basicConfig(level=logging.INFO)
            logging.getLogger('suds.client').setLevel(logging.DEBUG)
        # initialize client
        client = SudsClient(SEISMOLINK_WSDL)
        # set cache of 5 days
        cache = client.options.cache
        cache.setduration(days=5)
        # create user token
        usertoken = client.factory.create('UserTokenType')
        usertoken.email = self.user
        usertoken.password = self.password
        usertoken.label = self.user_agent.replace(' ', '_')
        usertoken.locale = ""
        # create station filter
        stationid = client.factory.create('StationIdentifierType')
        stationid.NetworkCode = network
        stationid.StationCode = station
        stationid.ChannelCode = channel
        stationid.LocId = location
        # adding default record length (4096) * delta to start and end time to
        # ensure right date times
        # XXX: 4096 may be overkill
        delta = guessDelta(channel) * 4096
        stationid.TimeSpan.TimePeriod.beginPosition = \
            (UTCDateTime(starttime) - delta).strftime("%Y-%m-%dT%H:%M:%S")
        stationid.TimeSpan.TimePeriod.endPosition = \
            (UTCDateTime(endtime) + delta).strftime("%Y-%m-%dT%H:%M:%S")
        # request data
        if format == 'MSEED':
            client.options.plugins = \
                [_AttributePlugin({'DataFormat': 'MSEED'})]
        # start data request
        response = client.service.dataRequest(usertoken, stationid)
        client.options.plugins = []
        # filter for request ids
        request_ids = [r._Id for r in response.RoutedRequest]
        if not request_ids:
            return
        # check status using request ids
        _loops = 0
        while True:
            response = client.service.checkStatus(usertoken, request_ids)
            status = [r.ReadyFlag for r in response.RoutedRequest]
            # if we hit MAX_REQUESTS break the loop
            if _loops > MAX_REQUESTS:
                msg = 'MAX_REQUESTS exceeded - breaking current request loop'
                warnings.warn(msg, UserWarning)
                break
            if "false" in status:
                # retry until all are set to 'true'
                _loops += 1
                continue
            break
        # keep only request ids which are fulfilled and have 'status = OK'
        request_ids = [
            r._Id for r in response.RoutedRequest
            if 'Status: OK' in r.StatusDescription and r.Fulfillment == 100
        ]
        if not request_ids:
            return
        # retrieve download URLs using request ids
        response = client.service.dataRetrieve(usertoken, request_ids)
        urls = [r.DownloadToken.DownloadURL for r in response.DataItem]
        # create file handler if a file name is given
        if isinstance(filename, basestring):
            fh = open(filename, "wb")
        elif isinstance(filename, file):
            fh = filename
        else:
            msg = "Parameter filename must be either string or file handler."
            raise TypeError(msg)
        for url in urls:
            fh.write(urllib2.urlopen(url).read())
        if isinstance(filename, basestring):
            fh.close()
        # clean up
        response = client.service.purgeData(usertoken, request_ids)
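
The comment about ``guessDelta(channel) * 4096`` is easiest to see with concrete numbers. A worked sketch, assuming a 20 Hz ``BH*`` channel (0.05 s sampling interval); the real code derives the interval from the channel code instead of hard-coding it:

    # Worked sketch of the window padding: widen the requested span by one
    # default record length (4096 samples) of data on each side.
    # The 0.05 s sampling interval is an assumption (a 20 Hz BH* channel).
    from obspy import UTCDateTime

    starttime = UTCDateTime("2009-04-01T00:00:00")
    endtime = starttime + 30
    delta = 0.05 * 4096  # 204.8 s of padding per side
    begin = (starttime - delta).strftime("%Y-%m-%dT%H:%M:%S")
    # begin == '2009-03-31T23:56:35'
    end = (endtime + delta).strftime("%Y-%m-%dT%H:%M:%S")
    # end == '2009-04-01T00:03:54'
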
Example #4
    def getWaveform(self,
                    network,
                    station,
                    location=None,
                    channel=None,
                    starttime=None,
                    endtime=None,
                    apply_filter=None,
                    getPAZ=False,
                    getCoordinates=False,
                    metadata_timecheck=True,
                    **kwargs):
        """
        Gets an ObsPy Stream object.

        :type network: str
        :param network: Network code, e.g. ``'BW'``.
        :type station: str
        :param station: Station code, e.g. ``'MANZ'``.
        :type location: str
        :param location: Location code, e.g. ``'00'``.
        :type channel: str
        :param channel: Channel code, supporting wildcards for the component,
            e.g. ``'EHE'`` or ``'EH*'``.
        :type starttime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param starttime: Start date and time.
        :type endtime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param endtime: End date and time.
        :type apply_filter: bool, optional
        :param apply_filter: Apply filter (default is ``False``).
        :type getPAZ: bool, optional
        :param getPAZ: Fetch PAZ information and append to
            :class:`~obspy.core.trace.Stats` of all fetched traces. This
            considerably slows down the request (default is ``False``).
        :type getCoordinates: bool, optional
        :param getCoordinates: Fetch coordinate information and append to
            :class:`~obspy.core.trace.Stats` of all fetched traces. This
            considerably slows down the request (default is ``False``).
        :type metadata_timecheck: bool, optional
        :param metadata_timecheck: For ``getPAZ`` and ``getCoordinates``, check
            whether the metadata changes between start and end time and raise
            an Exception if it does. This check can be deactivated to save
            time.
        :rtype: :class:`~obspy.core.stream.Stream`
        :return: An ObsPy Stream object.
        """
        # NOTHING goes ABOVE this line!
        # append all args to kwargs, thus having everything in one dictionary
        for key, value in locals().items():
            if key not in ["self", "kwargs"]:
                kwargs[key] = value

        # allow time strings in arguments
        for time_ in ["starttime", "endtime"]:
            if isinstance(kwargs[time_], (str, native_str)):
                kwargs[time_] = UTCDateTime(kwargs[time_])

        trim_start = kwargs['starttime']
        trim_end = kwargs['endtime']
        # we expand the requested timespan on both ends by two samples in
        # order to be able to make use of the nearest_sample option of
        # stream.trim(). (see trim() and tickets #95 and #105)
        # this is only possible if a channel is specified, otherwise delta = 0
        delta = 2 * guessDelta(kwargs['channel'])
        kwargs['starttime'] = trim_start - delta
        kwargs['endtime'] = trim_end + delta

        url = '/seismology/waveform/getWaveform'
        data = self.client._fetch(url, **kwargs)
        if not data:
            raise Exception("No waveform data available")
        # unpickle
        stream = _unpickle(data)
        if len(stream) == 0:
            raise Exception("No waveform data available")
        stream._cleanup()

        # trimming is only needed because we extended the time span above
        if channel:
            stream.trim(trim_start, trim_end)
        if getPAZ:
            for tr in stream:
                paz = self.client.station.getPAZ(seed_id=tr.id,
                                                 datetime=starttime)
                if metadata_timecheck:
                    paz_check = self.client.station.getPAZ(seed_id=tr.id,
                                                           datetime=endtime)
                    if paz != paz_check:
                        msg = "PAZ information changing from start time to" + \
                              " end time."
                        raise Exception(msg)
                tr.stats['paz'] = paz

        if getCoordinates:
            coords = self.client.station.getCoordinates(network=network,
                                                        station=station,
                                                        location=location,
                                                        datetime=starttime)
            if metadata_timecheck:
                coords_check = self.client.station.getCoordinates(
                    network=network,
                    station=station,
                    location=location,
                    datetime=endtime)
                if coords != coords_check:
                    msg = "Coordinate information changing from start " + \
                          "time to end time."
                    raise Exception(msg)
            for tr in stream:
                tr.stats['coordinates'] = coords.copy()
        return stream
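
The two-sample padding plus ``nearest_sample`` trimming used above can be illustrated in isolation. A self-contained sketch with a synthetic 100 Hz trace (the trace itself is an assumption; only the two-sample slack and ``Stream.trim`` with nearest-sample snapping mirror the code above):

    # Standalone sketch of the expand-then-trim idea: fetch a window padded by
    # two samples, then cut it back to the exact request with nearest_sample
    # trimming. The synthetic trace is for illustration only.
    import numpy as np
    from obspy import Stream, Trace, UTCDateTime

    t0 = UTCDateTime("2012-01-01T00:00:00")
    tr = Trace(data=np.arange(1000, dtype=np.float32),
               header={"starttime": t0, "sampling_rate": 100.0})
    st = Stream([tr])

    trim_start = t0 + 1.003          # not exactly on a sample
    trim_end = t0 + 5.007
    delta = 2 * tr.stats.delta       # two samples of slack, as in getWaveform
    # emulate the padded server request, then trim to the requested window;
    # nearest_sample (True by default) snaps the cut to existing samples
    padded = st.slice(trim_start - delta, trim_end + delta)
    padded.trim(trim_start, trim_end)
    print(padded[0].stats.starttime)  # 2012-01-01T00:00:01.000000Z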