Example #1
0
 def ping(self):
     """
     Ping the SeisHub server.

     :rtype: float or None
     :return: Round-trip time for fetching ``base_url`` in milliseconds,
         or ``None`` if the server could not be reached.
     """
     try:
         t1 = time.time()
         urlopen(self.base_url).read()
         return (time.time() - t1) * 1000.0
     except Exception:
         # Narrowed from a bare "except" (which also swallowed
         # SystemExit/KeyboardInterrupt) and made the None return explicit:
         # the original had a dead "None" statement instead of "return None".
         return None
Example #2
0
 def ping(self):
     """
     Ping the SeisHub server.

     :rtype: float or None
     :return: Time in milliseconds it took to fetch ``base_url``, or
         ``None`` when the request fails for any reason.
     """
     start = time.time()
     try:
         urlopen(self.base_url).read()
     except Exception:
         # Best-effort ping: report unreachable servers as None instead of
         # raising. The original used a bare "except" and a dead "None"
         # statement with no explicit return.
         return None
     return (time.time() - start) * 1000.0
Example #3
0
    def _HTTP_request(self, url, method, xml_string="", headers=None):
        """
        Send a HTTP request via urllib2.

        :type url: str
        :param url: Complete URL of resource
        :type method: str
        :param method: HTTP method of request, e.g. "PUT". Must be one of
            ``HTTP_ACCEPTED_METHODS``.
        :type xml_string: str
        :param xml_string: XML for a send request (PUT/POST). Required for
            data methods, forbidden for no-data methods.
        :type headers: dict, optional
        :param headers: Header information for request, e.g.
            {'User-Agent': "obspyck"}
        :rtype: tuple of (int, str)
        :return: HTTP status code and status message of the response.
        :raises ValueError: If ``method`` is not an accepted HTTP method.
        :raises TypeError: If ``xml_string`` is missing for a data method or
            supplied for a no-data method.
        """
        # Fresh dict per call: the original mutable default ``headers={}``
        # would be shared across all invocations of this method.
        if headers is None:
            headers = {}
        if method not in HTTP_ACCEPTED_METHODS:
            raise ValueError("Method must be one of %s" %
                             HTTP_ACCEPTED_METHODS)
        if method in HTTP_ACCEPTED_DATA_METHODS and not xml_string:
            raise TypeError("Missing data for %s request." % method)
        elif method in HTTP_ACCEPTED_NODATA_METHODS and xml_string:
            raise TypeError("Unexpected data for %s request." % method)

        req = _RequestWithMethod(method=method, url=url, data=xml_string,
                                 headers=headers)
        # it seems the following always ends in a HTTPError even with
        # nice status codes...?!?
        try:
            response = urlopen(req)
            return response.code, response.msg
        except HTTPError as e:
            return e.code, e.msg
Example #4
0
    def _fetch(self, service, data=None, headers=None, param_list=None,
               **params):
        """
        Send a HTTP request via urllib2.

        :type service: str
        :param service: Name of service
        :type data: str
        :param data: Channel list as returned by `availability` Web service
        :type headers: dict, optional
        :param headers: Additional header information for request
        :type param_list: list of str, optional
        :param param_list: Pre-encoded ``"key=value"`` strings appended to
            the query string as-is.
        :return: Raw body of the HTTP response.
        """
        # Avoid the mutable-default-argument pitfall: the original
        # ``headers={}`` was mutated below, so the User-Agent header leaked
        # into every later call sharing the default dict.
        headers = dict(headers) if headers else {}
        if param_list is None:
            param_list = []
        headers['User-Agent'] = self.user_agent
        # replace special characters
        remoteaddr = "/".join([self.base_url.rstrip("/"), service,
                               str(self.major_versions[service]), "query"])
        options = '&'.join(param_list)
        if params:
            if options:
                options += '&'
            options += compatibility.urlencode(params)
        if options:
            remoteaddr = "%s?%s" % (remoteaddr, options)
        if self.debug:
            print(('\nRequesting %s' % (remoteaddr)))
        req = compatibility.Request(url=remoteaddr, data=data, headers=headers)
        response = compatibility.urlopen(req, timeout=self.timeout)
        doc = response.read()
        return doc
Example #5
0
    def _fetch(self, service, data=None, headers=None, param_list=None,
               **params):
        """
        Send a HTTP request via urllib2.

        :type service: str
        :param service: Name of service
        :type data: str
        :param data: Channel list as returned by `availability` Web service
        :type headers: dict, optional
        :param headers: Additional header information for request
        :type param_list: list of str, optional
        :param param_list: Pre-encoded ``"key=value"`` strings appended to
            the query string as-is.
        :return: Raw body of the HTTP response.
        """
        # Avoid the mutable-default-argument pitfall: the original
        # ``headers={}`` was mutated below, so the User-Agent header leaked
        # into every later call sharing the default dict.
        headers = dict(headers) if headers else {}
        if param_list is None:
            param_list = []
        headers['User-Agent'] = self.user_agent
        # replace special characters
        remoteaddr = "/".join([self.base_url.rstrip("/"), service,
                               str(self.major_versions[service]), "query"])
        options = '&'.join(param_list)
        if params:
            if options:
                options += '&'
            options += compatibility.urlencode(params)
        if options:
            remoteaddr = "%s?%s" % (remoteaddr, options)
        if self.debug:
            print(('\nRequesting %s' % (remoteaddr)))
        req = compatibility.Request(url=remoteaddr, data=data, headers=headers)
        response = compatibility.urlopen(req, timeout=self.timeout)
        doc = response.read()
        return doc
Example #6
0
    def _HTTP_request(self, url, method, xml_string="", headers=None):
        """
        Send a HTTP request via urllib2.

        :type url: str
        :param url: Complete URL of resource
        :type method: str
        :param method: HTTP method of request, e.g. "PUT". Must be one of
            ``HTTP_ACCEPTED_METHODS``.
        :type xml_string: str
        :param xml_string: XML for a send request (PUT/POST). Required for
            data methods, forbidden for no-data methods.
        :type headers: dict, optional
        :param headers: Header information for request, e.g.
            {'User-Agent': "obspyck"}
        :rtype: tuple of (int, str)
        :return: HTTP status code and status message of the response.
        :raises ValueError: If ``method`` is not an accepted HTTP method.
        :raises TypeError: If ``xml_string`` is missing for a data method or
            supplied for a no-data method.
        """
        # Fresh dict per call: the original mutable default ``headers={}``
        # would be shared across all invocations of this method.
        if headers is None:
            headers = {}
        if method not in HTTP_ACCEPTED_METHODS:
            raise ValueError("Method must be one of %s" %
                             HTTP_ACCEPTED_METHODS)
        if method in HTTP_ACCEPTED_DATA_METHODS and not xml_string:
            raise TypeError("Missing data for %s request." % method)
        elif method in HTTP_ACCEPTED_NODATA_METHODS and xml_string:
            raise TypeError("Unexpected data for %s request." % method)

        req = _RequestWithMethod(method=method,
                                 url=url,
                                 data=xml_string,
                                 headers=headers)
        # it seems the following always ends in a HTTPError even with
        # nice status codes...?!?
        try:
            response = urlopen(req)
            return response.code, response.msg
        except HTTPError as e:
            return e.code, e.msg
Example #7
0
 def _fetch(self, url, *args, **kwargs):  # @UnusedVariable
     """
     Fetch a resource from the server, retrying on transient errors.

     :type url: str
     :param url: Resource path appended to ``self.base_url``.
     :param kwargs: Query parameters. Keys listed in ``KEYWORDS`` are
         renamed to their canonical form; two-element tuples/lists are
         expanded into ``min_``/``max_`` range parameters.
     :return: Raw body of the HTTP response.
     """
     params = {}
     # map alternative keywords onto their canonical names
     for key, value in KEYWORDS.items():
         if key in kwargs:
             kwargs[value] = kwargs.pop(key)
     # check for ranges and empty values
     for key, value in kwargs.items():
         if not value and value != 0:
             continue
         # two-element tuples/lists encode a (min, max) range; the
         # original had two byte-identical branches for tuple and list
         if isinstance(value, (tuple, list)) and len(value) == 2:
             params['min_' + str(key)] = str(value[0])
             params['max_' + str(key)] = str(value[1])
         else:
             params[str(key)] = str(value)
     # replace special characters
     remoteaddr = self.base_url + url + '?' + urlencode(params)
     if self.debug:
         print(('\nRequesting %s' % (remoteaddr)))
     # certain requests randomly fail on rare occasions, retry
     for _i in range(self.retries):
         try:
             response = urlopen(remoteaddr, timeout=self.timeout)
             doc = response.read()
             return doc
         # XXX currently there are random problems with SeisHub's internal
         # XXX sql database access ("cannot operate on a closed database").
         # XXX this can be circumvented by issuing the same request again..
         except Exception:
             continue
     # final attempt outside the retry loop so the real error propagates
     response = urlopen(remoteaddr, timeout=self.timeout)
     doc = response.read()
     return doc
Example #8
0
 def _fetch(self, url, *args, **kwargs):  # @UnusedVariable
     """
     Fetch a resource from the server, retrying on transient errors.

     :type url: str
     :param url: Resource path appended to ``self.base_url``.
     :param kwargs: Query parameters. Keys listed in ``KEYWORDS`` are
         renamed to their canonical form; two-element tuples/lists are
         expanded into ``min_``/``max_`` range parameters.
     :return: Raw body of the HTTP response.
     """
     params = {}
     # map alternative keywords onto their canonical names
     for key, value in KEYWORDS.items():
         if key in kwargs:
             kwargs[value] = kwargs.pop(key)
     # check for ranges and empty values
     for key, value in kwargs.items():
         if not value and value != 0:
             continue
         # two-element tuples/lists encode a (min, max) range; the
         # original had two byte-identical branches for tuple and list
         if isinstance(value, (tuple, list)) and len(value) == 2:
             params['min_' + str(key)] = str(value[0])
             params['max_' + str(key)] = str(value[1])
         else:
             params[str(key)] = str(value)
     # replace special characters
     remoteaddr = self.base_url + url + '?' + urlencode(params)
     if self.debug:
         print(('\nRequesting %s' % (remoteaddr)))
     # certain requests randomly fail on rare occasions, retry
     for _i in range(self.retries):
         try:
             response = urlopen(remoteaddr, timeout=self.timeout)
             doc = response.read()
             return doc
         # XXX currently there are random problems with SeisHub's internal
         # XXX sql database access ("cannot operate on a closed database").
         # XXX this can be circumvented by issuing the same request again..
         except Exception:
             continue
     # final attempt outside the retry loop so the real error propagates
     response = urlopen(remoteaddr, timeout=self.timeout)
     doc = response.read()
     return doc
Example #9
0
    def read(self, data):
        """
        General parser method for XML-SEED and Dataless SEED files.

        :type data: str, bytes, file-like object or URL
        :param data: Filename, URL or XSEED/SEED string as file pointer or
            StringIO.
        :raises IOError: If a string ``data`` is neither a filename nor a
            valid URL, or if the first byte of the data is not a digit
            (SEED) or ``<`` (XSEED).
        :raises TypeError: If ``data`` is of an unsupported type.
        """
        # a second read() on the same parser instance starts from scratch
        if getattr(self, "_format", None):
            warnings.warn("Clearing parser before every subsequent read()")
            self.__init__()
        # try to transform everything into StringIO object
        if isinstance(data, (str, native_str)):
            if "://" in data:
                # some URL
                data = compatibility.urlopen(data).read()
                data = compatibility.BytesIO(data)
            elif os.path.isfile(data):
                # looks like a file - read it
                with open(data, 'rb') as f:
                    data = f.read()
                data = compatibility.BytesIO(data)
            else:
                if PY2:
                    data = compatibility.BytesIO(data)
                else:
                    raise IOError("data is neither filename nor valid URL")
        # but could also be a big string with data
        elif isinstance(data, bytes):
            data = compatibility.BytesIO(data)
        elif not hasattr(data, "read"):
            # original raised a bare TypeError without any message
            raise TypeError("data must be a filename, URL, string, bytes or "
                            "file-like object, not %s" % type(data).__name__)
        # check first byte of data StringIO object
        first_byte = data.read(1)
        data.seek(0)
        if first_byte.isdigit():
            # SEED volumes starts with a number
            self._parseSEED(data)
            self._format = 'SEED'
        elif first_byte == b'<':
            # XML files should always starts with an '<'
            self._parseXSEED(data)
            self._format = 'XSEED'
        else:
            raise IOError("First byte of data must be in [0-9<]")
Example #10
0
    def _fetch(self, url, headers=None, **params):
        """
        Send a HTTP request via urllib2.

        :type url: str
        :param url: Complete URL of resource
        :type headers: dict, optional
        :param headers: Additional header information for request
        :return: Raw body of the HTTP response.
        """
        # Copy to avoid mutating a caller's dict and to dodge the shared
        # mutable-default-argument pitfall of the original ``headers={}``.
        headers = dict(headers) if headers else {}
        headers['User-Agent'] = self.user_agent
        # NOTE(review): ``headers`` is populated but never attached to the
        # request below — looks like a latent bug; behavior left unchanged.
        # replace special characters
        remoteaddr = self.base_url + url + '?' + \
            compatibility.urlencode(params)
        if self.debug:
            print(('\nRequesting %s' % (remoteaddr)))
        response = compatibility.urlopen(remoteaddr, timeout=self.timeout)
        doc = response.read()
        return doc
Example #11
0
    def _fetch(self, url, headers=None, **params):
        """
        Send a HTTP request via urllib2.

        :type url: str
        :param url: Complete URL of resource
        :type headers: dict, optional
        :param headers: Additional header information for request
        :return: Raw body of the HTTP response.
        """
        # Copy to avoid mutating a caller's dict and to dodge the shared
        # mutable-default-argument pitfall of the original ``headers={}``.
        headers = dict(headers) if headers else {}
        headers['User-Agent'] = self.user_agent
        # NOTE(review): ``headers`` is populated but never attached to the
        # request below — looks like a latent bug; behavior left unchanged.
        # replace special characters
        remoteaddr = self.base_url + url + '?' + \
            compatibility.urlencode(params)
        if self.debug:
            print(('\nRequesting %s' % (remoteaddr)))
        response = compatibility.urlopen(remoteaddr, timeout=self.timeout)
        doc = response.read()
        return doc
Example #12
0
    def saveWaveform(self, filename, network, station, location, channel,
                     starttime, endtime, format="MSEED"):
        """
        Writes a retrieved waveform directly into a file.

        This method ensures the storage of the unmodified waveform data
        delivered by the NERIES Web service, e.g. preserving the record based
        quality flags of MiniSEED files which would be neglected reading it
        with obspy.mseed.

        :type filename: str
        :param filename: Name of the output file.
        :type network: str
        :param network: Network code, e.g. ``'BW'``.
        :type station: str
        :param station: Station code, e.g. ``'MANZ'``.
        :type location: str
        :param location: Location code, e.g. ``'01'``. Location code may
            contain wild cards.
        :type channel: str
        :param channel: Channel code, e.g. ``'EHE'``. . Channel code may
            contain wild cards.
        :type starttime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param starttime: Start date and time.
        :type endtime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param endtime: End date and time.
        :type format: ``'FSEED'`` or ``'MSEED'``, optional
        :param format: Output format. Either as full SEED (``'FSEED'``) or
            Mini-SEED (``'MSEED'``) volume. Defaults to ``'MSEED'``.
        :return: None

        .. seealso:: http://www.orfeus-eu.org/wsdl/seismolink/seismolink.wsdl

        .. rubric:: Example

        >>> from obspy.neries import Client
        >>> c = Client(user='******')
        >>> dt = UTCDateTime("2009-04-01T00:00:00")
        >>> st = c.saveWaveform("outfile.fseed", "NL", "WIT", "", "BH*",
        ...                     dt, dt+30, format="FSEED")  #doctest: +SKIP
        """
        # enable logging if debug option is set
        if self.debug:
            import logging
            logging.basicConfig(level=logging.INFO)
            logging.getLogger('suds.client').setLevel(logging.DEBUG)
        # initialize client
        client = SudsClient(SEISMOLINK_WSDL)
        # set cache of 5 days
        cache = client.options.cache
        cache.setduration(days=5)
        # create user token
        usertoken = client.factory.create('UserTokenType')
        usertoken.email = self.user
        usertoken.password = self.password
        usertoken.label = self.user_agent.replace(' ', '_')
        usertoken.locale = ""
        # create station filter
        stationid = client.factory.create('StationIdentifierType')
        stationid.NetworkCode = network
        stationid.StationCode = station
        stationid.ChannelCode = channel
        stationid.LocId = location
        # adding default record length (4096) * delta to start and end time to
        # ensure right date times
        # XXX: 4096 may be overkill
        delta = guessDelta(channel) * 4096
        stationid.TimeSpan.TimePeriod.beginPosition = \
            (UTCDateTime(starttime) - delta).strftime("%Y-%m-%dT%H:%M:%S")
        stationid.TimeSpan.TimePeriod.endPosition = \
            (UTCDateTime(endtime) + delta).strftime("%Y-%m-%dT%H:%M:%S")
        # request data
        if format == 'MSEED':
            client.options.plugins = \
                [_AttributePlugin({'DataFormat': 'MSEED'})]
        # start data request
        response = client.service.dataRequest(usertoken, stationid)
        client.options.plugins = []
        # filter for request ids
        request_ids = [r._Id for r in response.RoutedRequest]
        if not request_ids:
            return
        # check status using request ids
        _loops = 0
        while True:
            response = client.service.checkStatus(usertoken, request_ids)
            status = [r.ReadyFlag for r in response.RoutedRequest]
            # if we hit MAX_REQUESTS break the loop
            if _loops > MAX_REQUESTS:
                msg = 'MAX_REQUESTS exceeded - breaking current request loop'
                warnings.warn(msg, UserWarning)
                break
            if "false" in status:
                # retry until all are set to 'true'
                _loops += 1
                continue
            break
        # keep only request ids which are fulfilled and have 'status = OK'
        request_ids = [r._Id for r in response.RoutedRequest
                       if 'Status: OK' in r.StatusDescription
                       and r.Fulfillment == 100]
        if not request_ids:
            return
        # retrieve download URLs using request ids
        response = client.service.dataRetrieve(usertoken, request_ids)
        urls = [r.DownloadToken.DownloadURL for r in response.DataItem]
        # create file handler if a file name is given
        if isinstance(filename, (str, native_str)):
            fh = open(filename, "wb")
        elif hasattr(filename, "write"):
            fh = filename
        else:
            msg = "Parameter filename must be either string or file handler."
            raise TypeError(msg)
        # download every fulfilled request; close the handle even if one of
        # the downloads raises (the original leaked ``fh`` in that case)
        try:
            for url in urls:
                fh.write(compatibility.urlopen(url).read())
        finally:
            # only close handles we opened ourselves
            if isinstance(filename, (str, native_str)):
                fh.close()
        # clean up
        response = client.service.purgeData(usertoken, request_ids)
Example #13
0
    def saveWaveform(self,
                     filename,
                     network,
                     station,
                     location,
                     channel,
                     starttime,
                     endtime,
                     format="MSEED"):
        """
        Writes a retrieved waveform directly into a file.

        This method ensures the storage of the unmodified waveform data
        delivered by the NERIES Web service, e.g. preserving the record based
        quality flags of MiniSEED files which would be neglected reading it
        with obspy.mseed.

        :type filename: str
        :param filename: Name of the output file.
        :type network: str
        :param network: Network code, e.g. ``'BW'``.
        :type station: str
        :param station: Station code, e.g. ``'MANZ'``.
        :type location: str
        :param location: Location code, e.g. ``'01'``. Location code may
            contain wild cards.
        :type channel: str
        :param channel: Channel code, e.g. ``'EHE'``. . Channel code may
            contain wild cards.
        :type starttime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param starttime: Start date and time.
        :type endtime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param endtime: End date and time.
        :type format: ``'FSEED'`` or ``'MSEED'``, optional
        :param format: Output format. Either as full SEED (``'FSEED'``) or
            Mini-SEED (``'MSEED'``) volume. Defaults to ``'MSEED'``.
        :return: None

        .. seealso:: http://www.orfeus-eu.org/wsdl/seismolink/seismolink.wsdl

        .. rubric:: Example

        >>> from obspy.neries import Client
        >>> c = Client(user='******')
        >>> dt = UTCDateTime("2009-04-01T00:00:00")
        >>> st = c.saveWaveform("outfile.fseed", "NL", "WIT", "", "BH*",
        ...                     dt, dt+30, format="FSEED")  #doctest: +SKIP
        """
        # enable logging if debug option is set
        if self.debug:
            import logging
            logging.basicConfig(level=logging.INFO)
            logging.getLogger('suds.client').setLevel(logging.DEBUG)
        # initialize client
        client = SudsClient(SEISMOLINK_WSDL)
        # set cache of 5 days
        cache = client.options.cache
        cache.setduration(days=5)
        # create user token
        usertoken = client.factory.create('UserTokenType')
        usertoken.email = self.user
        usertoken.password = self.password
        usertoken.label = self.user_agent.replace(' ', '_')
        usertoken.locale = ""
        # create station filter
        stationid = client.factory.create('StationIdentifierType')
        stationid.NetworkCode = network
        stationid.StationCode = station
        stationid.ChannelCode = channel
        stationid.LocId = location
        # adding default record length (4096) * delta to start and end time to
        # ensure right date times
        # XXX: 4096 may be overkill
        delta = guessDelta(channel) * 4096
        stationid.TimeSpan.TimePeriod.beginPosition = \
            (UTCDateTime(starttime) - delta).strftime("%Y-%m-%dT%H:%M:%S")
        stationid.TimeSpan.TimePeriod.endPosition = \
            (UTCDateTime(endtime) + delta).strftime("%Y-%m-%dT%H:%M:%S")
        # request data
        if format == 'MSEED':
            client.options.plugins = \
                [_AttributePlugin({'DataFormat': 'MSEED'})]
        # start data request
        response = client.service.dataRequest(usertoken, stationid)
        client.options.plugins = []
        # filter for request ids
        request_ids = [r._Id for r in response.RoutedRequest]
        if not request_ids:
            return
        # check status using request ids
        _loops = 0
        while True:
            response = client.service.checkStatus(usertoken, request_ids)
            status = [r.ReadyFlag for r in response.RoutedRequest]
            # if we hit MAX_REQUESTS break the loop
            if _loops > MAX_REQUESTS:
                msg = 'MAX_REQUESTS exceeded - breaking current request loop'
                warnings.warn(msg, UserWarning)
                break
            if "false" in status:
                # retry until all are set to 'true'
                _loops += 1
                continue
            break
        # keep only request ids which are fulfilled and have 'status = OK'
        request_ids = [
            r._Id for r in response.RoutedRequest
            if 'Status: OK' in r.StatusDescription and r.Fulfillment == 100
        ]
        if not request_ids:
            return
        # retrieve download URLs using request ids
        response = client.service.dataRetrieve(usertoken, request_ids)
        urls = [r.DownloadToken.DownloadURL for r in response.DataItem]
        # create file handler if a file name is given
        if isinstance(filename, (str, native_str)):
            fh = open(filename, "wb")
        elif hasattr(filename, "write"):
            fh = filename
        else:
            msg = "Parameter filename must be either string or file handler."
            raise TypeError(msg)
        # download every fulfilled request; close the handle even if one of
        # the downloads raises (the original leaked ``fh`` in that case)
        try:
            for url in urls:
                fh.write(compatibility.urlopen(url).read())
        finally:
            # only close handles we opened ourselves
            if isinstance(filename, (str, native_str)):
                fh.close()
        # clean up
        response = client.service.purgeData(usertoken, request_ids)