Пример #1
0
    def test_validate_base_url(self):
        """
        Tests the _validate_base_url() method.
        """
        # Registered provider URLs plus hostnames, IPv4 and IPv6 literals
        # (with optional ports and paths) must all validate.
        urls_expected_ok = list(URL_MAPPINGS.values()) + [
            "http://arclink.ethz.ch", "http://example.org",
            "https://webservices.rm.ingv.it", "http://localhost:8080/test/",
            "http://93.63.40.85/", "http://[::1]:80/test/",
            "http://[2001:db8:85a3:8d3:1319:8a2e:370:7348]",
            "http://[2001:db8::ff00:42:8329]", "http://[::ffff:192.168.89.9]",
            "http://jane", "http://localhost",
        ]

        # Empty hosts, malformed dotted quads and bad IPv6 bracket
        # literals must all be rejected.
        urls_expected_bad = [
            "http://",
            "http://127.0.1",
            "http://127.=.0.1",
            "http://127.0.0.0.1",
            "http://[]",
            "http://[1]",
            "http://[1:2]",
            "http://[1::2::3]",
            "http://[1::2:3::4]",
            "http://[1:2:2:4:5:6:7]",
        ]

        for url in urls_expected_ok:
            self.assertEqual(self.client._validate_base_url(url), True)

        for url in urls_expected_bad:
            self.assertEqual(self.client._validate_base_url(url), False)
Пример #2
0
    def __init__(self, providers=None):
        """
        Build the provider list and initialize one FDSN client per provider.

        :param providers: Iterable of provider-name strings. If None, all
            providers from ``URL_MAPPINGS`` are used, with IRIS first and
            the rest sorted alphabetically (ORFEUS is deliberately
            excluded).
        """
        # Bit of a hack: make sure the INGV endpoint is always present in
        # the shared mapping before reading it.
        URL_MAPPINGS["INGV"] = "http://webservices.rm.ingv.it"

        if providers is None:
            providers = dict(URL_MAPPINGS.items())
            # Make sure IRIS is first; the remaining items are sorted
            # alphabetically.
            _p = []
            if "IRIS" in providers:
                _p.append("IRIS")
                del providers["IRIS"]
            # ORFEUS is excluded on purpose; pop() guards against a
            # KeyError if the mapping ever stops defining it (the
            # original unconditional ``del`` would crash in that case).
            providers.pop("ORFEUS", None)
            _p.extend(sorted(providers))
            providers = _p

        self.providers = tuple(providers)

        # Initialize all clients.
        self._initialized_clients = OrderedDict()
        self.__initialize_clients()
Пример #3
0
def get_multiple_fdsn_clients(
        clients: "Union[List[str], str, None]") -> Tuple[Client]:
    """
    Returns a tuple holding all the queried fdsn providers. Also finds
    all available fdsn providers if input is None. Will also sort the clients
    so the big providers (IRIS and ORFEUS) come last.

    Just a modified version of a code in the obspy mass downloader.

    :param clients: List of strings, each describing one client.
        Put None if you want to use all available.
    :type clients: List[str] or str or None
    :return: Tuple of the fdsn provider names (a single string input is
        wrapped in a one-element list).
    :rtype: Tuple[Client]
    """
    # NOTE: the annotation used to be ``List[str] or str or None`` — Python
    # evaluates that ``or`` chain at def time to just ``List[str]``.  The
    # string (forward-reference) form documents the real union without
    # needing ``typing.Union`` imported at runtime.

    # That bit is stolen from the massdownloader
    if isinstance(clients, str):
        clients = [clients]
    elif clients is None:
        providers = dict(URL_MAPPINGS.items())

        # exclude RASPISHAKE by default
        providers.pop("RASPISHAKE", None)

        # Remember the big providers so they can be appended last.
        has_iris = "IRIS" in providers
        providers.pop("IRIS", None)

        # ODC is the same data center as ORFEUS; fold it in.
        if "ODC" in providers:
            providers["ORFEUS"] = providers.pop("ODC")

        has_orfeus = "ORFEUS" in providers
        providers.pop("ORFEUS", None)

        _p = sorted(providers)
        if has_orfeus:
            _p.append("ORFEUS")
        if has_iris:
            _p.append("IRIS")

        clients = tuple(_p)
    return clients
Пример #4
0
def event(baseurl='IRIS',
          starttime=None,
          endtime=None,
          minmag=3.,
          maxmag=10.,
          maxnumber=10,
          catalog=None):
    """Query an FDSN event service and print the matching events.

    Prints the catalog summary to stderr and each event's resource id to
    stdout.  If the client cannot be created, the known provider keys are
    listed on stderr and the process exits.

    :param baseurl: FDSN provider key or service root URL.
    :param starttime: Start of the search window (anything ``UTCDateTime``
        accepts); falls back to "one year ago" when missing/unparsable.
    :param endtime: End of the search window; falls back to "now".
    :param minmag: Minimum magnitude.
    :param maxmag: Maximum magnitude.
    :param maxnumber: Maximum number of events returned.
    :param catalog: Optional catalog name to restrict the query.
    """
    try:
        client = Client(baseurl)
        print("Using " + baseurl + "...", file=sys.stderr)
    except Exception:
        # Diagnostics go to stderr, consistent with the other messages.
        print("fdsn client failed", file=sys.stderr)
        from obspy.clients.fdsn.header import URL_MAPPINGS
        for key in sorted(URL_MAPPINGS.keys()):
            print("{0:<7} {1}".format(key, URL_MAPPINGS[key]), file=sys.stderr)

        sys.exit()

    try:
        starttime = UTCDateTime(starttime)
        endtime = UTCDateTime(endtime)
    except Exception:
        # Missing or unparsable limits: default to the last year.
        starttime = UTCDateTime() - 365 * 24 * 60 * 60
        endtime = UTCDateTime()
        print('Auto time limit:', file=sys.stderr)
        print(starttime, file=sys.stderr)
        print(endtime, file=sys.stderr)

    # Build the query once instead of duplicating the get_events() call.
    query = dict(limit=maxnumber,
                 orderby="magnitude",
                 starttime=starttime,
                 endtime=endtime,
                 minmagnitude=minmag,
                 maxmagnitude=maxmag)
    if catalog:
        query['catalog'] = catalog
    cat = client.get_events(**query)

    print(cat.__str__(print_all=True), file=sys.stderr)
    for e in cat:
        print(e.resource_id)
Пример #5
0
    def __init__(self, providers=None, debug=False):
        """
        Collect the providers to query and set up one client per provider.

        When *providers* is None, every provider ObsPy knows is used,
        sorted alphabetically except that ORFEUS comes second to last and
        IRIS last: smaller data centers tend to have fresher metadata
        about their own stations, so they are queried first.
        """
        self.debug = debug
        if providers is None:
            remaining = dict(URL_MAPPINGS.items())

            # RASPISHAKE is excluded by default.
            remaining.pop("RASPISHAKE", None)

            # Remember whether the two big data centers were present so
            # they can be re-appended at the very end.
            has_iris = "IRIS" in remaining
            remaining.pop("IRIS", None)

            # ODC is the same data center as ORFEUS; fold it in.
            if "ODC" in remaining:
                remaining["ORFEUS"] = remaining.pop("ODC")

            has_orfeus = "ORFEUS" in remaining
            remaining.pop("ORFEUS", None)

            ordered = sorted(remaining)
            if has_orfeus:
                ordered.append("ORFEUS")
            if has_iris:
                ordered.append("IRIS")

            providers = ordered

        self.providers = tuple(providers)

        # Initialize all clients.
        self._initialized_clients = collections.OrderedDict()
        self._initialize_clients()
Пример #6
0
    def __init__(self, pyweed, parent=None):
        """Build the preferences dialog and populate the data-center pickers."""
        super(PreferencesDialog, self).__init__(parent=parent)
        self.setupUi(self)

        self.pyweed = pyweed

        # Alphabetical list of every FDSN data center ObsPy knows about.
        self.data_centers = sorted(URL_MAPPINGS.keys())

        # Each combobox entry shows "CODE: url" but stores just the code.
        for dc in self.data_centers:
            item_label = "%s: %s" % (dc, URL_MAPPINGS[dc])
            self.eventDataCenterComboBox.addItem(item_label, dc)
            self.stationDataCenterComboBox.addItem(item_label, dc)

        self.okButton.pressed.connect(self.accept)
        self.cancelButton.pressed.connect(self.reject)
Пример #7
0
    def __init__(self, pyweed, parent=None):
        """Set up the dialog UI and fill both data-center comboboxes."""
        super(PreferencesDialog, self).__init__(parent=parent)
        self.setupUi(self)

        self.pyweed = pyweed

        # All known data centers, sorted for a stable presentation order.
        self.data_centers = sorted(URL_MAPPINGS)

        # The label shows "CODE: url"; the user data stays the bare code.
        boxes = (self.eventDataCenterComboBox, self.stationDataCenterComboBox)
        for data_center in self.data_centers:
            label = "%s: %s" % (data_center, URL_MAPPINGS[data_center])
            for box in boxes:
                box.addItem(label, data_center)

        self.okButton.pressed.connect(self.accept)
        self.cancelButton.pressed.connect(self.reject)
Пример #8
0
def getClients(starttime, endtime, lslat, lslon, radius=200.):
    """
    Returns list of valid clients/network codes to request data from in
    getStreamObject, given landslide coordinates and a time range to search for
    data in.
    INPUTS
    starttime (UTCDateTime) - start time of stream object
    endtime (UTCDateTime) - end time of stream object
    lslat (float) - latitudinal coordinate of landslide (make negative for south
        of Equator)
    lslon (float) - longitudinal coordinate of landslide (make negative for west
        of Prime Meridian)
    radius (float) - optional; search radius in km for finding nearest seismic
        stations
    OUTPUT
    valid_clients (list of strings) - list of FDSN network codes that will return
        seismic traces
    """
    valid_clients = []

    # Seismic channels to search for
    channels = 'EHZ,BHZ,HHZ'

    # Search for data within initial radius
    print('Retrieving data from stations within %i km of event...' %
          int(radius))
    # Try every FDSN client ObsPy knows about and keep the ones that
    # return data without raising.
    for client in sorted(URL_MAPPINGS.keys()):
        try:
            reviewData.getepidata(lslat,
                                  lslon,
                                  starttime,
                                  tstart=0.,
                                  tend=endtime - starttime,
                                  minradiuskm=0.,
                                  maxradiuskm=radius,
                                  chanuse=channels,
                                  location='*',
                                  clientnames=client)
            valid_clients.append(client)
        except Exception:
            # This provider has no matching data (or the request failed);
            # skip it.  A bare ``except:`` here would also swallow
            # KeyboardInterrupt/SystemExit.
            pass

    return valid_clients
Пример #9
0
    def test_validate_base_url(self):
        """
        Tests the _validate_base_url() method.
        """
        # All registered provider URLs plus assorted hostname, IPv4 and
        # IPv6 forms must be accepted.
        accepted = list(URL_MAPPINGS.values())
        accepted += [
            "http://arclink.ethz.ch",
            "http://example.org",
            "https://webservices.rm.ingv.it",
            "http://localhost:8080/test/",
            "http://93.63.40.85/",
            "http://[::1]:80/test/",
            "http://[2001:db8:85a3:8d3:1319:8a2e:370:7348]",
            "http://[2001:db8::ff00:42:8329]",
            "http://[::ffff:192.168.89.9]",
            "http://jane",
            "http://localhost",
        ]

        # Malformed hosts and invalid IPv6 literals must be rejected.
        rejected = [
            "http://",
            "http://127.0.1",
            "http://127.=.0.1",
            "http://127.0.0.0.1",
            "http://[]",
            "http://[1]",
            "http://[1:2]",
            "http://[1::2::3]",
            "http://[1::2:3::4]",
            "http://[1:2:2:4:5:6:7]",
        ]

        for url in accepted:
            self.assertEqual(self.client._validate_base_url(url), True)

        for url in rejected:
            self.assertEqual(self.client._validate_base_url(url), False)
    def __buildUI__(self):
        """Lay out the 'Reconcile Stations' dialog widgets top to bottom."""
        self.setWindowTitle('InfraView - Reconcile Stations')

        intro = QLabel(self.tr(
            'This will ATTEMPT to download the station info for the following Stations.'))
        intro.setWordWrap(True)

        # Service selector, defaulting to IRIS.
        self.cb = QComboBox()
        service_label = QLabel(self.tr('Service: '))
        self.cb.addItems(sorted(URL_MAPPINGS.keys()))
        self.cb.setCurrentText('IRIS')

        self.stationListEdit = QListWidget()
        self.stationListEdit.setMinimumWidth(300)

        self.statusLine = QLabel(' ')

        self.attemptButton = QPushButton(self.tr('Attempt to Download'))

        # OK / Cancel buttons, with OK relabelled.
        button_box = QDialogButtonBox(
            QDialogButtonBox.Ok | QDialogButtonBox.Cancel, Qt.Horizontal, self)
        button_box.button(QDialogButtonBox.Ok).setText('Add to Station List')
        button_box.accepted.connect(self.accept)
        button_box.rejected.connect(self.reject)

        # Stack everything vertically in display order.
        main_layout = QVBoxLayout()
        for widget in (intro, service_label, self.cb, self.stationListEdit,
                       self.attemptButton, self.statusLine, button_box):
            main_layout.addWidget(widget)

        self.setLayout(main_layout)

        self.connectSignalsandSlots()
Пример #11
0
def event(baseurl='IRIS', playback='date', **kwargs):
    """Query an FDSN event service and print playback strings per event.

    :param baseurl: FDSN provider key or service root URL.
    :param playback: Format string.  If it contains 'evid' it is filled
        with each event's quoted resource id; otherwise it is filled with
        a quoted time window around each event's preferred origin time.
    :param kwargs: Passed through to ``client.get_events``; 'starttime'
        and 'endtime' are coerced to ``UTCDateTime`` (defaulting to the
        last year when missing/unparsable).
    """
    try:
        client = Client(baseurl)
        print("Using " + baseurl + "...", file=sys.stderr)
    except Exception:
        print("fdsn client failed", file=sys.stderr)
        from obspy.clients.fdsn.header import URL_MAPPINGS
        for key in sorted(URL_MAPPINGS.keys()):
            print("{0:<7} {1}".format(key, URL_MAPPINGS[key]), file=sys.stderr)

        sys.exit()

    try:
        kwargs['starttime'] = UTCDateTime(kwargs['starttime'])
        kwargs['endtime'] = UTCDateTime(kwargs['endtime'])
    except Exception:
        kwargs['starttime'] = UTCDateTime() - 365 * 24 * 60 * 60
        kwargs['endtime'] = UTCDateTime()
        print('Auto time limit:', file=sys.stderr)
        # BUG FIX: this branch used to print the undefined names
        # ``starttime``/``endtime``, raising NameError whenever it ran.
        print(kwargs['starttime'], file=sys.stderr)
        print(kwargs['endtime'], file=sys.stderr)

    cat = client.get_events(**kwargs)

    print(cat.__str__(print_all=True), file=sys.stderr)

    if 'evid' in playback:
        for e in cat.events:
            print(playback % ("\"" + e.resource_id + "\""))
    else:
        for e in cat.events:
            o = e.preferred_origin_id.get_referred_object()
            print(playback %
                  ((o.time - 60 * 3 / 9).strftime("\"%Y-%m-%d %H:%M:%S\""),
                   (o.time + 60 * 6 / 9).strftime("\"%Y-%m-%d %H:%M:%S\"")))
Пример #12
0
    def __init__(self, providers=None, debug=False):
        """
        Resolve the provider list and initialize one client per provider.

        Without an explicit *providers* list, all ObsPy-known providers
        are used in alphabetical order with ORFEUS second to last and
        IRIS last, so that smaller data centers (which tend to have more
        up-to-date metadata about their own data) are queried first.
        """
        self.debug = debug
        if providers is None:
            mapping = dict(URL_MAPPINGS.items())

            # Pull the two big data centers out so they can be appended
            # at the very end of the list.
            append_iris = "IRIS" in mapping
            mapping.pop("IRIS", None)

            # ODC is just another name for ORFEUS; fold it in.
            if "ODC" in mapping:
                mapping["ORFEUS"] = mapping.pop("ODC")

            append_orfeus = "ORFEUS" in mapping
            mapping.pop("ORFEUS", None)

            ordered = sorted(mapping)
            if append_orfeus:
                ordered.append("ORFEUS")
            if append_iris:
                ordered.append("IRIS")

            providers = ordered

        self.providers = tuple(providers)

        # Initialize all clients.
        self._initialized_clients = OrderedDict()
        self._initialize_clients()
Пример #13
0
    def __init__(self, providers=None, debug=False):
        """
        Store the provider order and spin up the per-provider clients.

        Default order: every provider ObsPy knows, alphabetical, with
        ORFEUS second to last and IRIS last — smaller data centers are
        expected to have fresher metadata about their own holdings, so
        they get priority.
        """
        self.debug = debug
        if providers is None:
            pool = dict(URL_MAPPINGS.items())

            has_iris = "IRIS" in pool
            pool.pop("IRIS", None)

            # Treat ODC as ORFEUS (same data center).
            if "ODC" in pool:
                pool["ORFEUS"] = pool.pop("ODC")

            has_orfeus = "ORFEUS" in pool
            pool.pop("ORFEUS", None)

            # The big providers form the tail, in ORFEUS-then-IRIS order.
            tail = (["ORFEUS"] if has_orfeus else []) \
                + (["IRIS"] if has_iris else [])
            providers = sorted(pool) + tail

        self.providers = tuple(providers)

        # Initialize all clients.
        self._initialized_clients = OrderedDict()
        self._initialize_clients()
Пример #14
0
    def retrieveData(self):
        """Retrieve data from many FDSN services, turn into StreamCollection.

        Uses the event metadata stored on this fetcher (time, lat, lon,
        radius, and the Restrictions settings) to drive ObsPy's
        MassDownloader across every FDSN provider ObsPy knows about.

        Returns:
            StreamCollection: StreamCollection object.
        """
        # Download into the configured raw directory, or a fresh temp dir.
        rawdir = self.rawdir
        if self.rawdir is None:
            rawdir = tempfile.mkdtemp()
        elif not os.path.isdir(rawdir):
            os.makedirs(rawdir)

        # use the mass downloader to retrieve data of interest from any FSDN
        # service.
        origin_time = UTCDateTime(self.time)

        # The ObsPy mass downloader has its own logger - grab that stream
        # and write it to our own log file.
        ldict = logging.Logger.manager.loggerDict
        if OBSPY_LOGGER in ldict:
            root = logging.getLogger()
            fhandler = root.handlers[0]
            obspy_logger = logging.getLogger(OBSPY_LOGGER)
            obspy_stream_handler = obspy_logger.handlers[0]
            obspy_logger.removeHandler(obspy_stream_handler)
            obspy_logger.addHandler(fhandler)

        # Circular domain around the epicenter.
        domain = CircularDomain(latitude=self.lat,
                                longitude=self.lon,
                                minradius=0,
                                maxradius=self.radius)

        restrictions = Restrictions(
            # Define the temporal bounds of the waveform data.
            starttime=origin_time - self.time_before,
            endtime=origin_time + self.time_after,
            network=self.network,
            station='*',
            location='*',
            location_priorities=['*'],
            reject_channels_with_gaps=self.reject_channels_with_gaps,
            # Any trace that is shorter than 95 % of the
            # desired total duration will be discarded.
            minimum_length=self.minimum_length,
            sanitize=self.sanitize,
            minimum_interstation_distance_in_m=(
                self.minimum_interstation_distance_in_m),
            exclude_networks=self.exclude_networks,
            exclude_stations=self.exclude_stations,
            channel_priorities=self.channels)

        # For each of the providers, check if we have a username and password
        # provided in the config. If we do, initialize the client with the
        # username and password. Otherwise, use default initialization.
        fdsn_config = self.config['fetchers']['FDSNFetcher']
        client_list = []
        for provider_str in URL_MAPPINGS.keys():
            if provider_str in fdsn_config:
                client = Client(provider_str,
                                user=fdsn_config[provider_str]['user'],
                                password=fdsn_config[provider_str]['password'])
            else:
                client = Client(provider_str)
            client_list.append(client)

        # Pass off the initialized clients to the Mass Downloader.
        mdl = MassDownloader(providers=client_list)

        # We can have a problem of file overlap, so remove existing mseed,
        # png and xml files from the raw directory first.
        logging.info('Deleting old MiniSEED files...')
        delete_old_files(rawdir, '*.mseed')

        logging.info('Deleting old PNG files...')
        delete_old_files(rawdir, '*.png')

        logging.info('Deleting old XML files...')
        delete_old_files(rawdir, '*.xml')

        logging.info('Downloading new MiniSEED files...')
        # The data will be downloaded to the ``./waveforms/`` and
        # ``./stations/`` folders with automatically chosen file names.
        mdl.download(domain,
                     restrictions,
                     mseed_storage=rawdir,
                     stationxml_storage=rawdir)

        # Read every downloaded MiniSEED file back in and bundle the
        # traces into a single StreamCollection.
        streams = []
        for seed_file in glob.glob(os.path.join(rawdir, '*.mseed')):
            streams += read_fdsn(seed_file)

        stream_collection = StreamCollection(streams=streams,
                                             drop_non_free=self.drop_non_free)
        return stream_collection
Пример #15
0
    def __init__(self, providers=None, debug=False, configure_logging=True):
        """
        Configure logging, resolve the provider order and build clients.

        Default provider order: everything ObsPy knows, alphabetical,
        with ORFEUS second to last and IRIS last — smaller data centers
        can be expected to have fresher metadata about their own data, so
        they are prioritized when downloading.
        """
        if configure_logging:
            # Route INFO+ messages to the console with a timestamped
            # format; keep them out of ancestor loggers.
            logger.setLevel(logging.DEBUG)
            logger.propagate = 0
            console = logging.StreamHandler()
            console.setLevel(logging.INFO)
            console.setFormatter(logging.Formatter(
                "[%(asctime)s] - %(name)s - %(levelname)s: %(message)s"))
            logger.addHandler(console)

        self.debug = debug
        if providers is None:
            candidates = dict(URL_MAPPINGS.items())

            # exclude RASPISHAKE by default
            candidates.pop("RASPISHAKE", None)

            saw_iris = "IRIS" in candidates
            candidates.pop("IRIS", None)

            # ODC is the same data center as ORFEUS; fold it in.
            if "ODC" in candidates:
                candidates["ORFEUS"] = candidates.pop("ODC")

            saw_orfeus = "ORFEUS" in candidates
            candidates.pop("ORFEUS", None)

            # leave out IRISPH5 which is for nodal experiments and might
            # match insanely large datasets, depending on restrictions
            candidates.pop("IRISPH5", None)

            ordered = sorted(candidates)
            if saw_orfeus:
                ordered.append("ORFEUS")
            if saw_iris:
                ordered.append("IRIS")

            providers = ordered

        self.providers = tuple(providers)

        # Initialize all clients.
        self._initialized_clients = collections.OrderedDict()
        self._initialize_clients()
Пример #16
0
#!/usr/bin/env python
"""List every FDSN web service provider ObsPy knows about, one per line."""

from obspy.clients.fdsn.header import URL_MAPPINGS

# Keys are provider short names; values are the service root URLs.
for provider in sorted(URL_MAPPINGS):
    print("{0:<7} {1}".format(provider, URL_MAPPINGS[provider]))
Пример #17
0
    def __buildUI__(self):
        """Assemble the waveform-request panel: service picker, SEED query
        fields, time controls, waveform buttons and the station list."""

        # Put together the options container
        gridLayout = QGridLayout()
        optionsContainer = QWidget()
        optionsContainer.setLayout(gridLayout)

        # First lets populate the client drop down
        self.cb = QComboBox()
        label_service_name = QLabel(self.tr('Service:'))

        # Copy first: ``fdsn_dictionary = URL_MAPPINGS`` aliased the shared
        # ObsPy dict, so the update() below mutated it process-wide.
        fdsn_dictionary = dict(URL_MAPPINGS)
        # Typo fix in the domain: "rasberryshakedata" -> "raspberryshakedata".
        fdsn_dictionary.update(
            {'RaspShake': 'https://fdsnws.raspberryshakedata.com'})

        # Iterate the extended copy so RaspShake still appears in the
        # dropdown without touching the global mapping.
        for key in sorted(fdsn_dictionary.keys()):
            self.cb.addItem(key)
        self.cb.setCurrentText('IRIS')
        self.cb.currentIndexChanged[str].connect(self.onActivated_cb)

        validator = IPValidator(self)
        label_network_name = QLabel(self.tr('Network: '))
        self.networkNameBox = QLineEdit()
        self.networkNameBox.setToolTip(
            'Wildcards OK \nCan be SEED network codes or data center defined codes. \nMultiple codes are comma-separated (e.g. "IU,TA").'
        )
        self.networkNameBox.setValidator(validator)

        label_station_name = QLabel(self.tr('Station: '))
        self.stationNameBox = QLineEdit()
        self.stationNameBox.setToolTip(
            'Wildcards OK \nOne or more SEED station codes. \nMultiple codes are comma-separated (e.g. "ANMO,PFO")'
        )
        self.stationNameBox.setValidator(validator)

        label_location_str = QLabel(self.tr('Location:'))
        self.location_Box = QLineEdit('*')
        self.location_Box.setToolTip(
            'Wildcards OK \nOne or more SEED location identifiers. \nMultiple identifiers are comma-separated (e.g. "00,01"). \nAs a special case “--“ (two dashes) will be translated to a string of two space characters to match blank location IDs.'
        )
        self.location_Box.setValidator(validator)

        label_channel_str = QLabel(self.tr('Channel:'))
        self.channel_Box = QLineEdit('*')
        self.channel_Box.setToolTip(
            'Wildcards OK \nOne or more SEED channel codes. \nMultiple codes are comma-separated (e.g. "BHZ,HHZ")'
        )
        self.channel_Box.setValidator(validator)

        label_startDate = QLabel(self.tr('Start Date (UTC):'))
        self.startDate_edit = QDateEdit()
        self.startDate_edit.setMinimumDate(QDate(1900, 1, 1))
        self.startDate_edit.setDisplayFormat('yyyy-MM-dd')
        self.startDate_edit.setDate(self.startDate_edit.minimumDate())

        label_startTime = QLabel(self.tr('Start Time (UTC):'))
        self.startTime_edit = QTimeEdit()
        self.startTime_edit.setDisplayFormat('HH:mm:ss.zzz')

        label_traceLength = QLabel(self.tr('Trace Length (s)'))
        self.traceLength_t = QSpinBox()
        self.traceLength_t.setMinimum(1)
        self.traceLength_t.setMaximum(999999999)
        self.traceLength_t.setValue(3600)

        replaceWaveButton = QPushButton('Replace')
        replaceWaveButton.clicked.connect(self.onClicked_replace)
        appendWaveButton = QPushButton('Append')
        appendWaveButton.clicked.connect(self.onClicked_append)

        self.stationListWidget = QListWidget()
        self.stationListWidget.setSelectionMode(
            QAbstractItemView.ExtendedSelection)
        self.stationListWidget.itemSelectionChanged.connect(
            self.populateStationInfoFromStationList)

        self.browserButton = QPushButton('Station Browser')
        self.browserButton.clicked.connect(self.onClicked_browserButton)

        gridLayout.addWidget(label_service_name, 0, 0)
        gridLayout.addWidget(self.cb, 0, 1)
        gridLayout.addWidget(label_network_name, 1, 0)
        gridLayout.addWidget(self.networkNameBox, 1, 1)
        gridLayout.addWidget(label_station_name, 2, 0)
        gridLayout.addWidget(self.stationNameBox, 2, 1)
        gridLayout.addWidget(label_location_str, 3, 0)
        gridLayout.addWidget(self.location_Box, 3, 1)
        gridLayout.addWidget(label_channel_str, 4, 0)
        gridLayout.addWidget(self.channel_Box, 4, 1)
        gridLayout.addWidget(label_startDate, 5, 0)
        gridLayout.addWidget(self.startDate_edit, 5, 1)
        gridLayout.addWidget(label_startTime, 6, 0)
        gridLayout.addWidget(self.startTime_edit, 6, 1)
        gridLayout.addWidget(label_traceLength, 7, 0)
        gridLayout.addWidget(self.traceLength_t, 7, 1)

        horzLayout = QHBoxLayout(self)
        horzLayout.addWidget(replaceWaveButton)
        horzLayout.addWidget(appendWaveButton)
        addGroupBox = QGroupBox("Get Waveform(s)")
        addGroupBox.setLayout(horzLayout)

        vertlayout = QVBoxLayout(self)
        vertlayout.addWidget(optionsContainer)
        vertlayout.addWidget(addGroupBox)
        vertlayout.addWidget(self.stationListWidget)
        vertlayout.addWidget(self.browserButton)

        self.setLayout(vertlayout)

        # Create the station dialog once; later invocations just exec_() it.
        self.stationDialog = IPStationBrowser.IPStationDialog()
Пример #18
0
def event(baseurl='IRIS',
          playback=None,
          days=1,
          qml=None,
          fin=None,
          tin=None,
          country=None,
          **kwargs):
    """Fetch an event catalog from an FDSN web service and export/print it.

    Parameters
    ----------
    baseurl : str
        FDSN provider key or base URL handed to the obspy ``Client``.
    playback : str, optional
        printf-style template printed once per event.  If it contains the
        substring ``'evid'`` the quoted event resource id is substituted,
        otherwise a quoted (start, end) time window around the origin.
    days : int or float
        Length of the default time window (ending now) used when the
        ``starttime``/``endtime`` kwargs are missing or unparsable.
    qml : str, optional
        Path to write the catalog to in SC3ML format.
    fin : str, optional
        Path to write one event resource id per line.
    tin : str, optional
        Path to write one "start end" line per event, spanning origin
        time -20 s to +40 s.
    country : str, optional
        Keep only events whose reverse-geocoded country code occurs in
        this string; the catalog is re-requested (via offset/limit)
        until it is stable.
    **kwargs
        Forwarded to ``client.get_events`` (e.g. ``minmagnitude``).
    """
    try:
        client = Client(baseurl)
        print("Using " + baseurl + "...", file=sys.stderr)
    except Exception:
        # Unknown/unreachable provider: list the valid FDSN provider keys
        # and abort.  (Was a bare `except:`, which would also have
        # swallowed SystemExit/KeyboardInterrupt.)
        print("fdsn client failed", file=sys.stderr)
        from obspy.clients.fdsn.header import URL_MAPPINGS
        for key in sorted(URL_MAPPINGS.keys()):
            print("{0:<7} {1}".format(key, URL_MAPPINGS[key]), file=sys.stderr)

        sys.exit()

    # Normalize user-supplied time bounds; fall back to the trailing
    # `days`-long window when they are absent or invalid.
    try:
        kwargs['starttime'] = UTCDateTime(kwargs['starttime'])
        kwargs['endtime'] = UTCDateTime(kwargs['endtime'])
    except Exception:
        kwargs['starttime'] = UTCDateTime() - float(days) * 24 * 60 * 60
        kwargs['endtime'] = UTCDateTime()

    print('kwargs:', kwargs, file=sys.stderr)
    try:
        cat = client.get_events(**kwargs)
    except Exception:
        print('No response.', file=sys.stderr)
        sys.exit()

    if country is not None:
        # Iteratively drop events outside `country` and top the catalog up
        # with offset/limit requests until a full pass removes nothing.
        ok = False
        limit = len(cat.events)
        kwargs['offset'] = len(cat.events)
        while not ok:
            ok = True
            remove = []
            kwargs['limit'] = 0
            for i, e in enumerate(cat.events):
                lalo = [
                    e.preferred_origin().latitude,
                    e.preferred_origin().longitude
                ]
                # Reverse geocode the epicenter to obtain a country code.
                gcode = geocoder.osm(lalo, method='reverse').json
                if gcode['country_code'].lower() not in country.lower():
                    # Request one replacement event per removed event.
                    kwargs['limit'] += 1
                    ok = False
                    remove += [e]
                    print('removing %d (%s, %s): %s (requesting %d after %d)' %
                          (i, lalo[0], lalo[1], gcode['country_code'],
                           kwargs['limit'], kwargs['offset']),
                          file=sys.stderr)
            if not ok:
                for e in remove:
                    cat.events.remove(e)
                if len(cat.events) >= limit:
                    print('Clean stable catalog of %d events' %
                          len(cat.events),
                          file=sys.stderr)
                    break
                print('kwargs:', kwargs, file=sys.stderr)
                try:
                    tmp = client.get_events(**kwargs)
                except Exception:
                    print('No more events than %d' % len(cat.events),
                          file=sys.stderr)
                    break
                cat += tmp
                kwargs['offset'] += len(tmp.events)
    for e in cat.events:
        print("Event \"%s\":\t%s" % (str(e.resource_id), e.short_str()),
              file=sys.stderr)

    if qml is not None:
        cat.write(qml, format='SC3ML')

    if fin is not None:
        with open(fin, 'w') as f:
            f.write('\n'.join([str(e.resource_id) for e in cat.events]) + '\n')
    if tin is not None:
        with open(tin, 'w') as f:
            for e in cat.events:
                o = e.preferred_origin_id.get_referred_object()
                # Window: origin time -20 s to +40 s.
                f.write('%s %s\n' %
                        ((o.time - 60 * 3 / 9).strftime("%Y-%m-%dT%H:%M:%S"),
                         (o.time + 60 * 6 / 9).strftime("%Y-%m-%dT%H:%M:%S")))

    if playback is not None:
        if 'evid' in playback:
            for e in cat.events:
                print(playback % ("\"" + str(e.resource_id) + "\""))
        else:
            for e in cat.events:
                o = e.preferred_origin_id.get_referred_object()
                print(
                    playback %
                    ((o.time - 60 * 3 / 9).strftime("\"%Y-%m-%d %H:%M:%S\""),
                     (o.time + 60 * 6 / 9).strftime("\"%Y-%m-%d %H:%M:%S\"")))
Пример #19
0
    def retrieveData(self):
        """Retrieve data from many FDSN services, turn into StreamCollection.

        Uses the event parameters already stored on this fetcher
        (``self.time``, ``self.lat``, ``self.lon``, ``self.radius``, ...)
        to mass-download miniSEED waveforms and StationXML metadata into
        ``self.rawdir`` (or a temporary directory when it is None), then
        reads the miniSEED files back in.

        Returns:
            StreamCollection: StreamCollection object, or None when
            'FDSNFetcher' is absent from ``self.config['fetchers']`` or
            when no FDSN client could be initialized.
        """
        # Bail out if FDSNFetcher not configured
        if 'FDSNFetcher' not in self.config['fetchers']:
            return
        rawdir = self.rawdir
        if self.rawdir is None:
            rawdir = tempfile.mkdtemp()
        else:
            if not os.path.isdir(rawdir):
                os.makedirs(rawdir)

        # use the mass downloader to retrieve data of interest from any FSDN
        # service.
        origin_time = UTCDateTime(self.time)

        # The Obspy mass downloader has it's own logger - grab that stream
        # and write it to our own log file
        ldict = logging.Logger.manager.loggerDict
        if OBSPY_LOGGER in ldict:
            root = logging.getLogger()
            fhandler = root.handlers[0]
            obspy_logger = logging.getLogger(OBSPY_LOGGER)
            obspy_stream_handler = obspy_logger.handlers[0]
            # Swap obspy's own stream handler for our root file handler so
            # its download progress ends up in our log file.
            obspy_logger.removeHandler(obspy_stream_handler)
            obspy_logger.addHandler(fhandler)

        # Circular domain around the epicenter.
        domain = CircularDomain(latitude=self.lat,
                                longitude=self.lon,
                                minradius=0,
                                maxradius=self.radius)

        min_dist = self.minimum_interstation_distance_in_m
        restrictions = Restrictions(
            # Define the temporal bounds of the waveform data.
            starttime=origin_time - self.time_before,
            endtime=origin_time + self.time_after,
            network=self.network,
            station='*',
            location='*',
            location_priorities=['*'],
            reject_channels_with_gaps=self.reject_channels_with_gaps,
            # Any trace that is shorter than 95 % of the
            # desired total duration will be discarded.
            minimum_length=self.minimum_length,
            sanitize=self.sanitize,
            minimum_interstation_distance_in_m=min_dist,
            exclude_networks=self.exclude_networks,
            exclude_stations=self.exclude_stations,
            channel_priorities=self.channels)

        # For each of the providers, check if we have a username and password
        # provided in the config. If we do, initialize the client with the
        # username and password. Otherwise, use default initalization.
        client_list = []
        for provider_str in URL_MAPPINGS.keys():
            if provider_str == GEO_NET_ARCHIVE_KEY:
                # NOTE(review): for recent GeoNet events the provider key is
                # replaced by the realtime URL; the config lookup below then
                # uses that URL as the key -- confirm that is intended.
                dt = UTCDateTime.utcnow() - UTCDateTime(self.time)
                if dt < GEONET_ARCHIVE_DAYS:
                    provider_str = GEONET_REALTIME_URL
            try:
                fdsn_config = self.config['fetchers']['FDSNFetcher']
                if provider_str in fdsn_config:
                    client = Client(
                        provider_str,
                        user=fdsn_config[provider_str]['user'],
                        password=fdsn_config[provider_str]['password'])
                else:
                    client = Client(provider_str)
                client_list.append(client)
            # If the FDSN service is down, then an FDSNException is raised
            except FDSNException:
                logging.warning('Unable to initalize client %s' % provider_str)
            except KeyError:
                logging.warning('Unable to initalize client %s' % provider_str)

        if len(client_list):
            # Pass off the initalized clients to the Mass Downloader
            mdl = MassDownloader(providers=client_list)

            logging.info('Downloading new MiniSEED files...')
            # The data will be downloaded to the ``./waveforms/`` and
            # ``./stations/`` folders with automatically chosen file names.
            mdl.download(domain,
                         restrictions,
                         mseed_storage=rawdir,
                         stationxml_storage=rawdir)

            seed_files = glob.glob(os.path.join(rawdir, '*.mseed'))
            streams = []
            for seed_file in seed_files:
                try:
                    tstreams = read_obspy(seed_file, self.config)
                except BaseException as e:
                    # Unreadable files are logged and skipped, not fatal.
                    tstreams = None
                    fmt = 'Could not read seed file %s - "%s"'
                    logging.info(fmt % (seed_file, str(e)))
                if tstreams is None:
                    continue
                else:
                    streams += tstreams

            stream_collection = StreamCollection(
                streams=streams, drop_non_free=self.drop_non_free)
            return stream_collection
Пример #20
0
# -*- coding: utf-8 -*-
"""
Add FDSN URL's to those listed by default in obspy.clients.fdsn.header.URL_MAPPINGS 

:license:
    GNU Lesser General Public License, Version 3
    (http://www.gnu.org/copyleft/lesser.html)
"""
from obspy.clients.fdsn.header import URL_MAPPINGS

# Extra provider-name -> base-URL entries merged into obspy's registry so
# that e.g. Client('BATS') resolves like a built-in provider.
MORE_FDSN = {
    # NOTE(review): '******' looks like a scrubbed/redacted URL rather than
    # a real endpoint -- restore the actual AusPass base URL before use.
    'AUSPASS': '******',
    'BATS': 'http://batsws.earth.sinica.edu.tw',
    'LOCALHOST': 'http://localhost:8080'
}

# Mutates obspy's module-level mapping at import time (global side effect).
URL_MAPPINGS.update(MORE_FDSN)
Пример #21
0
 def test_with_None(self):
     """With None, every returned entry must be a known FDSN provider key."""
     known_providers = sorted(URL_MAPPINGS)
     clients = pu.get_multiple_fdsn_clients(None)
     self.assertGreater(len(clients), 5)
     for provider in clients:
         self.assertIn(provider, known_providers)
Пример #22
0
    def __init__(self, user_request=None, logger=None):
        """
        Initializes the ISPAQ data access expediter.

        See :mod:`ispaq.concierge` for all parameters.

        Raises:
            ValueError: if any of the dataselect/event/station preferences
                is neither a known FDSN provider key nor an existing
                local path.
        """
        # Keep the entire UserRequest and logger
        self.user_request = user_request
        self.logger = logger
        
        # Copy important UserRequest properties to the Concierge for simpler access
        self.requested_starttime = user_request.requested_starttime
        self.requested_endtime = user_request.requested_endtime
        self.metric_names = user_request.metrics
        self.sncl_patterns = user_request.sncls
        self.function_by_logic = user_request.function_by_logic
        self.logic_types = user_request.function_by_logic.keys()
        
        # Individual elements from the Preferences: section of the preferences file
        self.csv_output_dir = user_request.csv_output_dir
        self.plot_output_dir = user_request.plot_output_dir
        self.sigfigs = user_request.sigfigs
        
        # Output information
        file_base = '%s_%s_%s' % (self.user_request.requested_metric_set,
                                  self.user_request.requested_sncl_set, 
                                  self.requested_starttime.date)
        self.output_file_base = self.csv_output_dir + '/' + file_base
        
        # Availability dataframe is stored if it is read from a local file
        self.availability = None
        
        # Filtered availability dataframe is stored for potential reuse
        self.filtered_availability = None
        
        # Add dataselect client and URL or reference a local miniseed file
        self.dataselect_url, self.dataselect_client = self._resolve_source(
            user_request.dataselect_url, 'dataselect_url')

        # Add event client and URL or reference a local QUAKEML file
        self.event_url, self.event_client = self._resolve_source(
            user_request.event_url, 'event_url')

        # Add station client and URL or reference a local StationXML file
        self.station_url, self.station_client = self._resolve_source(
            user_request.station_url, 'station_url')

    def _resolve_source(self, url_or_path, preference_name):
        """Resolve one preference value to a (url, client) pair.

        Args:
            url_or_path (str): FDSN provider key (e.g. 'IRIS') or a path
                to local data files.
            preference_name (str): Name of the preference being resolved,
                used only in the error message (e.g. 'dataselect_url').

        Returns:
            tuple: (FDSN base URL, obspy Client) when `url_or_path` is a
            known provider key, or (absolute local path, None) when it
            points at existing local files.

        Raises:
            ValueError: if the value is neither a known FDSN provider key
                nor an existing local path.
        """
        if url_or_path in URL_MAPPINGS.keys():
            # Get data from an FDSN web service
            return URL_MAPPINGS[url_or_path], Client(url_or_path)
        local_path = os.path.abspath(url_or_path)
        if os.path.exists(local_path):
            # Get data from local files
            return local_path, None
        err_msg = "Cannot find preference file %s: '%s'" % (preference_name,
                                                            url_or_path)
        self.logger.error(err_msg)
        raise ValueError(err_msg)