def event(baseurl='IRIS', starttime=None, endtime=None, minmag=3., maxmag=10., maxnumber=10, catalog=None):
    """Query an FDSN event service and print matching event resource ids.

    Diagnostics (client choice, time window, catalog listing) go to stderr;
    only the event resource ids are written to stdout, one per line.

    Parameters
    ----------
    baseurl : str
        FDSN data-center key or URL understood by obspy's ``Client``.
    starttime, endtime : str or UTCDateTime or None
        Search window; anything ``UTCDateTime`` cannot parse falls back to
        the last 365 days.
    minmag, maxmag : float
        Magnitude range for the query.
    maxnumber : int
        Maximum number of events to request.
    catalog : str or None
        Optional catalog name forwarded to ``get_events``.
    """
    try:
        client = Client(baseurl)
        print("Using " + baseurl + "...", file=sys.stderr)
    except Exception:
        # Was a bare `except:` printing to stdout; keep diagnostics on stderr
        # (consistent with every other message here) and list the known FDSN
        # data centers to help the user pick a valid one.
        print("fdsn client failed", file=sys.stderr)
        from obspy.clients.fdsn.header import URL_MAPPINGS
        for key in sorted(URL_MAPPINGS.keys()):
            print("{0:<7} {1}".format(key, URL_MAPPINGS[key]), file=sys.stderr)
        sys.exit()
    try:
        starttime = UTCDateTime(starttime)
        endtime = UTCDateTime(endtime)
    except Exception:
        # Unparsable or missing times: default to the last 365 days.
        starttime = UTCDateTime() - 365 * 24 * 60 * 60
        endtime = UTCDateTime()
        print('Auto time limit:', file=sys.stderr)
        print(starttime, file=sys.stderr)
        print(endtime, file=sys.stderr)
    # Build the query once instead of duplicating the get_events call;
    # `catalog` is only added when one was actually requested.
    query = dict(limit=maxnumber, orderby="magnitude",
                 starttime=starttime, endtime=endtime,
                 minmagnitude=minmag, maxmagnitude=maxmag)
    if catalog:
        query['catalog'] = catalog
    cat = client.get_events(**query)
    print(cat.__str__(print_all=True), file=sys.stderr)
    for e in cat:
        print(e.resource_id)
def __init__(self, pyweed, parent=None):
    """Initialize the preferences dialog and fill the data-center lists."""
    super(PreferencesDialog, self).__init__(parent=parent)
    self.setupUi(self)
    self.pyweed = pyweed

    # Data centers are kept as an ordered (sorted) list of FDSN keys.
    self.data_centers = sorted(URL_MAPPINGS.keys())

    # Each combobox entry displays "KEY: url" but stores the bare key as
    # its item data; both comboboxes get identical contents.
    for dc in self.data_centers:
        label = "%s: %s" % (dc, URL_MAPPINGS[dc])
        for combo in (self.eventDataCenterComboBox, self.stationDataCenterComboBox):
            combo.addItem(label, dc)

    self.okButton.pressed.connect(self.accept)
    self.cancelButton.pressed.connect(self.reject)
def getClients(starttime, endtime, lslat, lslon, radius=200.):
    """
    Returns list of valid clients/network codes to request data from in
    getStreamObject, given landslide coordinates and a time range to search
    for data in.
    INPUTS
    starttime (UTCDateTime) - start time of stream object
    endtime (UTCDateTime) - end time of stream object
    lslat (float) - latitudinal coordinate of landslide (make negative for
        south of Equator)
    lslon (float) - longitudinal coordinate of landslide (make negative for
        west of Prime Meridian)
    radius (float) - optional; search radius in km for finding nearest
        seismic stations
    OUTPUT
    valid_clients (list of strings) - list of FDSN network codes that will
        return seismic traces
    """
    valid_clients = []
    # Seismic channels to search for
    channels = 'EHZ,BHZ,HHZ'

    # Search for data within initial radius
    print('Retrieving data from stations within %i km of event...' % int(radius))
    # Iterate the clients directly instead of indexing by range(len(...)).
    for client in sorted(URL_MAPPINGS.keys()):
        try:
            reviewData.getepidata(lslat, lslon, starttime, tstart=0.,
                                  tend=endtime - starttime, minradiuskm=0.,
                                  maxradiuskm=radius, chanuse=channels,
                                  location='*', clientnames=client)
            valid_clients.append(client)
        except Exception:
            # Was a bare `except:` (also swallowed KeyboardInterrupt/SystemExit).
            # Best-effort probe: a failing client is simply skipped.
            pass
    return valid_clients
def event(baseurl='IRIS', playback='date', **kwargs):
    """Query an FDSN event service and print one playback line per event.

    Parameters
    ----------
    baseurl : str
        FDSN data-center key passed to obspy's ``Client``.
    playback : str
        printf-style template. If it contains the substring 'evid' it is
        given the double-quoted event resource id; otherwise it is given two
        double-quoted timestamps bracketing the preferred origin time
        (20 s before to 40 s after).
    **kwargs
        Forwarded verbatim to ``Client.get_events``; ``starttime``/``endtime``
        are normalized to ``UTCDateTime`` (defaulting to the last 365 days
        when unparsable).
    """
    try:
        client = Client(baseurl)
        print("Using " + baseurl + "...", file=sys.stderr)
    except Exception:
        print("fdsn client failed")
        from obspy.clients.fdsn.header import URL_MAPPINGS
        for key in sorted(URL_MAPPINGS.keys()):
            print("{0:<7} {1}".format(key, URL_MAPPINGS[key]), file=sys.stderr)
        sys.exit()
    try:
        kwargs['starttime'] = UTCDateTime(kwargs['starttime'])
        kwargs['endtime'] = UTCDateTime(kwargs['endtime'])
    except Exception:
        kwargs['starttime'] = UTCDateTime() - 365 * 24 * 60 * 60
        kwargs['endtime'] = UTCDateTime()
        print('Auto time limit:', file=sys.stderr)
        # BUG FIX: these lines printed the undefined names `starttime` and
        # `endtime` (NameError whenever this fallback branch ran); the values
        # live in kwargs in this variant of the function.
        print(kwargs['starttime'], file=sys.stderr)
        print(kwargs['endtime'], file=sys.stderr)
    cat = client.get_events(**kwargs)
    print(cat.__str__(print_all=True), file=sys.stderr)
    if 'evid' in playback:
        for e in cat.events:
            # BUG FIX: resource_id is a ResourceIdentifier, not a str;
            # concatenating it with "\"" raised TypeError. Wrap in str()
            # (matching the sibling implementation of this function).
            print(playback % ("\"" + str(e.resource_id) + "\""))
    else:
        for e in cat.events:
            o = e.preferred_origin_id.get_referred_object()
            # Window: 60*3/9 = 20 s before to 60*6/9 = 40 s after origin.
            print(playback % ((o.time - 60 * 3 / 9).strftime("\"%Y-%m-%d %H:%M:%S\""),
                              (o.time + 60 * 6 / 9).strftime("\"%Y-%m-%d %H:%M:%S\"")))
def __buildUI__(self):
    """Assemble the 'Reconcile Stations' dialog widgets and layout."""
    self.setWindowTitle('InfraView - Reconcile Stations')

    intro = QLabel(self.tr('This will ATTEMPT to download the station info for the following Stations.'))
    intro.setWordWrap(True)

    # Service selector: one entry per known FDSN data center, IRIS preselected.
    self.cb = QComboBox()
    label_service_name = QLabel(self.tr('Service: '))
    for service in sorted(URL_MAPPINGS.keys()):
        self.cb.addItem(service)
    self.cb.setCurrentText('IRIS')

    self.stationListEdit = QListWidget()
    self.stationListEdit.setMinimumWidth(300)

    self.statusLine = QLabel(' ')

    self.attemptButton = QPushButton(self.tr('Attempt to Download'))

    # OK/Cancel buttons; OK is relabeled for this dialog's purpose.
    button_box = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel, Qt.Horizontal, self)
    button_box.button(QDialogButtonBox.Ok).setText('Add to Station List')
    button_box.accepted.connect(self.accept)
    button_box.rejected.connect(self.reject)

    # Stack everything vertically in display order.
    root_layout = QVBoxLayout()
    for widget in (intro, label_service_name, self.cb, self.stationListEdit,
                   self.attemptButton, self.statusLine, button_box):
        root_layout.addWidget(widget)
    self.setLayout(root_layout)

    self.connectSignalsandSlots()
#!/usr/bin/env python
"""Print the FDSN web-service data centers known to ObsPy, one per line."""
from obspy.clients.fdsn.header import URL_MAPPINGS

# Sorting the (key, url) pairs is equivalent to sorting the keys,
# since keys are unique.
for name, url in sorted(URL_MAPPINGS.items()):
    print("{0:<7} {1}".format(name, url))
def event(baseurl='IRIS', playback=None, days=1, qml=None, fin=None, tin=None, country=None, **kwargs):
    """Fetch an FDSN event catalog and emit it in several optional formats.

    Parameters
    ----------
    baseurl : str
        FDSN data-center key passed to obspy's ``Client``.
    playback : str or None
        printf-style template printed once per event to stdout. If it
        contains the substring 'evid' it receives the quoted resource id,
        otherwise two quoted timestamps around the preferred origin time.
    days : number
        Fallback window (days before now) used when the supplied
        ``starttime``/``endtime`` kwargs cannot be parsed.
    qml : str or None
        If given, path to write the catalog in SC3ML format.
    fin : str or None
        If given, path to write one event resource id per line.
    tin : str or None
        If given, path to write 'start end' time windows, one per event.
    country : str or None
        If given, keep only events whose reverse-geocoded country code
        occurs in this string; the catalog is repeatedly topped up with
        offset queries until it is stable at its original size.
    **kwargs
        Forwarded verbatim to ``Client.get_events``.

    NOTE(review): recovered from whitespace-mangled text — the nesting of
    the country-filter loop below was inferred and should be confirmed
    against the original file.
    """
    try:
        client = Client(baseurl)
        print("Using " + baseurl + "...", file=sys.stderr)
    except:
        # Client construction failed: list the known data centers and quit.
        print("fdsn client failed", file=sys.stderr)
        from obspy.clients.fdsn.header import URL_MAPPINGS
        for key in sorted(URL_MAPPINGS.keys()):
            print("{0:<7} {1}".format(key, URL_MAPPINGS[key]), file=sys.stderr)
        sys.exit()
    try:
        # Normalize user-supplied times to UTCDateTime.
        kwargs['starttime'] = UTCDateTime(kwargs['starttime'])
        kwargs['endtime'] = UTCDateTime(kwargs['endtime'])
    except:
        # Missing or unparsable times: default to the last `days` days.
        kwargs['starttime'] = UTCDateTime() - float(days) * 24 * 60 * 60
        kwargs['endtime'] = UTCDateTime()
    print('kwargs:', kwargs, file=sys.stderr)
    try:
        cat = client.get_events(**kwargs)
    except:
        print('No response.', file=sys.stderr)
        sys.exit()
    if country is not None:
        # Iteratively drop foreign events and re-query (offset/limit paging)
        # until the catalog holds `limit` in-country events or the service
        # runs out of events.
        ok = False
        limit = len(cat.events)             # target catalog size
        kwargs['offset'] = len(cat.events)  # paging offset for top-up queries
        while not ok:
            ok = True
            remove = []
            kwargs['limit'] = 0             # number of replacements to request
            for i, e in enumerate(cat.events):
                lalo = [e.preferred_origin().latitude, e.preferred_origin().longitude]
                # Reverse geocode the epicenter (one network call per event).
                gcode = geocoder.osm(lalo, method='reverse').json
                # NOTE(review): substring test, not equality — 'us' matches
                # 'australia'. Also assumes gcode is not None and always has
                # 'country_code'; confirm geocoder behavior on failures.
                if gcode['country_code'].lower() not in country.lower():
                    kwargs['limit'] += 1
                    ok = False
                    remove += [e]
                    print('removing %d (%s, %s): %s (requesting %d after %d)'
                          % (i, lalo[0], lalo[1], gcode['country_code'],
                             kwargs['limit'], kwargs['offset']),
                          file=sys.stderr)
            if not ok:
                for e in remove:
                    cat.events.remove(e)
                if len(cat.events) >= limit:
                    print('Clean stable catalog of %d events' % len(cat.events), file=sys.stderr)
                    break
                print('kwargs:', kwargs, file=sys.stderr)
                try:
                    tmp = client.get_events(**kwargs)
                except:
                    # Service has no further events at this offset.
                    print('No more events than %d' % len(cat.events), file=sys.stderr)
                    break
                cat += tmp
                kwargs['offset'] += len(tmp.events)
    # Human-readable catalog listing on stderr.
    for e in cat.events:
        print("Event \"%s\":\t%s" % (str(e.resource_id), e.short_str()), file=sys.stderr)
    if qml is not None:
        cat.write(qml, format='SC3ML')
    if fin is not None:
        with open(fin, 'w') as f:
            f.write('\n'.join([str(e.resource_id) for e in cat.events]) + '\n')
    if tin is not None:
        with open(tin, 'w') as f:
            for e in cat.events:
                o = e.preferred_origin_id.get_referred_object()
                # Window: 60*3/9 = 20 s before to 60*6/9 = 40 s after origin.
                f.write('%s %s\n' % ((o.time - 60 * 3 / 9).strftime("%Y-%m-%dT%H:%M:%S"),
                                     (o.time + 60 * 6 / 9).strftime("%Y-%m-%dT%H:%M:%S")))
    if playback is not None:
        if 'evid' in playback:
            for e in cat.events:
                print(playback % ("\"" + str(e.resource_id) + "\""))
        else:
            for e in cat.events:
                o = e.preferred_origin_id.get_referred_object()
                print(playback % ((o.time - 60 * 3 / 9).strftime("\"%Y-%m-%d %H:%M:%S\""),
                                  (o.time + 60 * 6 / 9).strftime("\"%Y-%m-%d %H:%M:%S\"")))
def retrieveData(self):
    """Retrieve data from many FDSN services, turn into StreamCollection.

    Args:
        event (dict):
            Best dictionary matching input event, fields as above
            in return of getMatchingEvents().

    Returns:
        StreamCollection: StreamCollection object.

    NOTE(review): recovered from whitespace-mangled text — the final
    StreamCollection construction/return appears to sit inside the
    `if len(client_list):` guard (so the method implicitly returns None
    when no client initializes); confirm against the original file.
    """
    # Bail out if FDSNFetcher not configured
    if 'FDSNFetcher' not in self.config['fetchers']:
        return
    rawdir = self.rawdir
    if self.rawdir is None:
        rawdir = tempfile.mkdtemp()
    else:
        if not os.path.isdir(rawdir):
            os.makedirs(rawdir)
    # use the mass downloader to retrieve data of interest from any FSDN
    # service.
    origin_time = UTCDateTime(self.time)
    # The Obspy mass downloader has it's own logger - grab that stream
    # and write it to our own log file
    ldict = logging.Logger.manager.loggerDict
    if OBSPY_LOGGER in ldict:
        root = logging.getLogger()
        fhandler = root.handlers[0]
        obspy_logger = logging.getLogger(OBSPY_LOGGER)
        obspy_stream_handler = obspy_logger.handlers[0]
        # Redirect obspy's output into our root log file handler.
        obspy_logger.removeHandler(obspy_stream_handler)
        obspy_logger.addHandler(fhandler)
    # Circular domain around the epicenter.
    domain = CircularDomain(latitude=self.lat,
                            longitude=self.lon,
                            minradius=0,
                            maxradius=self.radius)
    min_dist = self.minimum_interstation_distance_in_m
    restrictions = Restrictions(
        # Define the temporal bounds of the waveform data.
        starttime=origin_time - self.time_before,
        endtime=origin_time + self.time_after,
        network=self.network,
        station='*',
        location='*',
        location_priorities=['*'],
        reject_channels_with_gaps=self.reject_channels_with_gaps,
        # Any trace that is shorter than 95 % of the
        # desired total duration will be discarded.
        minimum_length=self.minimum_length,
        sanitize=self.sanitize,
        minimum_interstation_distance_in_m=min_dist,
        exclude_networks=self.exclude_networks,
        exclude_stations=self.exclude_stations,
        channel_priorities=self.channels)
    # For each of the providers, check if we have a username and password
    # provided in the config. If we do, initialize the client with the
    # username and password. Otherwise, use default initalization.
    client_list = []
    for provider_str in URL_MAPPINGS.keys():
        if provider_str == GEO_NET_ARCHIVE_KEY:
            # GeoNet: recent data lives on the realtime endpoint rather
            # than the archive.
            dt = UTCDateTime.utcnow() - UTCDateTime(self.time)
            if dt < GEONET_ARCHIVE_DAYS:
                provider_str = GEONET_REALTIME_URL
        try:
            fdsn_config = self.config['fetchers']['FDSNFetcher']
            if provider_str in fdsn_config:
                client = Client(
                    provider_str,
                    user=fdsn_config[provider_str]['user'],
                    password=fdsn_config[provider_str]['password'])
            else:
                client = Client(provider_str)
            client_list.append(client)
        # If the FDSN service is down, then an FDSNException is raised
        except FDSNException:
            logging.warning('Unable to initalize client %s' % provider_str)
        except KeyError:
            # Config entry missing 'user'/'password' keys.
            logging.warning('Unable to initalize client %s' % provider_str)
    if len(client_list):
        # Pass off the initalized clients to the Mass Downloader
        mdl = MassDownloader(providers=client_list)
        logging.info('Downloading new MiniSEED files...')
        # The data will be downloaded to the ``./waveforms/`` and
        # ``./stations/`` folders with automatically chosen file names.
        mdl.download(domain, restrictions, mseed_storage=rawdir,
                     stationxml_storage=rawdir)
        seed_files = glob.glob(os.path.join(rawdir, '*.mseed'))
        streams = []
        for seed_file in seed_files:
            try:
                tstreams = read_obspy(seed_file, self.config)
            except BaseException as e:
                # Log and skip unreadable files rather than aborting the run.
                tstreams = None
                fmt = 'Could not read seed file %s - "%s"'
                logging.info(fmt % (seed_file, str(e)))
            if tstreams is None:
                continue
            else:
                streams += tstreams
        stream_collection = StreamCollection(
            streams=streams, drop_non_free=self.drop_non_free)
        return stream_collection
def __init__(self, user_request=None, logger=None):
    """
    Initializes the ISPAQ data access expediter.

    See :mod:`ispaq.concierge` for all parameters.
    """
    # Keep the entire UserRequest and logger
    self.user_request = user_request
    self.logger = logger

    # Copy important UserRequest properties to the Concierge for smpler access
    self.requested_starttime = user_request.requested_starttime
    self.requested_endtime = user_request.requested_endtime
    self.metric_names = user_request.metrics
    self.sncl_patterns = user_request.sncls
    self.function_by_logic = user_request.function_by_logic
    self.logic_types = user_request.function_by_logic.keys()

    # Individual elements from the Preferences: section of the preferences file
    self.csv_output_dir = user_request.csv_output_dir
    self.plot_output_dir = user_request.plot_output_dir
    self.sigfigs = user_request.sigfigs

    # Output information
    file_base = '%s_%s_%s' % (self.user_request.requested_metric_set,
                              self.user_request.requested_sncl_set,
                              self.requested_starttime.date)
    self.output_file_base = self.csv_output_dir + '/' + file_base

    # Availability dataframe is stored if it is read from a local file
    self.availability = None

    # Filtered availability dataframe is stored for potential reuse
    self.filtered_availability = None

    def _resolve(url, pref_name):
        """Resolve a preference value to (url, client).

        Known FDSN keys yield the mapped service URL plus a Client; an
        existing local path yields (abspath, None); anything else is a
        fatal configuration error.
        """
        if url in URL_MAPPINGS.keys():
            # Data comes from an FDSN web service.
            return URL_MAPPINGS[url], Client(url)
        if os.path.exists(os.path.abspath(url)):
            # Data comes from local files (miniseed/QuakeML/StationXML).
            return os.path.abspath(url), None
        err_msg = "Cannot find preference file %s: '%s'" % (pref_name, url)
        self.logger.error(err_msg)
        raise ValueError(err_msg)

    # Add dataselect, event and station clients and URLs, or reference
    # local files.
    self.dataselect_url, self.dataselect_client = _resolve(
        user_request.dataselect_url, 'dataselect_url')
    self.event_url, self.event_client = _resolve(
        user_request.event_url, 'event_url')
    self.station_url, self.station_client = _resolve(
        user_request.station_url, 'station_url')
def __buildUI__(self):
    """Build the waveform-request widget: service/network/station/location/
    channel inputs, start date/time, trace length, and the station list."""
    # Put together the options container
    gridLayout = QGridLayout()
    optionsContainer = QWidget()
    optionsContainer.setLayout(gridLayout)

    # First lets populate the client drop down
    self.cb = QComboBox()
    label_service_name = QLabel(self.tr('Service:'))
    # BUG FIX: `fdsn_dictionary = URL_MAPPINGS` aliased ObsPy's global
    # URL_MAPPINGS, so the update() below permanently injected 'RaspShake'
    # into the shared mapping for the whole process. Copy it instead, and
    # iterate the copy so the extra entry still appears in the dropdown.
    # NOTE(review): if other code relied on Client('RaspShake') resolving
    # via the mutated global mapping, that path must register the URL
    # explicitly — confirm.
    fdsn_dictionary = dict(URL_MAPPINGS)
    # NOTE(review): domain looks misspelled ('rasberryshakedata.com' vs
    # 'raspberryshakedata.com'); kept as-is pending confirmation.
    fdsn_dictionary.update(
        {'RaspShake': 'https://fdsnws.rasberryshakedata.com'})
    for key in sorted(fdsn_dictionary.keys()):
        self.cb.addItem(key)
    self.cb.setCurrentText('IRIS')
    self.cb.currentIndexChanged[str].connect(self.onActivated_cb)

    validator = IPValidator(self)

    label_network_name = QLabel(self.tr('Network: '))
    self.networkNameBox = QLineEdit()
    self.networkNameBox.setToolTip(
        'Wildcards OK \nCan be SEED network codes or data center defined codes. \nMultiple codes are comma-separated (e.g. "IU,TA").'
    )
    self.networkNameBox.setValidator(validator)

    label_station_name = QLabel(self.tr('Station: '))
    self.stationNameBox = QLineEdit()
    self.stationNameBox.setToolTip(
        'Wildcards OK \nOne or more SEED station codes. \nMultiple codes are comma-separated (e.g. "ANMO,PFO")'
    )
    self.stationNameBox.setValidator(validator)

    label_location_str = QLabel(self.tr('Location:'))
    self.location_Box = QLineEdit('*')
    self.location_Box.setToolTip(
        'Wildcards OK \nOne or more SEED location identifiers. \nMultiple identifiers are comma-separated (e.g. "00,01"). \nAs a special case “--“ (two dashes) will be translated to a string of two space characters to match blank location IDs.'
    )
    self.location_Box.setValidator(validator)

    label_channel_str = QLabel(self.tr('Channel:'))
    self.channel_Box = QLineEdit('*')
    self.channel_Box.setToolTip(
        'Wildcards OK \nOne or more SEED channel codes. \nMultiple codes are comma-separated (e.g. "BHZ,HHZ")'
    )
    self.channel_Box.setValidator(validator)

    label_startDate = QLabel(self.tr('Start Date (UTC):'))
    self.startDate_edit = QDateEdit()
    self.startDate_edit.setMinimumDate(QDate(1900, 1, 1))
    self.startDate_edit.setDisplayFormat('yyyy-MM-dd')
    self.startDate_edit.setDate(self.startDate_edit.minimumDate())

    label_startTime = QLabel(self.tr('Start Time (UTC):'))
    self.startTime_edit = QTimeEdit()
    self.startTime_edit.setDisplayFormat('HH:mm:ss.zzz')

    label_traceLength = QLabel(self.tr('Trace Length (s)'))
    self.traceLength_t = QSpinBox()
    self.traceLength_t.setMinimum(1)
    self.traceLength_t.setMaximum(999999999)
    self.traceLength_t.setValue(3600)

    replaceWaveButton = QPushButton('Replace')
    replaceWaveButton.clicked.connect(self.onClicked_replace)
    appendWaveButton = QPushButton('Append')
    appendWaveButton.clicked.connect(self.onClicked_append)

    self.stationListWidget = QListWidget()
    self.stationListWidget.setSelectionMode(
        QAbstractItemView.ExtendedSelection)
    self.stationListWidget.itemSelectionChanged.connect(
        self.populateStationInfoFromStationList)

    self.browserButton = QPushButton('Station Browser')
    self.browserButton.clicked.connect(self.onClicked_browserButton)

    # Two-column grid: label on the left, input on the right.
    gridLayout.addWidget(label_service_name, 0, 0)
    gridLayout.addWidget(self.cb, 0, 1)
    gridLayout.addWidget(label_network_name, 1, 0)
    gridLayout.addWidget(self.networkNameBox, 1, 1)
    gridLayout.addWidget(label_station_name, 2, 0)
    gridLayout.addWidget(self.stationNameBox, 2, 1)
    gridLayout.addWidget(label_location_str, 3, 0)
    gridLayout.addWidget(self.location_Box, 3, 1)
    gridLayout.addWidget(label_channel_str, 4, 0)
    gridLayout.addWidget(self.channel_Box, 4, 1)
    gridLayout.addWidget(label_startDate, 5, 0)
    gridLayout.addWidget(self.startDate_edit, 5, 1)
    gridLayout.addWidget(label_startTime, 6, 0)
    gridLayout.addWidget(self.startTime_edit, 6, 1)
    gridLayout.addWidget(label_traceLength, 7, 0)
    gridLayout.addWidget(self.traceLength_t, 7, 1)
    # gridLayout.addWidget(importEventButton, 8, 1, 1, 2)

    horzLayout = QHBoxLayout(self)
    horzLayout.addWidget(replaceWaveButton)
    horzLayout.addWidget(appendWaveButton)
    addGroupBox = QGroupBox("Get Waveform(s)")
    addGroupBox.setLayout(horzLayout)

    vertlayout = QVBoxLayout(self)
    vertlayout.addWidget(optionsContainer)
    vertlayout.addWidget(addGroupBox)
    vertlayout.addWidget(self.stationListWidget)
    vertlayout.addWidget(self.browserButton)
    self.setLayout(vertlayout)

    # create stationdialog here so that you only create it once, from here
    # on you just run exec_() to make it pop up
    self.stationDialog = IPStationBrowser.IPStationDialog()
def retrieveData(self):
    """Retrieve data from many FDSN services, turn into StreamCollection.

    Args:
        event (dict):
            Best dictionary matching input event, fields as above
            in return of getMatchingEvents().

    Returns:
        StreamCollection: StreamCollection object.

    NOTE(review): unlike the sibling implementation of this method, client
    construction here has no FDSNException/KeyError handling — one down
    service or an incomplete config entry (missing 'user'/'password')
    aborts the whole retrieval. Also contains leftover DEBUGGING prints.
    """
    rawdir = self.rawdir
    if self.rawdir is None:
        rawdir = tempfile.mkdtemp()
    else:
        if not os.path.isdir(rawdir):
            os.makedirs(rawdir)
    # use the mass downloader to retrieve data of interest from any FSDN
    # service.
    origin_time = UTCDateTime(self.time)
    # The Obspy mass downloader has it's own logger - grab that stream
    # and write it to our own log file
    ldict = logging.Logger.manager.loggerDict
    if OBSPY_LOGGER in ldict:
        root = logging.getLogger()
        fhandler = root.handlers[0]
        obspy_logger = logging.getLogger(OBSPY_LOGGER)
        obspy_stream_handler = obspy_logger.handlers[0]
        # Redirect obspy's output into our root log file handler.
        obspy_logger.removeHandler(obspy_stream_handler)
        obspy_logger.addHandler(fhandler)
    # Circular domain around the epicenter.
    domain = CircularDomain(latitude=self.lat,
                            longitude=self.lon,
                            minradius=0,
                            maxradius=self.radius)
    restrictions = Restrictions(
        # Define the temporal bounds of the waveform data.
        starttime=origin_time - self.time_before,
        endtime=origin_time + self.time_after,
        network=self.network,
        station='*',
        location='*',
        location_priorities=['*'],
        reject_channels_with_gaps=self.reject_channels_with_gaps,
        # Any trace that is shorter than 95 % of the
        # desired total duration will be discarded.
        minimum_length=self.minimum_length,
        sanitize=self.sanitize,
        minimum_interstation_distance_in_m=self.
        minimum_interstation_distance_in_m,
        exclude_networks=self.exclude_networks,
        exclude_stations=self.exclude_stations,
        channel_priorities=self.channels)

    # DEBUGGING
    pp = pprint.PrettyPrinter()
    pp.pprint(domain.__dict__)
    print('***************************')
    pp.pprint(restrictions.__dict__)
    # DEBUGGING

    # For each of the providers, check if we have a username and password
    # provided in the config. If we do, initialize the client with the
    # username and password. Otherwise, use default initalization.
    fdsn_config = self.config['fetchers']['FDSNFetcher']
    client_list = []
    for provider_str in URL_MAPPINGS.keys():
        if provider_str in fdsn_config:
            client = Client(provider_str,
                            user=fdsn_config[provider_str]['user'],
                            password=fdsn_config[provider_str]['password'])
        else:
            client = Client(provider_str)
        client_list.append(client)
    # Pass off the initalized clients to the Mass Downloader
    mdl = MassDownloader(providers=client_list)

    # we can have a problem of file overlap, so let's remove existing
    # mseed files from the raw directory.
    logging.info('Deleting old MiniSEED files...')
    delete_old_files(rawdir, '*.mseed')

    # remove existing png files as well
    logging.info('Deleting old PNG files...')
    delete_old_files(rawdir, '*.png')

    # remove existing xml files as well
    logging.info('Deleting old XML files...')
    delete_old_files(rawdir, '*.xml')

    logging.info('Downloading new MiniSEED files...')
    # The data will be downloaded to the ``./waveforms/`` and ``./stations/``
    # folders with automatically chosen file names.
    mdl.download(domain, restrictions, mseed_storage=rawdir,
                 stationxml_storage=rawdir)
    seed_files = glob.glob(os.path.join(rawdir, '*.mseed'))
    streams = []
    for seed_file in seed_files:
        # NOTE(review): no error handling here (cf. sibling variant) — a
        # single unreadable mseed file raises out of this method.
        tstreams = read_fdsn(seed_file)
        streams += tstreams
    stream_collection = StreamCollection(streams=streams,
                                         drop_non_free=self.drop_non_free)
    return stream_collection