def get_events(self, **kwargs):
    """
    Fetches a catalog with event information.

    Parameters to narrow down the request are the same as for
    :meth:`get_list`.

    .. warning::
        Only works when connecting to a SeisHub server of version 1.4.0
        or higher (serving event data as QuakeML).

    :rtype: :class:`~obspy.core.event.Catalog`
    :returns: Catalog containing event information matching the request.

    The number of resulting events is by default limited to 50 entries
    from a SeisHub server. You may raise this by setting the ``limit``
    option to a maximal value of 2500. Numbers above 2500 will result
    into an exception.
    """
    # One server resource per matching event; each resource is a QuakeML
    # document that read_events() parses into a (sub-)catalog, merged here.
    resource_names = [item["resource_name"]
                      for item in self.get_list(**kwargs)]
    cat = Catalog()
    for resource_name in resource_names:
        cat.extend(read_events(self.get_resource(resource_name)))
    return cat
def append_mt(cat, csv_fid=None):
    """
    Append GeoNet moment tensor information to the events in catalog.

    Assumes that every event carries moment tensor information, i.e.
    check_moment_tensor() should be run prior to this.

    :type cat: obspy.Catalog
    :param cat: catalog of events to add moment tensor information to
    :type csv_fid: str
    :param csv_fid: csv file containing moment tensor information
    """
    augmented = []
    for src_event in cat:
        # Second '/'-separated segment of the resource id is the GeoNet
        # event id (assumed resource-id layout — matches geonet_mt usage).
        eid = src_event.resource_id.id.split('/')[1]
        new_event, _ = geonet_mt(event_id=eid, units="dynecm",
                                 event=src_event, csv_fid=csv_fid)
        augmented.append(new_event)

    cat_out = Catalog()
    cat_out.extend(augmented)
    return cat_out
def get_events(self, **kwargs):
    """
    Fetches a catalog with event information.

    Parameters to narrow down the request are the same as for
    :meth:`get_list`.

    .. warning::
        Only works when connecting to a SeisHub server of version 1.4.0
        or higher (serving event data as QuakeML).

    :rtype: :class:`~obspy.core.event.Catalog`
    :returns: Catalog containing event information matching the request.

    The number of resulting events is by default limited to 50 entries
    from a SeisHub server. You may raise this by setting the ``limit``
    option to a maximal value of 2500. Numbers above 2500 will result
    into an exception.
    """
    catalog = Catalog()
    # Each list entry names one QuakeML resource on the server; fetch
    # and parse every one, merging the events into a single catalog.
    for entry in self.get_list(**kwargs):
        quakeml_doc = self.get_resource(entry["resource_name"])
        catalog.extend(read_events(quakeml_doc))
    return catalog
def cut_cat(cat, indices=None, method="remove"):
    """
    Obspy's catalog object has no delete, so here it is!

    :type cat: obspy.Catalog
    :param cat: catalog object to cut down
    :type indices: list of ints
    :param indices: indices of the catalog to remove; if None, ``cat``
        is returned unchanged
    :type method: str
    :param method: "remove" to delete 'indices' from 'cat', "keep" to retain
    :rtype: obspy.Catalog
    :return: new catalog object cut from old one
    :raises ValueError: if ``method`` is neither "remove" nor "keep"
    """
    if indices is None:
        return cat
    # Validate up front: previously a typo in `method` left `events`
    # unbound and surfaced as a confusing NameError.
    if method not in ("remove", "keep"):
        raise ValueError(
            "method must be 'remove' or 'keep', got %r" % (method,))
    # Set membership is O(1); testing against the raw list would make
    # the filter below quadratic for large catalogs.
    index_set = set(indices)
    keep = (method == "keep")
    events = [e for i, e in enumerate(cat) if (i in index_set) == keep]
    new_cat = Catalog()
    new_cat.extend(events)
    return new_cat
class Request(object):
    """Initialises the FDSN request for the waveforms, the preprocessing
    of the waveforms, and the creation of time domain receiver functions."""

    def __init__(
            self, phase, rot, evtloc, statloc, rawloc, preproloc, rfloc,
            deconmeth, starttime, endtime, wavdownload=True, pol: str = 'v',
            minmag: float or int = 5.5, event_coords=None, network=None,
            station=None, waveform_client=None, re_client=['IRIS'],
            evtcat=None, debug=False):
        """
        Create object that is used to start the receiver function workflow.

        :param phase: Arrival phase that is to be used as source phase.
            "S" to create S-Sp receiver functions and "P" for P-Ps receiver
            functions, "SKS" or "ScS" are allowed as well.
        :type phase: str
        :param rot: The coordinate system in that the seismogram should be
            rotated prior to deconvolution. Options are "RTZ" for radial,
            transverse, vertical; "LQT" for an orthogonal coordinate system
            computed by minimising primary energy on the converted
            component, or "PSS" for a rotation along the polarisation
            directions using the Litho1.0 surface wave tomography model.
        :type rot: str
        :param evtloc: Directory, in which to store the event catalogue
            (xml).
        :type evtloc: str
        :param statloc: Directory, in which to store the station
            inventories (xml).
        :type statloc: str
        :param rawloc: Directory, in which to store the raw waveform data.
        :type rawloc: str
        :param preproloc: Directory, in which to store the preprocessed
            waveform data (mseed).
        :type preproloc: str
        :param rfloc: Directory, in which to store the receiver functions
            in time domain (sac).
        :type rfloc: str
        :param deconmeth: The deconvolution method to use for the RF
            creation. Possible options are:
            'it': iterative time domain deconvolution
            (Ligorria & Ammon, 1999),
            'dampedf': damped frequency deconvolution,
            'fqd': frequency dependent damping - not a good choice for SRF,
            'waterlevel': Langston (1977),
            'multit': for multitaper (Helffrich, 2006),
            False/None: don't create RFs.
        :type deconmeth: str
        :param starttime: Earliest event date to be considered.
        :type starttime: ~obspy.UTCDateTime
        :param endtime: Latest event date to be considered.
        :type endtime: ~obspy.UTCDateTime
        :param wavdownload: Do you want to start a new download (True),
            update the current database (True) or only preprocess and
            create RFs from an existing database (False). False is a lot
            faster as all CPUs can be used and the preprocessing does not
            have to wait for the download, defaults to True.
        :type wavdownload: bool, optional
        :param pol: Polarisation to use as source wavelet. Either "v" for
            vertically polarised or 'h' for horizontally polarised S-waves.
            Will be ignored if phase='S', by default 'v'.
        :type pol: str, optional
        :param minmag: Minimum magnitude, by default 5.5
        :type minmag: float, optional
        :param event_coords: In case you wish to constrain events to
            certain origins. Given in the form
            (minlat, maxlat, minlon, maxlon), by default None.
        :type event_coords: Tuple, optional
        :param network: Limit the download and preprocessing to a certain
            network or several networks (if type==list).
            Wildcards are allowed, by default None.
        :type network: str or list, optional
        :param station: Limit the download and preprocessing to a certain
            station or several stations. Use only if network!=None.
            Wildcards are allowed, by default None.
        :type station: str or list, optional
        :param waveform_client: List of FDSN compatible servers to download
            waveforms from. See obspy documentation for obspy.Client for
            allowed acronyms. A list of servers by region can be found at
            `<https://www.fdsn.org/webservices/datacenters/>`_. None means
            that all known servers are requested, defaults to None.
        :type waveform_client: list, optional
        :param re_client: Only relevant, when debug=True. List of servers
            that will be used if data is missing and the script will
            attempt a redownload, usually it's easier to just run a request
            several times. Same logic as for waveform_client applies,
            defaults to ['IRIS'].
        :type re_client: list, optional
        :param evtcat: In case you want to use an already existing event
            catalogue in evtloc. If None a new catalogue will be downloaded
            (with the parameters defined before), by default None.
        :type evtcat: str, optional
        :param debug: If True, all loggers will go to DEBUG mode and all
            warnings will be shown. That will result in a lot of
            information being shown! Also joblib will fall back to using
            only few cores, by default False.
        :type debug: bool, optional
        :raises NameError: For invalid phases.
        """
        # NOTE(review): ``re_client=['IRIS']`` is a mutable default
        # argument. It is only read here, but a None-default with an
        # in-body fallback would be safer — confirm before changing.

        # Allocate variables in self
        self.debug = debug
        self.wavdownload = wavdownload
        # NOTE(review): this writes to a module-level ``tmp`` object, not
        # to ``self`` (self.re_client is assigned again further down).
        # Looks like a cross-module side channel — confirm it is
        # intentional and not a leftover typo for ``self.re_client``.
        tmp.re_client = re_client

        # Set velocity model
        self.model = TauPyModel('iasp91')

        # Normalise user input: last letter of the phase upper-cased
        # (e.g. 'p' -> 'P'), polarisation lower-cased, rotation upper-cased.
        self.phase = phase[:-1] + phase[-1].upper()
        self.pol = pol.lower()
        self.rot = rot.upper()
        self.deconmeth = deconmeth

        # Directories: logs live next to the station-inventory directory.
        self.logdir = os.path.join(
            os.path.dirname(os.path.abspath(statloc)), 'logs')
        os.makedirs(self.logdir, exist_ok=True)
        self.evtloc = evtloc
        self.statloc = statloc
        # Raw/preprocessed/RF data are stored per phase in subdirectories.
        self.rawloc = os.path.join(rawloc, self.phase)
        self.preproloc = os.path.join(preproloc, self.phase)
        self.rfloc = os.path.join(rfloc, self.phase)

        # minimum magnitude
        self.minmag = minmag

        # Request time window
        self.starttime = starttime
        self.endtime = endtime

        # geographical constraints
        if event_coords:
            (self.eMINLAT, self.eMAXLAT,
             self.eMINLON, self.eMAXLON) = event_coords
        else:
            (self.eMINLAT, self.eMAXLAT,
             self.eMINLON, self.eMAXLON) = None, None, None, None

        # Set event depth and min/max epicentral distances
        # according to phase (see Wilson et. al., 2006)
        # and time window before (tz) and after (ta) first arrival
        self.ta = 120
        if self.phase == 'P':
            self.maxdepth = None
            self.min_epid = 28.1
            self.max_epid = 95.8
            self.tz = 30
        elif self.phase == 'S':
            self.maxdepth = 300
            self.min_epid = 55
            self.max_epid = 80
            self.tz = 120  # (see Yuan et al. 2006)
        elif self.phase.upper() == 'SCS':
            self.maxdepth = 300
            self.min_epid = 50
            self.max_epid = 75
            self.tz = 120
        elif self.phase.upper() == 'SKS':
            # (see Zhang et. al. (2014))
            self.maxdepth = 300
            self.min_epid = 90
            self.max_epid = 120
            self.tz = 120
        else:
            raise NameError(
                'The phase', self.phase,
                """is not valid or not implemented yet.""")

        # network and station filters
        self.network = network
        self.station = station

        # Server settings
        # 2021/02/16 Events only from IRIS as the USGS webservice tends to
        # be unstable and mixing different services will lead to a messed db
        self.webclient = Webclient('IRIS')

        self.waveform_client = waveform_client
        self.re_client = re_client

        # Download or process available data?
        if evtcat:
            self.evtcat = read_events(os.path.join(self.evtloc, evtcat))
        else:
            self.download_eventcat()

    def download_eventcat(self):
        """
        Download the event catalogue from IRIS for the configured time
        window and constraints, splitting requests longer than 20 years
        into chunks and retrying on interrupted connections. The resulting
        catalogue is stored on ``self.evtcat`` and written to ``evtloc``
        as QuakeML (file name is the current timestamp, no extension).
        """
        event_cat_done = False
        while not event_cat_done:
            try:
                # Check length of request and split if longer than 20yrs.
                a = 20 * 365.25 * 24 * 3600  # 20 years in seconds
                if self.endtime - self.starttime > a:
                    # Request is too big, break it down into several
                    # requests of at most 20 years each.
                    starttimes = [self.starttime, self.starttime + a]
                    while self.endtime - starttimes[-1] > a:
                        starttimes.append(starttimes[-1] + a)
                    # End times are the next chunk's start, plus the
                    # overall end time for the final chunk.
                    endtimes = []
                    endtimes.extend(starttimes[1:])
                    endtimes.append(self.endtime)

                    # Query chunk by chunk, merging into one catalogue.
                    self.evtcat = Catalog()
                    for st, et in zip(starttimes, endtimes):
                        self.evtcat.extend(
                            self.webclient.get_events(
                                starttime=st, endtime=et,
                                minlatitude=self.eMINLAT,
                                maxlatitude=self.eMAXLAT,
                                minlongitude=self.eMINLON,
                                maxlongitude=self.eMAXLON,
                                minmagnitude=self.minmag,
                                maxmagnitude=10, maxdepth=self.maxdepth))
                    event_cat_done = True
                else:
                    # Short enough for a single request.
                    self.evtcat = self.webclient.get_events(
                        starttime=self.starttime, endtime=self.endtime,
                        minlatitude=self.eMINLAT,
                        maxlatitude=self.eMAXLAT,
                        minlongitude=self.eMINLON,
                        maxlongitude=self.eMAXLON,
                        minmagnitude=self.minmag,
                        maxmagnitude=10, maxdepth=self.maxdepth)
                    event_cat_done = True
            except IncompleteRead:
                # Server interrupted connection, just try again
                msg = "Server interrupted connection, restarting download..."
                warn(msg, UserWarning)
                print(msg)
                continue

        os.makedirs(self.evtloc, exist_ok=True)
        # check if there is a better format for event catalog
        self.evtcat.write(
            os.path.join(
                self.evtloc, datetime.now().strftime("%Y%m%dT%H%M%S")),
            format="QUAKEML")

    def download_waveforms(self, verbose: bool = False):
        """
        Start the download of waveforms and response files.

        Parameters
        ----------
        verbose : Bool, optional
            Set True if you wish to log the output of the obspy
            MassDownloader.
        """
        downloadwav(
            self.phase, self.min_epid, self.max_epid, self.model,
            self.evtcat, self.tz, self.ta, self.statloc, self.rawloc,
            self.waveform_client, network=self.network,
            station=self.station, logdir=self.logdir, debug=self.debug,
            verbose=verbose, saveasdf=False)

    def preprocess(self, hc_filt: float or int or None = None):
        """
        Preprocess an existing database. With parameters defined in self.

        Parameters
        ----------
        hc_filt : float or int or None, optional
            Highcut frequency to filter with right before deconvolution.
            Recommended if time domain deconvolution is used. For spectral
            division, filtering can still be done after deconvolution (i.e.
            set in :func:`~pyglimer.ccp.ccp.CCPStack.compute_stack()`).
            Value for PRFs should usually be lower than 2 Hz and for SRFs
            lower than .4 Hz, by default None.
        """
        # Bare names inside a method are not looked up in the class
        # namespace, so this calls the module-level ``preprocess``
        # function (imported elsewhere in the file), not this method.
        preprocess(
            self.phase, self.rot, self.pol, 0.05, self.evtcat, self.model,
            'hann', self.tz, self.ta, self.statloc, self.rawloc,
            self.preproloc, self.rfloc, self.deconmeth, hc_filt,
            netrestr=self.network, statrestr=self.station,
            logdir=self.logdir, debug=self.debug)