def test_sac_template_gen(self):
    """Test template generation."""
    sampling_rate = 20
    template_length = 8
    for event_id in ['2014p611252', 'No_head']:
        sac_glob = os.path.join(
            os.path.abspath(os.path.dirname(__file__)),
            'test_data', 'SAC', event_id, '*')
        # Exercise the three accepted input flavours: a list of file
        # paths, a list of Streams, and a single Stream.
        path_list = glob.glob(sac_glob)
        stream_list = [read(path) for path in glob.glob(sac_glob)]
        single_stream = read(sac_glob)
        for sac_input in (path_list, stream_list, single_stream):
            templates = from_sac(
                sac_input, lowcut=2.0, highcut=8.0,
                samp_rate=sampling_rate, filt_order=4,
                length=template_length, swin='all', prepick=0.1,
                debug=0, plot=False)
            self.assertEqual(len(templates), 1)
            template = templates[0]
            # One channel in the template per pick extracted from the
            # SAC headers.
            self.assertEqual(
                len(template), len(sactoevent(single_stream).picks))
            for trace in template:
                self.assertEqual(
                    len(trace.data), template_length * sampling_rate)
def template_gen(method, lowcut, highcut, samp_rate, filt_order,
                 length, prepick, swin="all", process_len=86400,
                 all_horiz=False, delayed=True, plot=False, plotdir=None,
                 return_event=False, min_snr=None, parallel=False,
                 num_cores=False, save_progress=False, skip_short_chans=False,
                 **kwargs):
    """
    Generate processed and cut waveforms for use as templates.

    :type method: str
    :param method:
        Template generation method, must be one of ('from_client',
        'from_seishub', 'from_sac', 'from_meta_file'). - Each method requires
        associated arguments, see note below.
    :type lowcut: float
    :param lowcut: Low cut (Hz), if set to None will not apply a lowcut.
    :type highcut: float
    :param highcut: High cut (Hz), if set to None will not apply a highcut.
    :type samp_rate: float
    :param samp_rate: New sampling rate in Hz.
    :type filt_order: int
    :param filt_order: Filter level (number of corners).
    :type length: float
    :param length: Length of template waveform in seconds.
    :type prepick: float
    :param prepick: Pre-pick time in seconds
    :type swin: str
    :param swin:
        P, S, P_all, S_all or all, defaults to all: see note in
        :func:`eqcorrscan.core.template_gen.template_gen`
    :type process_len: int
    :param process_len: Length of data in seconds to download and process.
    :type all_horiz: bool
    :param all_horiz:
        To use both horizontal channels even if there is only a pick on one
        of them. Defaults to False.
    :type delayed: bool
    :param delayed: If True, each channel will begin relative to it's own \
        pick-time, if set to False, each channel will begin at the same time.
    :type plot: bool
    :param plot: Plot templates or not.
    :type plotdir: str
    :param plotdir:
        The path to save plots to. If `plotdir=None` (default) then the
        figure will be shown on screen.
    :type return_event: bool
    :param return_event:
        Whether to return the event and process length or not.
    :type min_snr: float
    :param min_snr:
        Minimum signal-to-noise ratio for a channel to be included in the
        template, where signal-to-noise ratio is calculated as the ratio of
        the maximum amplitude in the template window to the rms amplitude in
        the whole window given.
    :type parallel: bool
    :param parallel: Whether to process data in parallel or not.
    :type num_cores: int
    :param num_cores:
        Number of cores to try and use, if False and parallel=True, will use
        either all your cores, or as many traces as in the data (whichever is
        smaller).
    :type save_progress: bool
    :param save_progress:
        Whether to save the resulting templates at every data step or not.
        Useful for long-running processes.
    :type skip_short_chans: bool
    :param skip_short_chans:
        Whether to ignore channels that have insufficient length data or not.
        Useful when the quality of data is not known, e.g. when downloading
        old, possibly triggered data from a datacentre

    :returns: List of :class:`obspy.core.stream.Stream` Templates
    :rtype: list

    .. note::
        *Method specific arguments:*

        - `from_client` requires:
            :param str client_id:
                string passable by obspy to generate Client, or a Client
                instance
            :param `obspy.core.event.Catalog` catalog:
                Catalog of events to generate template for
            :param float data_pad: Pad length for data-downloads in seconds
        - `from_seishub` requires:
            :param str url: url to seishub database
            :param `obspy.core.event.Catalog` catalog:
                Catalog of events to generate template for
            :param float data_pad: Pad length for data-downloads in seconds
        - `from_sac` requires:
            :param list sac_files:
                osbpy.core.stream.Stream of sac waveforms, or list of paths
                to sac waveforms.
            .. note::
                See `eqcorrscan.utils.sac_util.sactoevent` for details on
                how pick information is collected.
        - `from_meta_file` requires:
            :param str meta_file:
                Path to obspy-readable event file, or an obspy Catalog
            :param `obspy.core.stream.Stream` st:
                Stream containing waveform data for template. Note that this
                should be the same length of stream as you will use for the
                continuous detection, e.g. if you detect in day-long files,
                give this a day-long file!
            :param bool process:
                Whether to process the data or not, defaults to True.

    .. note::
        process_len should be set to the same length as used when computing
        detections using match_filter.match_filter, e.g. if you read in
        day-long data for match_filter, process_len should be 86400.

    .. rubric:: Example

    >>> from obspy.clients.fdsn import Client
    >>> from eqcorrscan.core.template_gen import template_gen
    >>> client = Client('NCEDC')
    >>> catalog = client.get_events(eventid='72572665', includearrivals=True)
    >>> # We are only taking two picks for this example to speed up the
    >>> # example, note that you don't have to!
    >>> catalog[0].picks = catalog[0].picks[0:2]
    >>> templates = template_gen(
    ...    method='from_client', catalog=catalog, client_id='NCEDC',
    ...    lowcut=2.0, highcut=9.0, samp_rate=20.0, filt_order=4, length=3.0,
    ...    prepick=0.15, swin='all', process_len=300, all_horiz=True)
    >>> templates[0].plot(equal_scale=False, size=(800,600)) # doctest: +SKIP

    .. figure:: ../../plots/template_gen.from_client.png

    .. rubric:: Example

    >>> from obspy import read
    >>> from eqcorrscan.core.template_gen import template_gen
    >>> # Get the path to the test data
    >>> import eqcorrscan
    >>> import os
    >>> TEST_PATH = os.path.dirname(eqcorrscan.__file__) + '/tests/test_data'
    >>> st = read(TEST_PATH + '/WAV/TEST_/' +
    ...           '2013-09-01-0410-35.DFDPC_024_00')
    >>> quakeml = TEST_PATH + '/20130901T041115.xml'
    >>> templates = template_gen(
    ...    method='from_meta_file', meta_file=quakeml, st=st, lowcut=2.0,
    ...    highcut=9.0, samp_rate=20.0, filt_order=3, length=2, prepick=0.1,
    ...    swin='S', all_horiz=True)
    >>> print(len(templates[0]))
    10
    >>> templates = template_gen(
    ...    method='from_meta_file', meta_file=quakeml, st=st, lowcut=2.0,
    ...    highcut=9.0, samp_rate=20.0, filt_order=3, length=2, prepick=0.1,
    ...    swin='S_all', all_horiz=True)
    >>> print(len(templates[0]))
    15

    .. rubric:: Example

    >>> from eqcorrscan.core.template_gen import template_gen
    >>> import glob
    >>> # Get all the SAC-files associated with one event.
    >>> sac_files = glob.glob(TEST_PATH + '/SAC/2014p611252/*')
    >>> templates = template_gen(
    ...    method='from_sac', sac_files=sac_files, lowcut=2.0, highcut=10.0,
    ...    samp_rate=25.0, filt_order=4, length=2.0, swin='all', prepick=0.1,
    ...    all_horiz=True)
    >>> print(templates[0][0].stats.sampling_rate)
    25.0
    >>> print(len(templates[0]))
    15
    """
    client_map = {'from_client': 'fdsn', 'from_seishub': 'seishub'}
    assert method in ('from_client', 'from_seishub', 'from_meta_file',
                      'from_sac')
    if not isinstance(swin, list):
        swin = [swin]
    process = True
    if method in ['from_client', 'from_seishub']:
        catalog = kwargs.get('catalog', Catalog())
        data_pad = kwargs.get('data_pad', 90)
        # Group catalog into days and only download the data once per day
        sub_catalogs = _group_events(
            catalog=catalog, process_len=process_len, template_length=length,
            data_pad=data_pad)
        if method == 'from_client':
            if isinstance(kwargs.get('client_id'), str):
                client = FDSNClient(kwargs.get('client_id', None))
            else:
                client = kwargs.get('client_id', None)
            available_stations = []
        else:
            client = SeisHubClient(kwargs.get('url', None), timeout=10)
            available_stations = client.waveform.get_station_ids()
    elif method == 'from_meta_file':
        if isinstance(kwargs.get('meta_file'), Catalog):
            catalog = kwargs.get('meta_file')
        elif kwargs.get('meta_file'):
            catalog = read_events(kwargs.get('meta_file'))
        elif kwargs.get('catalog'):
            # NOTE(review): if neither meta_file nor catalog is given,
            # catalog stays unbound and a NameError follows below — confirm
            # whether an explicit error is wanted here.
            catalog = kwargs.get('catalog')
        sub_catalogs = [catalog]
        st = kwargs.get('st', Stream())
        process = kwargs.get('process', True)
    elif method == 'from_sac':
        sac_files = kwargs.get('sac_files')
        if isinstance(sac_files, list):
            if isinstance(sac_files[0], (Stream, Trace)):
                # This is a list of streams...
                st = Stream(sac_files[0])
                for sac_file in sac_files[1:]:
                    st += sac_file
            else:
                sac_files = [read(sac_file)[0] for sac_file in sac_files]
                st = Stream(sac_files)
        else:
            st = sac_files
        # Make an event object...
        catalog = Catalog([sactoevent(st)])
        sub_catalogs = [catalog]

    temp_list = []
    process_lengths = []

    if "P_all" in swin or "S_all" in swin or all_horiz:
        all_channels = True
    else:
        all_channels = False
    for sub_catalog in sub_catalogs:
        if method in ['from_seishub', 'from_client']:
            Logger.info("Downloading data")
            st = _download_from_client(
                client=client, client_type=client_map[method],
                catalog=sub_catalog, data_pad=data_pad,
                process_len=process_len,
                available_stations=available_stations,
                all_channels=all_channels)
        Logger.info('Pre-processing data')
        st.merge()
        if len(st) == 0:
            Logger.info("No data")
            continue
        if process:
            data_len = max(
                [len(tr.data) / tr.stats.sampling_rate for tr in st])
            # Heuristic: ~86400 s of data is treated as a day-long record.
            if 80000 < data_len < 90000:
                daylong = True
                starttime = min([tr.stats.starttime for tr in st])
                min_delta = min([tr.stats.delta for tr in st])
                # Cope with the common starttime less than 1 sample before
                # the start of day.
                if (starttime + min_delta).date > starttime.date:
                    starttime = (starttime + min_delta)
                # Check if this is stupid:
                if abs(starttime - UTCDateTime(starttime.date)) > 600:
                    # BUG FIX: this was a bare leftover `print()` debug
                    # statement; route the offset through the module logger
                    # instead (custom_template_gen dropped it entirely).
                    Logger.debug(abs(starttime - UTCDateTime(starttime.date)))
                    daylong = False
                starttime = starttime.date
            else:
                daylong = False
            # Check if the required amount of data have been downloaded -
            # skip channels if arg set.
            if skip_short_chans:
                _st = Stream()
                for tr in st:
                    if np.ma.is_masked(tr.data):
                        _len = np.ma.count(tr.data) * tr.stats.delta
                    else:
                        _len = tr.stats.npts * tr.stats.delta
                    if _len < process_len * .8:
                        Logger.info(
                            "Data for {0} are too short, skipping".format(
                                tr.id))
                    else:
                        _st += tr
                st = _st
                if len(st) == 0:
                    Logger.info("No data")
                    continue
            if daylong:
                st = pre_processing.dayproc(
                    st=st, lowcut=lowcut, highcut=highcut,
                    filt_order=filt_order, samp_rate=samp_rate,
                    parallel=parallel, starttime=UTCDateTime(starttime),
                    num_cores=num_cores)
            else:
                st = pre_processing.shortproc(
                    st=st, lowcut=lowcut, highcut=highcut,
                    filt_order=filt_order, parallel=parallel,
                    samp_rate=samp_rate, num_cores=num_cores)
        data_start = min([tr.stats.starttime for tr in st])
        data_end = max([tr.stats.endtime for tr in st])

        for event in sub_catalog:
            stations, channels, st_stachans = ([], [], [])
            if len(event.picks) == 0:
                Logger.warning(
                    'No picks for event {0}'.format(event.resource_id))
                continue
            use_event = True
            # Check that the event is within the data
            for pick in event.picks:
                if not data_start < pick.time < data_end:
                    Logger.warning(
                        "Pick outside of data span: Pick time {0} Start "
                        "time {1} End time: {2}".format(
                            str(pick.time), str(data_start), str(data_end)))
                    use_event = False
            if not use_event:
                Logger.error('Event is not within data time-span')
                continue
            # Read in pick info
            Logger.debug("I have found the following picks")
            for pick in event.picks:
                if not pick.waveform_id:
                    Logger.warning(
                        'Pick not associated with waveforms, will not use:'
                        ' {0}'.format(pick))
                    continue
                Logger.debug(pick)
                stations.append(pick.waveform_id.station_code)
                channels.append(pick.waveform_id.channel_code)
            # Check to see if all picks have a corresponding waveform
            for tr in st:
                st_stachans.append('.'.join(
                    [tr.stats.station, tr.stats.channel]))
            # Cut and extract the templates
            template = _template_gen(
                event.picks, st, length, swin, prepick=prepick, plot=plot,
                all_horiz=all_horiz, delayed=delayed, min_snr=min_snr,
                plotdir=plotdir)
            process_lengths.append(len(st[0].data) / samp_rate)
            temp_list.append(template)
        if save_progress:
            if not os.path.isdir("eqcorrscan_temporary_templates"):
                os.makedirs("eqcorrscan_temporary_templates")
            for template in temp_list:
                # BUG FIX: the raw UTCDateTime str contains ':' which is
                # invalid in Windows file names; use the strftime form
                # already used by custom_template_gen.
                template.write(
                    "eqcorrscan_temporary_templates{0}{1}.ms".format(
                        os.path.sep,
                        template[0].stats.starttime.strftime(
                            "%Y-%m-%dT%H%M%S")),
                    format="MSEED")
        del st
    if return_event:
        return temp_list, catalog, process_lengths
    return temp_list
def from_sac(sac_files, lowcut, highcut, samp_rate, filt_order, length, swin,
             prepick=0.05, debug=0, plot=False):
    """Function to read picks and waveforms from SAC data, and generate a \
    template from these.

    :type sac_files: list or stream
    :param sac_files: List or stream of sac waveforms, or list of paths to \
        sac waveforms.
    :type lowcut: float
    :param lowcut: Low cut (Hz), if set to None will look in template \
        defaults file
    :type highcut: float
    :param highcut: High cut (Hz), if set to None will look in template \
        defaults file
    :type samp_rate: float
    :param samp_rate: New sampling rate in Hz, if set to None will look in \
        template defaults file
    :type filt_order: int
    :param filt_order: Filter level, if set to None will look in \
        template defaults file
    :type swin: str
    :param swin: Either 'all', 'P' or 'S', to select which phases to output.
    :type length: float
    :param length: Extract length in seconds, if None will look in template \
        defaults file.
    :type prepick: float
    :param prepick: Length to extract prior to the pick in seconds.
    :type debug: int
    :param debug: Debug level, higher number=more output.
    :type plot: bool
    :param plot: Turns template plotting on or off.

    :returns: obspy.Stream Newly cut template

    :raises TypeError: If sac_files is neither a list nor a Stream.

    .. note:: This functionality is not supported for obspy versions below \
        1.0.0 as reference times are not read in by SACIO, which are needed \
        for defining pick times.
    """
    from obspy import read, Stream
    from eqcorrscan.utils.sac_util import sactoevent
    from eqcorrscan.utils import pre_processing
    # Check whether sac_files is a stream or a list
    if isinstance(sac_files, list):
        # BUG FIX: referencing `unicode` unguarded raises NameError on
        # Python 3 whenever the first element is not a str (the `or`
        # short-circuit only saved the plain-str case).
        try:
            string_types = (str, unicode)  # noqa: F821 - Python 2
        except NameError:
            string_types = (str,)  # Python 3 has no `unicode`
        if isinstance(sac_files[0], string_types):
            sac_files = [read(sac_file)[0] for sac_file in sac_files]
        if isinstance(sac_files[0], Stream):
            # This is a list of streams...
            st = sac_files[0]
            for sac_file in sac_files[1:]:
                st += sac_file
        else:
            # BUG FIX: previously `st = Stream(sac_files)` ran
            # unconditionally and clobbered the stream accumulated above
            # when a list of Streams was given (see the corrected structure
            # in template_gen's 'from_sac' branch).
            st = Stream(sac_files)
    elif isinstance(sac_files, Stream):
        st = sac_files
    else:
        # Previously fell through with `st` unbound -> opaque NameError.
        raise TypeError(
            'sac_files must be a list of paths/streams or a Stream, '
            'got {0}'.format(type(sac_files)))
    # Make an event object...
    event = sactoevent(st)
    # Process the data
    st.merge(fill_value='interpolate')
    st = pre_processing.shortproc(st, lowcut, highcut, filt_order,
                                  samp_rate, debug)
    template = _template_gen(picks=event.picks, st=st, length=length,
                             swin=swin, prepick=prepick, plot=plot)
    return template
def custom_template_gen(method, lowcut, highcut, samp_rate, filt_order,
                        length, prepick, swin="all", process_len=86400,
                        all_horiz=False, delayed=True, plot=False,
                        plotdir=None, return_event=False, min_snr=None,
                        parallel=False, num_cores=False, save_progress=False,
                        skip_short_chans=False, **kwargs):
    """
    Generate processed and cut waveforms for use as templates.

    :type method: str
    :param method:
        Template generation method, must be one of ('from_client',
        'from_seishub', 'from_sac', 'from_meta_file'). - Each method requires
        associated arguments, see note below.
    :type lowcut: float
    :param lowcut: Low cut (Hz), if set to None will not apply a lowcut.
    :type highcut: float
    :param highcut: High cut (Hz), if set to None will not apply a highcut.
    :type samp_rate: float
    :param samp_rate: New sampling rate in Hz.
    :type filt_order: int
    :param filt_order: Filter level (number of corners).
    :type length: float
    :param length: Length of template waveform in seconds.
    :type prepick: float
    :param prepick: Pre-pick time in seconds
    :type swin: str
    :param swin:
        P, S, P_all, S_all or all, defaults to all: see note in
        :func:`eqcorrscan.core.template_gen.template_gen`
    :type process_len: int
    :param process_len: Length of data in seconds to download and process.
    :type all_horiz: bool
    :param all_horiz:
        To use both horizontal channels even if there is only a pick on one
        of them. Defaults to False.
    :type delayed: bool
    :param delayed: If True, each channel will begin relative to it's own \
        pick-time, if set to False, each channel will begin at the same time.
    :type plot: bool
    :param plot: Plot templates or not.
    :type plotdir: str
    :param plotdir:
        The path to save plots to. If `plotdir=None` (default) then the
        figure will be shown on screen.
    :type return_event: bool
    :param return_event:
        Whether to return the event and process length or not.
    :type min_snr: float
    :param min_snr:
        Minimum signal-to-noise ratio for a channel to be included in the
        template, where signal-to-noise ratio is calculated as the ratio of
        the maximum amplitude in the template window to the rms amplitude in
        the whole window given.
    :type parallel: bool
    :param parallel: Whether to process data in parallel or not.
    :type num_cores: int
    :param num_cores:
        Number of cores to try and use, if False and parallel=True, will use
        either all your cores, or as many traces as in the data (whichever is
        smaller).
    :type save_progress: bool
    :param save_progress:
        Whether to save the resulting templates at every data step or not.
        Useful for long-running processes.
    :type skip_short_chans: bool
    :param skip_short_chans:
        Whether to ignore channels that have insufficient length data or not.
        Useful when the quality of data is not known, e.g. when downloading
        old, possibly triggered data from a datacentre

    :returns: List of :class:`obspy.core.stream.Stream` Templates
    :rtype: list
    """
    # Maps method name to the client flavour used by _download_from_client.
    client_map = {'from_client': 'fdsn', 'from_seishub': 'seishub'}
    assert method in ('from_client', 'from_seishub', 'from_meta_file',
                      'from_sac')
    if not isinstance(swin, list):
        swin = [swin]
    # process may be switched off by the 'from_meta_file' kwargs below.
    process = True
    if method in ['from_client', 'from_seishub']:
        catalog = kwargs.get('catalog', Catalog())
        data_pad = kwargs.get('data_pad', 90)
        # Group catalog into days and only download the data once per day
        sub_catalogs = _group_events(
            catalog=catalog, process_len=process_len, template_length=length,
            data_pad=data_pad)
        if method == 'from_client':
            # Accept either a client-id string or a ready-made Client.
            if isinstance(kwargs.get('client_id'), str):
                client = FDSNClient(kwargs.get('client_id', None))
            else:
                client = kwargs.get('client_id', None)
            available_stations = []
        else:
            client = SeisHubClient(kwargs.get('url', None), timeout=10)
            available_stations = client.waveform.get_station_ids()
    elif method == 'from_meta_file':
        # meta_file may be a Catalog, a path to an event file, or absent
        # (fall back to an explicit 'catalog' kwarg).
        if isinstance(kwargs.get('meta_file'), Catalog):
            catalog = kwargs.get('meta_file')
        elif kwargs.get('meta_file'):
            catalog = read_events(kwargs.get('meta_file'))
        else:
            catalog = kwargs.get('catalog')
        sub_catalogs = [catalog]
        st = kwargs.get('st', Stream())
        process = kwargs.get('process', True)
    elif method == 'from_sac':
        sac_files = kwargs.get('sac_files')
        if isinstance(sac_files, list):
            if isinstance(sac_files[0], (Stream, Trace)):
                # This is a list of streams...
                st = Stream(sac_files[0])
                for sac_file in sac_files[1:]:
                    st += sac_file
            else:
                # Assume a list of file paths readable by obspy.read.
                sac_files = [read(sac_file)[0] for sac_file in sac_files]
                st = Stream(sac_files)
        else:
            st = sac_files
        # Make an event object...
        catalog = Catalog([sactoevent(st)])
        sub_catalogs = [catalog]

    temp_list = []
    process_lengths = []
    # Unlike template_gen, also accumulate the events actually used so the
    # returned catalog matches the returned templates one-to-one.
    catalog_out = Catalog()

    if "P_all" in swin or "S_all" in swin or all_horiz:
        all_channels = True
    else:
        all_channels = False
    for sub_catalog in sub_catalogs:
        if method in ['from_seishub', 'from_client']:
            Logger.info("Downloading data")
            st = _download_from_client(
                client=client, client_type=client_map[method],
                catalog=sub_catalog, data_pad=data_pad,
                process_len=process_len,
                available_stations=available_stations,
                all_channels=all_channels)
        Logger.info('Pre-processing data')
        st.merge()
        if len(st) == 0:
            Logger.info("No data")
            continue
        if process:
            data_len = max(
                [len(tr.data) / tr.stats.sampling_rate for tr in st])
            # NOTE(review): 80000-90000 s is used as "approximately one day"
            # to decide between dayproc and shortproc — confirm tolerance.
            if 80000 < data_len < 90000:
                daylong = True
                starttime = min([tr.stats.starttime for tr in st])
                min_delta = min([tr.stats.delta for tr in st])
                # Cope with the common starttime less than 1 sample before
                # the start of day.
                if (starttime + min_delta).date > starttime.date:
                    starttime = (starttime + min_delta)
                # Check if this is stupid:
                if abs(starttime - UTCDateTime(starttime.date)) > 600:
                    daylong = False
                starttime = starttime.date
            else:
                daylong = False
            # Check if the required amount of data have been downloaded -
            # skip channels if arg set.
            for tr in st:
                # Masked arrays mark gaps; count only real samples.
                if np.ma.is_masked(tr.data):
                    _len = np.ma.count(tr.data) * tr.stats.delta
                else:
                    _len = tr.stats.npts * tr.stats.delta
                if _len < process_len * .8:
                    Logger.info(
                        "Data for {0} are too short, skipping".format(
                            tr.id))
                    if skip_short_chans:
                        # Leave the trace untrimmed and move on; note the
                        # short trace remains in st.
                        continue
                # Trim to enforce process-len
                tr.data = tr.data[0:int(
                    process_len * tr.stats.sampling_rate)]
            if len(st) == 0:
                Logger.info("No data")
                continue
            if daylong:
                st = pre_processing.dayproc(
                    st=st, lowcut=lowcut, highcut=highcut,
                    filt_order=filt_order, samp_rate=samp_rate,
                    parallel=parallel, starttime=UTCDateTime(starttime),
                    num_cores=num_cores)
            else:
                st = pre_processing.shortproc(
                    st=st, lowcut=lowcut, highcut=highcut,
                    filt_order=filt_order, parallel=parallel,
                    samp_rate=samp_rate, num_cores=num_cores)
        data_start = min([tr.stats.starttime for tr in st])
        data_end = max([tr.stats.endtime for tr in st])

        for event in sub_catalog:
            stations, channels, st_stachans = ([], [], [])
            if len(event.picks) == 0:
                Logger.warning(
                    'No picks for event {0}'.format(event.resource_id))
                continue
            use_event = True
            # Check that the event is within the data
            for pick in event.picks:
                if not data_start < pick.time < data_end:
                    Logger.warning(
                        "Pick outside of data span: Pick time {0} Start "
                        "time {1} End time: {2}".format(
                            str(pick.time), str(data_start), str(data_end)))
                    use_event = False
            if not use_event:
                Logger.error('Event is not within data time-span')
                continue
            # Read in pick info
            Logger.debug("I have found the following picks")
            for pick in event.picks:
                if not pick.waveform_id:
                    Logger.warning(
                        'Pick not associated with waveforms, will not use:'
                        ' {0}'.format(pick))
                    continue
                Logger.debug(pick)
                stations.append(pick.waveform_id.station_code)
                channels.append(pick.waveform_id.channel_code)
            # Check to see if all picks have a corresponding waveform
            for tr in st:
                st_stachans.append('.'.join(
                    [tr.stats.station, tr.stats.channel]))
            # Cut and extract the templates
            template = _template_gen(
                event.picks, st, length, swin, prepick=prepick, plot=plot,
                all_horiz=all_horiz, delayed=delayed, min_snr=min_snr,
                plotdir=plotdir)
            process_lengths.append(len(st[0].data) / samp_rate)
            temp_list.append(template)
            catalog_out += event
        if save_progress:
            if not os.path.isdir("eqcorrscan_temporary_templates"):
                os.makedirs("eqcorrscan_temporary_templates")
            for template in temp_list:
                # strftime keeps ':' out of the file name (Windows-safe).
                template.write(
                    "eqcorrscan_temporary_templates{0}{1}.ms".format(
                        os.path.sep,
                        template[0].stats.starttime.strftime(
                            "%Y-%m-%dT%H%M%S")),
                    format="MSEED")
        del st
    if return_event:
        return temp_list, catalog_out, process_lengths
    return temp_list
def from_sac(sac_files, lowcut, highcut, samp_rate, filt_order, length, swin,
             prepick=0.05, debug=0, plot=False):
    """Function to read picks and waveforms from SAC data, and generate a \
    template from these. Usually sac_files is a list of all single-channel \
    SAC files for a given event, a single, multi-channel template will be \
    created from these traces.

    :type sac_files: list or stream
    :param sac_files: List or stream of sac waveforms, or list of paths to \
        sac waveforms.
    :type lowcut: float
    :param lowcut: Low cut (Hz), if set to None will look in template \
        defaults file
    :type highcut: float
    :param highcut: High cut (Hz), if set to None will look in template \
        defaults file
    :type samp_rate: float
    :param samp_rate: New sampling rate in Hz, if set to None will look in \
        template defaults file
    :type filt_order: int
    :param filt_order: Filter level, if set to None will look in \
        template defaults file
    :type swin: str
    :param swin: Either 'all', 'P' or 'S', to select which phases to output.
    :type length: float
    :param length: Extract length in seconds, if None will look in template \
        defaults file.
    :type prepick: float
    :param prepick: Length to extract prior to the pick in seconds.
    :type debug: int
    :param debug: Debug level, higher number=more output.
    :type plot: bool
    :param plot: Turns template plotting on or off.

    :returns: obspy.Stream Newly cut template

    :raises TypeError: If sac_files is neither a list nor a Stream.

    .. note:: This functionality is not supported for obspy versions below \
        1.0.0 as reference times are not read in by SACIO, which are needed \
        for defining pick times.
    """
    from obspy import read, Stream
    from eqcorrscan.utils.sac_util import sactoevent
    from eqcorrscan.utils import pre_processing
    # Check whether sac_files is a stream or a list
    if isinstance(sac_files, list):
        # BUG FIX: referencing `unicode` unguarded raises NameError on
        # Python 3 whenever the first element is not a str (the `or`
        # short-circuit only saved the plain-str case).
        try:
            string_types = (str, unicode)  # noqa: F821 - Python 2
        except NameError:
            string_types = (str,)  # Python 3 has no `unicode`
        if isinstance(sac_files[0], string_types):
            sac_files = [read(sac_file)[0] for sac_file in sac_files]
        if isinstance(sac_files[0], Stream):
            # This is a list of streams...
            st = sac_files[0]
            for sac_file in sac_files[1:]:
                st += sac_file
        else:
            # BUG FIX: previously `st = Stream(sac_files)` ran
            # unconditionally and clobbered the stream accumulated above
            # when a list of Streams was given (see the corrected structure
            # in template_gen's 'from_sac' branch).
            st = Stream(sac_files)
    elif isinstance(sac_files, Stream):
        st = sac_files
    else:
        # Previously fell through with `st` unbound -> opaque NameError.
        raise TypeError(
            'sac_files must be a list of paths/streams or a Stream, '
            'got {0}'.format(type(sac_files)))
    # Make an event object...
    event = sactoevent(st, debug=debug)
    # Process the data
    st.merge(fill_value='interpolate')
    st = pre_processing.shortproc(st, lowcut, highcut, filt_order,
                                  samp_rate, debug)
    template = _template_gen(picks=event.picks, st=st, length=length,
                             swin=swin, prepick=prepick, plot=plot,
                             debug=debug)
    return template
def from_sac(sac_files, lowcut, highcut, samp_rate, filt_order, length, swin,
             prepick=0.05, debug=0, plot=False):
    """
    Generate a multiplexed template from a list of SAC files.

    Function to read picks and waveforms from SAC data, and generate a \
    template from these. Usually sac_files is a list of all single-channel \
    SAC files for a given event, a single, multi-channel template will be \
    created from these traces.

    **All files listed in sac_files should be associated with a single
    event.**

    :type sac_files: list
    :param sac_files:
        obspy.core.stream.Stream of sac waveforms, or list of paths to
        sac waveforms.
    :type lowcut: float
    :param lowcut: Low cut (Hz), if set to None will look in template \
        defaults file
    :type highcut: float
    :param highcut: High cut (Hz), if set to None will look in template \
        defaults file
    :type samp_rate: float
    :param samp_rate: New sampling rate in Hz, if set to None will look in \
        template defaults file
    :type filt_order: int
    :param filt_order: Filter level, if set to None will look in \
        template defaults file
    :type swin: str
    :param swin: Either 'all', 'P' or 'S', to select which phases to output.
    :type length: float
    :param length: Extract length in seconds, if None will look in template \
        defaults file.
    :type prepick: float
    :param prepick: Length to extract prior to the pick in seconds.
    :type debug: int
    :param debug: Debug level, higher number=more output.
    :type plot: bool
    :param plot: Turns template plotting on or off.

    :returns: obspy.core.stream.Stream Newly cut template

    :raises TypeError: If sac_files is neither a list nor a Stream.

    .. note:: This functionality is not supported for obspy versions below \
        1.0.0 as reference times are not read in by SACIO, which are needed \
        for defining pick times.

    .. rubric:: Example

    >>> from eqcorrscan.core.template_gen import from_sac
    >>> import glob
    >>> # Get all the SAC-files associated with one event.
    >>> sac_files = glob.glob('eqcorrscan/tests/test_data/SAC/2014p611252/*')
    >>> template = from_sac(sac_files=sac_files, lowcut=2.0, highcut=10.0,
    ...                     samp_rate=25.0, filt_order=4, length=2.0,
    ...                     swin='all', prepick=0.1)
    >>> print(template[0].stats.sampling_rate)
    25.0
    >>> print(len(template))
    15
    """
    from obspy import read, Stream
    from eqcorrscan.utils.sac_util import sactoevent
    from eqcorrscan.utils import pre_processing
    # Check whether sac_files is a stream or a list
    if isinstance(sac_files, list):
        # BUG FIX: referencing `unicode` unguarded raises NameError on
        # Python 3 whenever the first element is not a str (the `or`
        # short-circuit only saved the plain-str case).
        try:
            string_types = (str, unicode)  # noqa: F821 - Python 2
        except NameError:
            string_types = (str,)  # Python 3 has no `unicode`
        if isinstance(sac_files[0], string_types):
            sac_files = [read(sac_file)[0] for sac_file in sac_files]
        if isinstance(sac_files[0], Stream):
            # This is a list of streams...
            st = sac_files[0]
            for sac_file in sac_files[1:]:
                st += sac_file
        else:
            # BUG FIX: previously `st = Stream(sac_files)` ran
            # unconditionally and clobbered the stream accumulated above
            # when a list of Streams was given (see the corrected structure
            # in template_gen's 'from_sac' branch).
            st = Stream(sac_files)
    elif isinstance(sac_files, Stream):
        st = sac_files
    else:
        # Previously fell through with `st` unbound -> opaque NameError.
        raise TypeError(
            'sac_files must be a list of paths/streams or a Stream, '
            'got {0}'.format(type(sac_files)))
    # Make an event object...
    event = sactoevent(st, debug=debug)
    # Process the data
    st.merge(fill_value='interpolate')
    st = pre_processing.shortproc(st, lowcut, highcut, filt_order,
                                  samp_rate, debug)
    template = _template_gen(picks=event.picks, st=st, length=length,
                             swin=swin, prepick=prepick, plot=plot,
                             debug=debug)
    return template