Code Example #1
def test_weird_sensitivity():
    datafiles, origin = read_data_dir("fdsn", "us70008dx7", "SL.KOGS*.mseed")
    streams = []
    for datafile in datafiles:
        streams += read_obspy(datafile)
    sc = StreamCollection(streams)
    psc = process_streams(sc, origin)
    channel = psc[0].select(component="E")[0]
    assert_almost_equal(channel.data.max(), 62900.197618074293)
Code Example #2
def test_weird_sensitivity():
    datafiles, origin = read_data_dir('fdsn', 'us70008dx7', 'SL.KOGS*.mseed')
    streams = []
    for datafile in datafiles:
        streams += read_obspy(datafile)
    sc = StreamCollection(streams)
    psc = process_streams(sc, origin)
    channel = psc[0].select(component='E')[0]
    assert_almost_equal(channel.data.max(), 62900.191900393373)
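Examples #1 and #2 are the same test from two different versions of the library; only the expected peak value changed, and only from the third decimal place on. NumPy's assert_almost_equal applies an absolute tolerance of 1.5e-7 by default (decimal=7), so even that small drift fails the check, which is why the expected value must be updated whenever the processing changes. A minimal standalone sketch of that behavior:

import numpy as np
from numpy.testing import assert_almost_equal

# Passes: the values agree to well within the default decimal=7 tolerance.
assert_almost_equal(62900.197618074293, 62900.197618074300)

# Raises AssertionError: the two versions' peaks differ at the third
# decimal place, far coarser than the default tolerance.
try:
    assert_almost_equal(62900.197618074293, 62900.191900393373)
except AssertionError:
    print("peak value drifted between versions")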
Code Example #3
def test():
    datafiles, origin = read_data_dir("fdsn", "nc72282711", "BK.CMB*.mseed")
    streams = []
    for datafile in datafiles:
        streams += read_obspy(datafile)

    assert streams[0].get_id() == "BK.CMB.HN"

    datafiles, origin = read_data_dir("fdsn", "nc72282711", "TA.M04C*.mseed")
    streams = []
    for datafile in datafiles:
        streams += read_obspy(datafile)

    assert streams[0].get_id() == "TA.M04C.HN"

    # test assignment of Z channel
    datafiles, origin = read_data_dir("fdsn", "nc73300395", "BK.VALB*.mseed")
    streams = []
    for datafile in datafiles:
        streams += read_obspy(datafile)

    # get all channel names
    channels = sorted([st[0].stats.channel for st in streams])
    assert channels == ["HN2", "HN3", "HNZ"]
Code Example #4
def test_sac_csn():
    # This reads in example SAC data from the Community Seismic Network
    # (http://csn.caltech.edu/), which does not provide a separate
    # metadata file.
    datafiles, origin = read_data_dir("csn", "ci38457511", "*.sac")
    datafiles.sort()
    traces = []
    for d in datafiles:
        traces.append(read_obspy(d)[0][0])

    tr_amax = np.zeros(len(traces))
    for i, tr in enumerate(traces):
        tr_amax[i] = np.max(np.abs(tr.data))

    target_amax = np.array([4.3384003e-09, 3.42233e-09, 1.0121747e-07])
    np.testing.assert_allclose(target_amax, tr_amax)
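For comparison, the peak amplitudes asserted above can be reproduced on any SAC file with plain ObsPy: SAC headers travel inside the file itself and surface under tr.stats.sac, which is why no separate metadata file is needed. A sketch, assuming a local file named example.sac (hypothetical path):

import numpy as np
from obspy import read

tr = read("example.sac")[0]     # hypothetical file name
peak = np.max(np.abs(tr.data))  # same amplitude measure as the test
print(sorted(tr.stats.sac.keys()))  # SAC headers embedded in the file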
Code Example #5
    def retrieveData(self):
        """Retrieve data from many FDSN services, turn into StreamCollection.

        Args:
            event (dict):
                Best dictionary matching input event, fields as above
                in return of getMatchingEvents().

        Returns:
            StreamCollection: StreamCollection object.
        """
        # Bail out if FDSNFetcher not configured
        if 'FDSNFetcher' not in self.config['fetchers']:
            return
        rawdir = self.rawdir
        if self.rawdir is None:
            rawdir = tempfile.mkdtemp()
        else:
            if not os.path.isdir(rawdir):
                os.makedirs(rawdir)

        # Use the mass downloader to retrieve data of interest from any FDSN
        # service.
        origin_time = UTCDateTime(self.time)

        # The ObsPy mass downloader has its own logger - grab that stream
        # and write it to our own log file
        ldict = logging.Logger.manager.loggerDict
        if OBSPY_LOGGER in ldict:
            root = logging.getLogger()
            fhandler = root.handlers[0]
            obspy_logger = logging.getLogger(OBSPY_LOGGER)
            obspy_stream_handler = obspy_logger.handlers[0]
            obspy_logger.removeHandler(obspy_stream_handler)
            obspy_logger.addHandler(fhandler)

        # Circular domain around the epicenter.
        domain = CircularDomain(latitude=self.lat,
                                longitude=self.lon,
                                minradius=0,
                                maxradius=self.radius)

        min_dist = self.minimum_interstation_distance_in_m
        restrictions = Restrictions(
            # Define the temporal bounds of the waveform data.
            starttime=origin_time - self.time_before,
            endtime=origin_time + self.time_after,
            network=self.network,
            station='*',
            location='*',
            location_priorities=['*'],
            reject_channels_with_gaps=self.reject_channels_with_gaps,
            # Any trace that is shorter than 95 % of the
            # desired total duration will be discarded.
            minimum_length=self.minimum_length,
            sanitize=self.sanitize,
            minimum_interstation_distance_in_m=min_dist,
            exclude_networks=self.exclude_networks,
            exclude_stations=self.exclude_stations,
            channel_priorities=self.channels)

        # For each of the providers, check if we have a username and password
        # provided in the config. If we do, initialize the client with the
        # username and password. Otherwise, use the default initialization.
        client_list = []
        for provider_str in URL_MAPPINGS.keys():
            if provider_str == GEO_NET_ARCHIVE_KEY:
                dt = UTCDateTime.utcnow() - UTCDateTime(self.time)
                if dt < GEONET_ARCHIVE_DAYS:
                    provider_str = GEONET_REALTIME_URL
            try:
                fdsn_config = self.config['fetchers']['FDSNFetcher']
                if provider_str in fdsn_config:
                    client = Client(
                        provider_str,
                        user=fdsn_config[provider_str]['user'],
                        password=fdsn_config[provider_str]['password'])
                else:
                    client = Client(provider_str)
                client_list.append(client)
            # If the FDSN service is down an FDSNException is raised; a
            # KeyError means the provider's config entry is incomplete.
            except (FDSNException, KeyError):
                logging.warning('Unable to initialize client %s' % provider_str)

        if client_list:
            # Pass off the initialized clients to the Mass Downloader
            mdl = MassDownloader(providers=client_list)

            logging.info('Downloading new MiniSEED files...')
            # The data will be downloaded to the ``./waveforms/`` and
            # ``./stations/`` folders with automatically chosen file names.
            mdl.download(domain,
                         restrictions,
                         mseed_storage=rawdir,
                         stationxml_storage=rawdir)

            seed_files = glob.glob(os.path.join(rawdir, '*.mseed'))
            streams = []
            for seed_file in seed_files:
                try:
                    tstreams = read_obspy(seed_file, self.config)
                except BaseException as e:
                    tstreams = None
                    fmt = 'Could not read seed file %s - "%s"'
                    logging.info(fmt % (seed_file, str(e)))
                if tstreams is None:
                    continue
                else:
                    streams += tstreams

            stream_collection = StreamCollection(
                streams=streams, drop_non_free=self.drop_non_free)
            return stream_collection
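Stripped of the fetcher's config plumbing and logging, the core ObsPy mass-downloader calls in the method above reduce to a sketch like the following (the event time, epicenter, and search radius are placeholders):

from obspy import UTCDateTime
from obspy.clients.fdsn.mass_downloader import (
    CircularDomain, MassDownloader, Restrictions)

origin_time = UTCDateTime("2019-07-06T03:19:53")  # placeholder event time
domain = CircularDomain(latitude=35.77, longitude=-117.60,  # placeholder epicenter
                        minradius=0.0, maxradius=1.0)
restrictions = Restrictions(
    starttime=origin_time - 30,
    endtime=origin_time + 420,
    network="*", station="*", location="*",
    location_priorities=["*"],
    reject_channels_with_gaps=True,
    # Traces shorter than 95% of the requested window are discarded.
    minimum_length=0.95,
    sanitize=True,
    channel_priorities=["HN?", "BN?"])
mdl = MassDownloader(providers=["IRIS"])
mdl.download(domain, restrictions,
             mseed_storage="waveforms", stationxml_storage="stations")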
Code Example #6
def test_channel_exclusion():
    exclude_patterns = ["*.*.??.???"]
    datafiles, origin = read_data_dir("fdsn", "se60247871", "US.LRAL*.mseed")
    streams = []
    for datafile in datafiles:
        tstreams = read_obspy(datafile, exclude_patterns=exclude_patterns)
        if tstreams is None:
            continue
        else:
            streams += tstreams
    assert len(streams) == 0

    exclude_patterns = ["*.*.??.LN?"]
    datafiles, origin = read_data_dir("fdsn", "se60247871", "US.LRAL*.mseed")
    streams = []
    for datafile in datafiles:
        tstreams = read_obspy(datafile, exclude_patterns=exclude_patterns)
        if tstreams is None:
            continue
        else:
            streams += tstreams
    assert len(streams) == 0

    exclude_patterns = ["*.*.??.LN?"]
    datafiles, origin = read_data_dir("fdsn", "nc72282711", "BK.CMB*.mseed")
    streams = []
    for datafile in datafiles:
        tstreams = read_obspy(datafile, exclude_patterns=exclude_patterns)
        if tstreams is None:
            continue
        else:
            streams += tstreams
    assert len(streams) == 3

    exclude_patterns = ["*.*.??.[BH]NZ"]
    datafiles, origin = read_data_dir("fdsn", "ci38445975", "CI.MIKB*.mseed")
    streams = []
    for datafile in datafiles:
        tstreams = read_obspy(datafile, exclude_patterns=exclude_patterns)
        if tstreams is None:
            continue
        else:
            streams += tstreams
    assert len(streams) == 4

    exclude_patterns = ["US.*.??.???"]
    datafiles, origin = read_data_dir("fdsn", "se60247871", "US.LRAL*.mseed")
    streams = []
    for datafile in datafiles:
        tstreams = read_obspy(datafile, exclude_patterns=exclude_patterns)
        if tstreams is None:
            continue
        else:
            streams += tstreams
    assert len(streams) == 0

    exclude_patterns = ["*.LRAL.??.???"]
    datafiles, origin = read_data_dir("fdsn", "se60247871", "US.LRAL*.mseed")
    streams = []
    for datafile in datafiles:
        tstreams = read_obspy(datafile, exclude_patterns=exclude_patterns)
        if tstreams is None:
            continue
        else:
            streams += tstreams
    assert len(streams) == 0

    exclude_patterns = ["*.*.40.???"]
    datafiles, origin = read_data_dir("fdsn", "nc73300395", "BK.VALB*.mseed")
    streams = []
    for datafile in datafiles:
        tstreams = read_obspy(datafile, exclude_patterns=exclude_patterns)
        if tstreams is None:
            continue
        else:
            streams += tstreams
    assert len(streams) == 0

    exclude_patterns = ["US.LRAL.20.LNZ"]
    datafiles, origin = read_data_dir("fdsn", "se60247871", "US.LRAL*.mseed")
    streams = []
    for datafile in datafiles:
        tstreams = read_obspy(datafile, exclude_patterns=exclude_patterns)
        if tstreams is None:
            continue
        else:
            streams += tstreams
    assert len(streams) == 2

    exclude_patterns = ["*.*.??.BN?", "*.*.??.HN?"]
    datafiles, origin = read_data_dir("fdsn", "ci38445975", "CI.MIKB*.mseed")
    streams = []
    for datafile in datafiles:
        tstreams = read_obspy(datafile, exclude_patterns=exclude_patterns)
        if tstreams is None:
            continue
        else:
            streams += tstreams
    assert len(streams) == 0
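The exclusion patterns above are NET.STA.LOC.CHA wildcards. A minimal sketch of the matching these assertions imply, using Python's fnmatch (the library's actual implementation may differ):

import fnmatch

def is_excluded(trace_id, exclude_patterns):
    # trace_id is a SEED id like "US.LRAL.20.LNZ"
    return any(fnmatch.fnmatch(trace_id, p) for p in exclude_patterns)

assert is_excluded("US.LRAL.20.LNZ", ["*.*.??.LN?"])
assert is_excluded("CI.MIKB.00.HNZ", ["*.*.??.[BH]NZ"])
assert not is_excluded("BK.CMB.00.HNE", ["*.*.??.LN?"])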
Code Example #7
    def retrieveData(self):
        """Retrieve data from many FDSN services, turn into StreamCollection.

        Args:
            event (dict):
                Best dictionary matching input event, fields as above
                in return of getMatchingEvents().

        Returns:
            StreamCollection: StreamCollection object.
        """
        # Bail out if FDSNFetcher not configured
        if "FDSNFetcher" not in self.config["fetchers"]:
            return

        fdsn_conf = self.config["fetchers"]["FDSNFetcher"]
        rawdir = self.rawdir

        if self.rawdir is None:
            rawdir = tempfile.mkdtemp()
        else:
            if not os.path.isdir(rawdir):
                os.makedirs(rawdir)

        # Use the mass downloader to retrieve data of interest from any FDSN
        # service.
        origin_time = UTCDateTime(self.time)

        # The ObsPy mass downloader has its own logger - grab that stream
        # and write it to our own log file. Get the root logger up front;
        # it is also needed below for the debug-level checks.
        root = logging.getLogger()
        ldict = logging.Logger.manager.loggerDict
        if OBSPY_LOGGER in ldict:
            fhandler = root.handlers[0]
            obspy_logger = logging.getLogger(OBSPY_LOGGER)
            try:
                obspy_stream_handler = obspy_logger.handlers[0]
                obspy_logger.removeHandler(obspy_stream_handler)
            except IndexError:
                pass

            obspy_logger.addHandler(fhandler)

        # Build the search domain: circular around the epicenter, or
        # rectangular from explicit bounds.
        if fdsn_conf["domain"]["type"] == "circular":
            dconf = fdsn_conf["domain"]["circular"]
            if dconf["use_epicenter"]:
                dconf["latitude"] = self.lat
                dconf["longitude"] = self.lon
            dconf.pop("use_epicenter")
            domain = CircularDomain(**dconf)
        elif fdsn_conf["domain"]["type"] == "rectangular":
            dconf = fdsn_conf["domain"]["rectangular"]
            domain = RectangularDomain(**dconf)
        else:
            raise ValueError(
                'Domain type must be either "circular" or "rectangular".')

        rconf = fdsn_conf["restrictions"]

        rconf["starttime"] = origin_time - rconf["time_before"]
        rconf["endtime"] = origin_time + rconf["time_after"]
        rconf.pop("time_before")
        rconf.pop("time_after")

        restrictions = Restrictions(**rconf)

        # For each of the providers, check if we have a username and password
        # provided in the config. If we do, initialize the client with the
        # username and password. Otherwise, use the default initialization.
        # Copy the mapping so we do not mutate ObsPy's global URL_MAPPINGS.
        providers = dict(URL_MAPPINGS)
        if "IRISPH5" in providers:
            del providers["IRISPH5"]

        client_list = []
        for provider_str in providers.keys():
            if provider_str == GEO_NET_ARCHIVE_KEY:
                dt = UTCDateTime.utcnow() - UTCDateTime(self.time)
                if dt < GEONET_ARCHIVE_DAYS:
                    provider_str = GEONET_REALTIME_URL
            try:
                fdsn_config = self.config["fetchers"]["FDSNFetcher"]
                if provider_str in fdsn_config:
                    if logging.getLevelName(root.level) == "DEBUG":
                        client = Client(
                            provider_str,
                            user=fdsn_config[provider_str]["user"],
                            password=fdsn_config[provider_str]["password"],
                            debug=True,
                        )
                    else:
                        client = Client(
                            provider_str,
                            user=fdsn_config[provider_str]["user"],
                            password=fdsn_config[provider_str]["password"],
                        )
                else:
                    if logging.getLevelName(root.level) == "DEBUG":
                        client = Client(provider_str, debug=True)
                    else:
                        client = Client(provider_str)

                client_list.append(client)
            # If the FDSN service is down an FDSNException is raised; a
            # KeyError means the provider's config entry is incomplete.
            except (FDSNException, KeyError):
                logging.warning(f"Unable to initialize client {provider_str}")

        if client_list:
            # Pass off the initialized clients to the Mass Downloader
            stdout_redirected = False
            if logging.getLevelName(root.level) == "DEBUG":
                log_file = None
                for handler in root.handlers:
                    if hasattr(handler, "baseFilename"):
                        log_file = getattr(handler, "baseFilename")
                if log_file is not None:
                    # Route the downloader's debug prints to our log file
                    sys.stdout = open(log_file, "a")
                    stdout_redirected = True
                mdl = MassDownloader(providers=client_list, debug=True)
            else:
                try:
                    # Need to turn off built-in logging for ObsPy>=1.3.0
                    mdl = MassDownloader(providers=client_list,
                                         configure_logging=False)
                except TypeError:
                    # For ObsPy<1.3.0 the configure_logging parameter doesn't exist
                    mdl = MassDownloader(providers=client_list)

            logging.info("Downloading new MiniSEED files...")
            # The data will be downloaded to the ``./waveforms/`` and
            # ``./stations/`` folders with automatically chosen file names.
            mdl.download(domain,
                         restrictions,
                         mseed_storage=rawdir,
                         stationxml_storage=rawdir)
            # Restore stdout only if we redirected it above; closing the
            # real stdout unconditionally would break later output.
            if stdout_redirected:
                sys.stdout.close()
                sys.stdout = sys.__stdout__

            if self.stream_collection:
                seed_files = glob.glob(os.path.join(rawdir, "*.mseed"))
                streams = []
                for seed_file in seed_files:
                    try:
                        tstreams = read_obspy(seed_file, self.config)
                    except BaseException as e:
                        tstreams = None
                        fmt = 'Could not read seed file %s - "%s"'
                        logging.info(fmt % (seed_file, str(e)))
                    if tstreams is None:
                        continue
                    else:
                        streams += tstreams

                stream_collection = StreamCollection(
                    streams=streams, drop_non_free=self.drop_non_free)
                return stream_collection
            else:
                return None
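Unlike Example #5, which hard-codes the domain and restrictions, this version reads them from a config dictionary. The keys it accesses imply a shape roughly like the following hypothetical fragment; any names beyond those actually read in the code are guesses:

# Hypothetical config fragment matching the keys read above
config = {
    "fetchers": {
        "FDSNFetcher": {
            "domain": {
                "type": "circular",  # or "rectangular"
                "circular": {
                    "use_epicenter": True,  # fill lat/lon from the event
                    "latitude": None,
                    "longitude": None,
                    "minradius": 0.0,
                    "maxradius": 3.0,
                },
                "rectangular": {
                    "minlatitude": 33.0,
                    "maxlatitude": 37.0,
                    "minlongitude": -121.0,
                    "maxlongitude": -115.0,
                },
            },
            "restrictions": {
                "time_before": 30.0,   # seconds before origin time
                "time_after": 420.0,   # seconds after origin time
                # plus any other obspy Restrictions keyword arguments
            },
        }
    }
}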