Example #1
def test():
    datafiles, origin = read_data_dir('fdsn', 'nc72282711', 'BK.CMB*.mseed')
    streams = []
    for datafile in datafiles:
        streams += read_fdsn(datafile)

    assert streams[0].get_id() == 'BK.CMB.HN'

    datafiles, origin = read_data_dir('fdsn', 'nc72282711', 'TA.M04C*.mseed')
    streams = []
    for datafile in datafiles:
        streams += read_fdsn(datafile)

    assert streams[0].get_id() == 'TA.M04C.HN'

    # test assignment of Z channel
    datafiles, origin = read_data_dir('fdsn', 'nc73300395', 'BK.VALB*.mseed')
    streams = []
    for datafile in datafiles:
        streams += read_fdsn(datafile)

    # get all channel names
    channels = sorted([st[0].stats.channel for st in streams])
    assert channels == ['HN2', 'HN3', 'HNZ']

    # DEBUGGING
    sc = StreamCollection(streams)
    psc = process_streams(sc, origin)
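
Once process_streams has run, the processed collection can be inspected with the same calls Example #3 below uses (select() on a processed stream and the trace .data array); a minimal sketch, assuming the BK.VALB data loaded above:

# Hedged follow-up sketch reusing the select()/.data pattern from Example #3.
vertical = psc[0].select(component='Z')[0]
print(vertical.get_id(), vertical.data.max())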
Example #2
def test():
    datafiles, origin = read_data_dir('fdsn', 'nc72282711', 'BK.CMB*.mseed')
    streams = []
    for datafile in datafiles:
        streams += read_fdsn(datafile)

    assert streams[0].get_id() == 'BK.CMB.HN'

    datafiles, origin = read_data_dir('fdsn', 'nc72282711', 'TA.M04C*.mseed')
    streams = []
    for datafile in datafiles:
        streams += read_fdsn(datafile)

    assert streams[0].get_id() == 'TA.M04C.HN'
Example #3
def test_weird_sensitivity():
    datafiles, origin = read_data_dir('fdsn', 'us70008dx7', 'SL.KOGS*.mseed')
    streams = []
    for datafile in datafiles:
        streams += read_fdsn(datafile)
    sc = StreamCollection(streams)
    psc = process_streams(sc, origin)
    channel = psc[0].select(component='E')[0]
    assert_almost_equal(channel.data.max(), 62900.191900393373)
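
assert_almost_equal here is NumPy's numpy.testing.assert_almost_equal, which by default requires abs(desired - actual) < 1.5 * 10**-7 (decimal=7). A small standalone sketch of that behavior:

from numpy.testing import assert_almost_equal

# Passes: the difference (~1e-8) is under the default decimal=7 threshold.
assert_almost_equal(1.23456789, 1.23456788)

# A looser absolute tolerance for large amplitudes like the one tested above.
assert_almost_equal(62900.1919004, 62900.1919003, decimal=6)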
Example #4
def test():
    datafiles, origin = read_data_dir('fdsn', 'nc72282711', 'BK.CMB*.mseed')
    streams = []
    for datafile in datafiles:
        streams += read_fdsn(datafile)

    assert streams[0].get_id() == 'BK.CMB.HN'

    datafiles, origin = read_data_dir('fdsn', 'nc72282711', 'TA.M04C*.mseed')
    streams = []
    for datafile in datafiles:
        streams += read_fdsn(datafile)

    assert streams[0].get_id() == 'TA.M04C.HN'

    # DEBUGGING
    sc = StreamCollection(streams)
    psc = process_streams(sc, origin)
Example #5
def test_channel_exclusion():
    exclude_patterns = ['*.*.??.???']
    datafiles, origin = read_data_dir('fdsn', 'se60247871', 'US.LRAL*.mseed')
    streams = []
    for datafile in datafiles:
        tstreams = read_fdsn(datafile, exclude_patterns=exclude_patterns)
        if tstreams is None:
            continue
        streams += tstreams
    assert len(streams) == 0

    exclude_patterns = ['*.*.??.LN?']
    datafiles, origin = read_data_dir('fdsn', 'se60247871', 'US.LRAL*.mseed')
    streams = []
    for datafile in datafiles:
        tstreams = read_fdsn(datafile, exclude_patterns=exclude_patterns)
        if tstreams is None:
            continue
        streams += tstreams
    assert len(streams) == 0

    exclude_patterns = ['*.*.??.LN?']
    datafiles, origin = read_data_dir('fdsn', 'nc72282711', 'BK.CMB*.mseed')
    streams = []
    for datafile in datafiles:
        tstreams = read_fdsn(datafile, exclude_patterns=exclude_patterns)
        if tstreams is None:
            continue
        streams += tstreams
    assert len(streams) == 3

    exclude_patterns = ['*.*.??.[BH]NZ']
    datafiles, origin = read_data_dir('fdsn', 'ci38445975', 'CI.MIKB*.mseed')
    streams = []
    for datafile in datafiles:
        tstreams = read_fdsn(datafile, exclude_patterns=exclude_patterns)
        if tstreams is None:
            continue
        streams += tstreams
    assert len(streams) == 4

    exclude_patterns = ['US.*.??.???']
    datafiles, origin = read_data_dir('fdsn', 'se60247871', 'US.LRAL*.mseed')
    streams = []
    for datafile in datafiles:
        tstreams = read_fdsn(datafile, exclude_patterns=exclude_patterns)
        if tstreams is None:
            continue
        streams += tstreams
    assert len(streams) == 0

    exclude_patterns = ['*.LRAL.??.???']
    datafiles, origin = read_data_dir('fdsn', 'se60247871', 'US.LRAL*.mseed')
    streams = []
    for datafile in datafiles:
        tstreams = read_fdsn(datafile, exclude_patterns=exclude_patterns)
        if tstreams is None:
            continue
        streams += tstreams
    assert len(streams) == 0

    exclude_patterns = ['*.*.40.???']
    datafiles, origin = read_data_dir('fdsn', 'nc73300395', 'BK.VALB*.mseed')
    streams = []
    for datafile in datafiles:
        tstreams = read_fdsn(datafile, exclude_patterns=exclude_patterns)
        if tstreams is None:
            continue
        streams += tstreams
    assert len(streams) == 0

    exclude_patterns = ['US.LRAL.20.LNZ']
    datafiles, origin = read_data_dir('fdsn', 'se60247871', 'US.LRAL*.mseed')
    streams = []
    for datafile in datafiles:
        tstreams = read_fdsn(datafile, exclude_patterns=exclude_patterns)
        if tstreams is None:
            continue
        streams += tstreams
    assert len(streams) == 2

    exclude_patterns = ['*.*.??.BN?', '*.*.??.HN?']
    datafiles, origin = read_data_dir('fdsn', 'ci38445975', 'CI.MIKB*.mseed')
    streams = []
    for datafile in datafiles:
        tstreams = read_fdsn(datafile, exclude_patterns=exclude_patterns)
        if tstreams is None:
            continue
        streams += tstreams
    assert len(streams) == 0
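
The exclude_patterns strings follow the NETWORK.STATION.LOCATION.CHANNEL layout of SEED ids, with Unix-shell wildcards: * matches any run of characters, ? exactly one, and [BH] a character class. How read_fdsn matches them internally is not shown in these examples; a sketch assuming standard fnmatch semantics and hypothetical ids:

from fnmatch import fnmatch

# Hypothetical SEED ids, chosen only to illustrate the patterns above.
ids = ['CI.MIKB.00.HNZ', 'CI.MIKB.00.HNE', 'US.LRAL.20.LNZ']
print([i for i in ids if fnmatch(i, '*.*.??.[BH]NZ')])
# -> ['CI.MIKB.00.HNZ']: HNE fails the trailing Z, LNZ fails the [BH] class.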
Example #6
    def retrieveData(self):
        """Retrieve data from many FDSN services and assemble it into a
        StreamCollection.

        Returns:
            StreamCollection: StreamCollection object.
        """
        rawdir = self.rawdir
        if self.rawdir is None:
            rawdir = tempfile.mkdtemp()
        else:
            if not os.path.isdir(rawdir):
                os.makedirs(rawdir)

        # Use the mass downloader to retrieve data of interest from any FDSN
        # service.
        origin_time = UTCDateTime(self.time)

        # The ObsPy mass downloader has its own logger - swap out its stream
        # handler so its output goes to our own log file.
        ldict = logging.Logger.manager.loggerDict
        if OBSPY_LOGGER in ldict:
            root = logging.getLogger()
            fhandler = root.handlers[0]
            obspy_logger = logging.getLogger(OBSPY_LOGGER)
            obspy_stream_handler = obspy_logger.handlers[0]
            obspy_logger.removeHandler(obspy_stream_handler)
            obspy_logger.addHandler(fhandler)

        # Circular domain around the epicenter.
        domain = CircularDomain(latitude=self.lat,
                                longitude=self.lon,
                                minradius=0,
                                maxradius=self.radius)

        restrictions = Restrictions(
            # Define the temporal bounds of the waveform data.
            starttime=origin_time - self.time_before,
            endtime=origin_time + self.time_after,
            network=self.network,
            station='*',
            location='*',
            location_priorities=['*'],
            reject_channels_with_gaps=self.reject_channels_with_gaps,
            # Any trace shorter than minimum_length times the desired total
            # duration will be discarded.
            minimum_length=self.minimum_length,
            sanitize=self.sanitize,
            minimum_interstation_distance_in_m=self.minimum_interstation_distance_in_m,
            exclude_networks=self.exclude_networks,
            exclude_stations=self.exclude_stations,
            channel_priorities=self.channels)

        # For each of the providers, check if we have a username and password
        # provided in the config. If we do, initialize the client with the
        # username and password. Otherwise, use default initialization.
        fdsn_config = self.config['fetchers']['FDSNFetcher']
        client_list = []
        for provider_str in URL_MAPPINGS.keys():
            try:
                if provider_str in fdsn_config:
                    client = Client(
                        provider_str,
                        user=fdsn_config[provider_str]['user'],
                        password=fdsn_config[provider_str]['password'])
                else:
                    client = Client(provider_str)
                client_list.append(client)
            # If the FDSN service is down, then an FDSNException is raised
            except FDSNException:
                logging.warning('Unable to initialize client %s' % provider_str)

        # Pass off the initialized clients to the Mass Downloader
        mdl = MassDownloader(providers=client_list)

        # Leftover files from a previous run can collide with the new
        # download, so remove existing mseed files from the raw directory.
        logging.info('Deleting old MiniSEED files...')
        delete_old_files(rawdir, '*.mseed')

        # remove existing png files as well
        logging.info('Deleting old PNG files...')
        delete_old_files(rawdir, '*.png')

        # remove existing xml files as well
        logging.info('Deleting old XML files...')
        delete_old_files(rawdir, '*.xml')

        logging.info('Downloading new MiniSEED files...')
        # Both the MiniSEED waveforms and the StationXML files are written
        # to rawdir with automatically chosen file names.
        mdl.download(domain,
                     restrictions,
                     mseed_storage=rawdir,
                     stationxml_storage=rawdir)

        seed_files = glob.glob(os.path.join(rawdir, '*.mseed'))
        streams = []
        for seed_file in seed_files:
            try:
                tstreams = read_fdsn(seed_file)
            except Exception as e:
                fmt = 'Could not read seed file %s - "%s"'
                logging.info(fmt % (seed_file, str(e)))
                continue
            streams += tstreams

        stream_collection = StreamCollection(streams=streams,
                                             drop_non_free=self.drop_non_free)
        return stream_collection
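
delete_old_files is a project helper whose body is not shown in these examples. A plausible minimal equivalent, assuming it simply globs a filename pattern under a directory and unlinks the matches (the real helper may do more):

import glob
import os

def delete_old_files_sketch(directory, pattern):
    # Hypothetical stand-in for the delete_old_files helper used above.
    for path in glob.glob(os.path.join(directory, pattern)):
        os.remove(path)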
Example #7
    def retrieveData(self):
        """Retrieve data from many FDSN services and assemble it into a
        StreamCollection.

        Returns:
            StreamCollection: StreamCollection object.
        """
        rawdir = self.rawdir
        if self.rawdir is None:
            rawdir = tempfile.mkdtemp()
        else:
            if not os.path.isdir(rawdir):
                os.makedirs(rawdir)

        # Use the mass downloader to retrieve data of interest from any FDSN
        # service.
        origin_time = UTCDateTime(self.time)

        # The ObsPy mass downloader has its own logger - swap out its stream
        # handler so its output goes to our own log file.
        ldict = logging.Logger.manager.loggerDict
        if OBSPY_LOGGER in ldict:
            root = logging.getLogger()
            fhandler = root.handlers[0]
            obspy_logger = logging.getLogger(OBSPY_LOGGER)
            obspy_stream_handler = obspy_logger.handlers[0]
            obspy_logger.removeHandler(obspy_stream_handler)
            obspy_logger.addHandler(fhandler)

        # Circular domain around the epicenter.
        domain = CircularDomain(latitude=self.lat, longitude=self.lon,
                                minradius=0, maxradius=self.radius)

        restrictions = Restrictions(
            # Define the temporal bounds of the waveform data.
            starttime=origin_time - self.time_before,
            endtime=origin_time + self.time_after,
            network=self.network, station='*',
            location='*',
            location_priorities=['*'],
            reject_channels_with_gaps=self.reject_channels_with_gaps,
            # Any trace shorter than minimum_length times the desired total
            # duration will be discarded.
            minimum_length=self.minimum_length,
            sanitize=self.sanitize,
            minimum_interstation_distance_in_m=self.minimum_interstation_distance_in_m,
            exclude_networks=self.exclude_networks,
            exclude_stations=self.exclude_stations,
            channel_priorities=self.channels)

        # DEBUGGING
        pp = pprint.PrettyPrinter()
        pp.pprint(domain.__dict__)
        print('***************************')
        pp.pprint(restrictions.__dict__)
        # DEBUGGING

        # No specified providers will result in all known ones being queried.
        mdl = MassDownloader()

        # Leftover files from a previous run can collide with the new
        # download, so remove existing mseed files from the raw directory.
        logging.info('Deleting old MiniSEED files...')
        delete_old_files(rawdir, '*.mseed')

        # remove existing png files as well
        logging.info('Deleting old PNG files...')
        delete_old_files(rawdir, '*.png')

        # remove existing xml files as well
        logging.info('Deleting old XML files...')
        delete_old_files(rawdir, '*.xml')

        logging.info('Downloading new MiniSEED files...')
        # Both the MiniSEED waveforms and the StationXML files are written
        # to rawdir with automatically chosen file names.
        mdl.download(domain, restrictions, mseed_storage=rawdir,
                     stationxml_storage=rawdir)

        seed_files = glob.glob(os.path.join(rawdir, '*.mseed'))
        streams = []
        for seed_file in seed_files:
            tstreams = read_fdsn(seed_file)
            streams += tstreams

        stream_collection = StreamCollection(streams=streams,
                                             drop_non_free=self.drop_non_free)
        return stream_collection
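
Example #6 builds explicit Client objects (so credentials from the config can be attached) and hands them to MassDownloader, while Example #7 omits providers, so every data center ObsPy knows about is queried. A minimal sketch of the two constructor forms, using ObsPy's documented providers argument:

from obspy.clients.fdsn.mass_downloader import MassDownloader

# Query every FDSN data center ObsPy knows about (Example #7's approach).
mdl_all = MassDownloader()

# Query only named providers; strings and preconfigured Client objects are
# both accepted (closer to Example #6's approach).
mdl_some = MassDownloader(providers=['IRIS', 'NCEDC'])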