Example 1
def stationXML2stationDD(filename, output='station.dat'):
    '''
    Write the station.dat input file for ph2dt and hypodd.
    The input format is StationXML.

    :param filename: path to a StationXML file, or a list of such paths
    :param output: name of the output file

    :returns: ``output``
    '''

    if isinstance(filename, str):
        inv = stationxml.load_xml(filename=filename)
    elif isinstance(filename, list):
        inv = stationxml.load_xml(filename=filename[0])
        for fi in filename[1:]:
            inv.network_list.extend(
                stationxml.load_xml(filename=fi).network_list)
    else:
        raise TypeError('filename must be a string or a list of strings')

    outStat = []
    for netStat in inv.ns_code_list:
        s = [
            stat for stat in inv.get_pyrocko_stations()
            if netStat[0] == stat.network and netStat[1] == stat.station
        ][0]
        outStat.append('%s %s %s\n' % (netStat[0] + netStat[1], s.lat, s.lon))

    with open(output, 'w') as f:
        f.writelines(outStat)

    return output
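
A minimal usage sketch, assuming pyrocko is installed and that
'stations1.xml' and 'stations2.xml' are hypothetical StationXML files:

from pyrocko.io import stationxml  # used by stationXML2stationDD

# passing a list merges the network lists of several StationXML files
out = stationXML2stationDD(['stations1.xml', 'stations2.xml'],
                           output='station.dat')
print('station file written to', out)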
Example 2
    def test_conversions(self):

        from pyrocko import model
        from pyrocko.io import resp, enhanced_sacpz
        from pyrocko.io import stationxml

        t = util.str_to_time('2014-01-01 00:00:00')
        codes = 'GE', 'EIL', '', 'BHZ'

        resp_fpath = common.test_data_file('test1.resp')
        stations = [model.Station(
            *codes[:3],
            lat=29.669901,
            lon=34.951199,
            elevation=210.0,
            depth=0.0)]

        sx_resp = resp.make_stationxml(
            stations, resp.iload_filename(resp_fpath))

        pr_sx_resp = sx_resp.get_pyrocko_response(
            codes, time=t, fake_input_units='M/S')
        pr_evresp = trace.Evalresp(
            resp_fpath, nslc_id=codes, target='vel', time=t)

        sacpz_fpath = common.test_data_file('test1.sacpz')
        sx_sacpz = enhanced_sacpz.make_stationxml(
            enhanced_sacpz.iload_filename(sacpz_fpath))
        pr_sx_sacpz = sx_sacpz.get_pyrocko_response(
            codes, time=t, fake_input_units='M/S')
        pr_sacpz = trace.PoleZeroResponse(*pz.read_sac_zpk(sacpz_fpath))
        try:
            pr_sacpz.zeros.remove(0.0j)
        except ValueError:
            pr_sacpz.poles.append(0.0j)

        sxml_geofon_fpath = common.test_data_file('test1.stationxml')
        sx_geofon = stationxml.load_xml(filename=sxml_geofon_fpath)
        pr_sx_geofon = sx_geofon.get_pyrocko_response(
            codes, time=t, fake_input_units='M/S')

        sxml_iris_fpath = common.test_data_file('test2.stationxml')
        sx_iris = stationxml.load_xml(filename=sxml_iris_fpath)
        pr_sx_iris = sx_iris.get_pyrocko_response(
            codes, time=t, fake_input_units='M/S')

        freqs = num.logspace(num.log10(0.001), num.log10(1.0), num=1000)
        tf_ref = pr_evresp.evaluate(freqs)
        for pr in [pr_sx_resp, pr_sx_sacpz, pr_sacpz, pr_sx_geofon,
                   pr_sx_iris]:
            tf = pr.evaluate(freqs)
            # plot_tfs(freqs, [tf_ref, tf])
            assert cnumeqrel(tf_ref, tf, 0.01)
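
At its core the test exercises the uniform response lookup that
stationxml.load_xml enables; reduced to a sketch (file name and channel
codes taken from the test above):

from pyrocko import util
from pyrocko.io import stationxml

sx = stationxml.load_xml(filename='test1.stationxml')  # test data file
resp = sx.get_pyrocko_response(
    ('GE', 'EIL', '', 'BHZ'),
    time=util.str_to_time('2014-01-01 00:00:00'),
    fake_input_units='M/S')  # treat input as velocity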
Example 3
    def get_receivers(self):
        '''Aggregate receivers from different sources.'''

        fp = self.expand_path

        if self._receivers is None:
            self._receivers = list(self.receivers)
            if self.stations_path:
                for station in model.load_stations(fp(self.stations_path)):
                    self._receivers.append(
                        receiver.Receiver(codes=station.nsl(),
                                          lat=station.lat,
                                          lon=station.lon,
                                          z=station.depth))

            if self.stations_stationxml_path:
                sx = stationxml.load_xml(
                    filename=fp(self.stations_stationxml_path))
                for station in sx.get_pyrocko_stations():
                    self._receivers.append(
                        receiver.Receiver(codes=station.nsl(),
                                          lat=station.lat,
                                          lon=station.lon,
                                          z=station.depth))

        return self._receivers
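
The pattern above (StationXML -> pyrocko stations -> receiver objects)
also works standalone; a sketch, where receiver_cls stands in for the
receiver.Receiver class used in the snippet:

from pyrocko.io import stationxml

def receivers_from_stationxml(path, receiver_cls):
    # receiver_cls is assumed to accept codes, lat, lon and z,
    # like receiver.Receiver above
    sx = stationxml.load_xml(filename=path)
    return [
        receiver_cls(codes=s.nsl(), lat=s.lat, lon=s.lon, z=s.depth)
        for s in sx.get_pyrocko_stations()]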
Example 4
    def test_read_samples(self):
        ok = False
        for fn in ['geeil.iris.xml', 'geeil.geofon.xml']:
            fpath = common.test_data_file(fn)
            x = stationxml.load_xml(filename=fpath)
            for network in x.network_list:
                assert network.code == 'GE'
                for station in network.station_list:
                    assert station.code == 'EIL'
                    for channel in station.channel_list:
                        assert channel.code[:2] == 'BH'
                        for stage in channel.response.stage_list:
                            ok = True

            assert ok

            pstations = x.get_pyrocko_stations()
            assert len(pstations) in (3, 4)
            for s in x.get_pyrocko_stations():
                assert len(s.get_channels()) == 3

            assert len(
                x.get_pyrocko_stations(time=stt('2010-01-15 10:00:00'))) == 1

            new = stationxml.FDSNStationXML.from_pyrocko_stations(pstations)
            assert len(new.get_pyrocko_stations()) in (3, 4)
            for s in new.get_pyrocko_stations():
                assert len(s.get_channels()) == 3
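
The round trip exercised by this test can be reproduced in a few lines;
a sketch, assuming 'geeil.geofon.xml' from the test data set:

from pyrocko.io import stationxml

sx = stationxml.load_xml(filename='geeil.geofon.xml')
stations = sx.get_pyrocko_stations()
sx2 = stationxml.FDSNStationXML.from_pyrocko_stations(stations)
assert len(sx2.get_pyrocko_stations()) == len(stations)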
Example 6
File: fdsn.py Project: wsja/pyrocko
def station(url=g_url,
            site=g_default_site,
            majorversion=1,
            parsed=True,
            selection=None,
            **kwargs):

    url = fillurl(url, site, 'station', majorversion)

    params = fix_params(kwargs)

    if selection:
        lst = []
        for k, v in params.items():
            lst.append('%s=%s' % (k, v))

        for (network, station, location, channel, tmin, tmax) in selection:
            if location == '':
                location = '--'

            lst.append(' '.join((network, station, location, channel,
                                 sdatetime(tmin), sdatetime(tmax))))

        post = '\n'.join(lst)
        params = dict(post=post.encode())

    if parsed:
        from pyrocko.io import stationxml
        # when a selection is given, ``params`` holds only the POST body,
        # so ``format`` and ``level`` must be read from the original kwargs
        format = kwargs.get('format', 'xml')
        if format == 'text':
            if kwargs.get('level', 'station') == 'channel':
                return stationxml.load_channel_table(
                    stream=_request(url, **params))
            else:
                raise InvalidRequest('if format="text" shall be parsed, '
                                     'level="channel" is required')

        elif format == 'xml':
            assert kwargs.get('format', 'xml') == 'xml'
            return stationxml.load_xml(stream=_request(url, **params))
        else:
            raise InvalidRequest('format must be "xml" or "text"')
    else:
        return _request(url, **params)
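
A typical call to the helper defined above; the site name and time span
are assumptions:

from pyrocko import util

tmin = util.str_to_time('2014-01-01 00:00:00')
tmax = util.str_to_time('2014-01-02 00:00:00')

# one (network, station, location, channel, tmin, tmax) tuple per channel
selection = [('GE', 'EIL', '', 'BHZ', tmin, tmax)]
sx = station(site='geofon', level='response', selection=selection)
sx.dump_xml(filename='stations.geofon.xml')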
Example 7
    def get_stations(self):
        if self._stations is None:

            stations = []

            if self.stations_paths:
                for filename in self.stations_paths:
                    stations.extend(load_stations(filename))

            if self.stations_stationxml_paths:
                for filename in self.stations_stationxml_paths:
                    sxml = stationxml.load_xml(filename=filename)
                    stations.extend(sxml.get_pyrocko_stations())

            if self.pyrocko_stations:
                stations.extend(self.pyrocko_stations)

            self._stations = stations

        return self._stations
Example 8
def station(url=g_url, site=g_default_site, majorversion=1, parsed=True,
            selection=None, **kwargs):

    url = fillurl(url, site, 'station', majorversion)

    params = fix_params(kwargs)

    if selection:
        lst = []
        for k, v in params.items():
            lst.append('%s=%s' % (k, v))

        for (network, station, location, channel, tmin, tmax) in selection:
            if location == '':
                location = '--'

            lst.append(' '.join((network, station, location, channel,
                                 sdatetime(tmin), sdatetime(tmax))))

        post = '\n'.join(lst)
        params = dict(post=post.encode())

    if parsed:
        from pyrocko.io import stationxml
        format = kwargs.get('format', 'xml')
        if format == 'text':
            if kwargs.get('level', 'station') == 'channel':
                return stationxml.load_channel_table(
                    stream=_request(url, **params))
            else:
                raise InvalidRequest('if format="text" shall be parsed, '
                                     'level="channel" is required')

        elif format == 'xml':
            assert kwargs.get('format', 'xml') == 'xml'
            return stationxml.load_xml(stream=_request(url, **params))
        else:
            raise InvalidRequest('format must be "xml" or "text"')
    else:
        return _request(url, **params)
Example 9
def snuffler_from_commandline(args=None):
    if args is None:
        args = sys.argv[1:]

    usage = '''usage: %prog [options] waveforms ...'''
    parser = OptionParser(usage=usage)

    parser.add_option(
        '--format',
        dest='format',
        default='detect',
        choices=io.allowed_formats('load'),
        help='assume input files are of given FORMAT. Choices: %s'
             % io.allowed_formats('load', 'cli_help', 'detect'))

    parser.add_option(
        '--pattern',
        dest='regex',
        metavar='REGEX',
        help='only include files whose paths match REGEX')

    parser.add_option(
        '--stations',
        dest='station_fns',
        action='append',
        default=[],
        metavar='STATIONS',
        help='read station information from file STATIONS')

    parser.add_option(
        '--stationxml',
        dest='stationxml_fns',
        action='append',
        default=[],
        metavar='STATIONSXML',
        help='read station information from XML file STATIONSXML')

    parser.add_option(
        '--event', '--events',
        dest='event_fns',
        action='append',
        default=[],
        metavar='EVENT',
        help='read event information from file EVENT')

    parser.add_option(
        '--markers',
        dest='marker_fns',
        action='append',
        default=[],
        metavar='MARKERS',
        help='read marker information from file MARKERS')

    parser.add_option(
        '--follow',
        type='float',
        dest='follow',
        metavar='N',
        help='follow real time with a window of N seconds')

    parser.add_option(
        '--cache',
        dest='cache_dir',
        default=config.config().cache_dir,
        metavar='DIR',
        help='use directory DIR to cache trace metadata '
             '(default=\'%default\')')

    parser.add_option(
        '--force-cache',
        dest='force_cache',
        action='store_true',
        default=False,
        help='use the cache even when trace attribute spoofing is active '
             '(may have silly consequences)')

    parser.add_option(
        '--store-path',
        dest='store_path',
        metavar='PATH_TEMPLATE',
        help='store data received through streams to PATH_TEMPLATE')

    parser.add_option(
        '--store-interval',
        type='float',
        dest='store_interval',
        default=600,
        metavar='N',
        help='dump stream data to file every N seconds [default: %default]')

    parser.add_option(
        '--ntracks',
        type='int',
        dest='ntracks',
        default=24,
        metavar='N',
        help='initially use N waveform tracks in viewer [default: %default]')

    parser.add_option(
        '--opengl',
        dest='opengl',
        action='store_true',
        default=False,
        help='use OpenGL for drawing')

    parser.add_option(
        '--qt5',
        dest='gui_toolkit_qt5',
        action='store_true',
        default=False,
        help='use Qt5 for the GUI')

    parser.add_option(
        '--qt4',
        dest='gui_toolkit_qt4',
        action='store_true',
        default=False,
        help='use Qt4 for the GUI')

    parser.add_option(
        '--debug',
        dest='debug',
        action='store_true',
        default=False,
        help='print debugging information to stderr')

    options, args = parser.parse_args(list(args))

    if options.debug:
        util.setup_logging('snuffler', 'debug')
    else:
        util.setup_logging('snuffler', 'warning')

    if options.gui_toolkit_qt4:
        config.override_gui_toolkit = 'qt4'

    if options.gui_toolkit_qt5:
        config.override_gui_toolkit = 'qt5'

    this_pile = pile.Pile()
    stations = []
    for stations_fn in extend_paths(options.station_fns):
        stations.extend(model.station.load_stations(stations_fn))

    for stationxml_fn in extend_paths(options.stationxml_fns):
        stations.extend(
            stationxml.load_xml(
                filename=stationxml_fn).get_pyrocko_stations())

    events = []
    for event_fn in extend_paths(options.event_fns):
        events.extend(model.event.Event.load_catalog(event_fn))

    markers = []
    for marker_fn in extend_paths(options.marker_fns):
        markers.extend(marker.load_markers(marker_fn))

    return snuffle(
        this_pile,
        stations=stations,
        events=events,
        markers=markers,
        ntracks=options.ntracks,
        follow=options.follow,
        controls=True,
        opengl=options.opengl,
        paths=args,
        cache_dir=options.cache_dir,
        regex=options.regex,
        format=options.format,
        force_cache=options.force_cache,
        store_path=options.store_path,
        store_interval=options.store_interval)
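
Programmatic invocation mirrors the command line; file names here are
hypothetical:

# equivalent to: snuffler --stationxml=stations.xml traces.mseed
snuffler_from_commandline(['--stationxml=stations.xml', 'traces.mseed'])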
Example 10
def main():
    parser = OptionParser(usage=usage, description=description)

    parser.add_option('--force',
                      dest='force',
                      action='store_true',
                      default=False,
                      help='allow recreation of output <directory>')

    parser.add_option('--debug',
                      dest='debug',
                      action='store_true',
                      default=False,
                      help='print debugging information to stderr')

    parser.add_option('--dry-run',
                      dest='dry_run',
                      action='store_true',
                      default=False,
                      help='show available stations/channels and exit '
                      '(do not download waveforms)')

    parser.add_option('--continue',
                      dest='continue_',
                      action='store_true',
                      default=False,
                      help='continue download after an interruption')

    parser.add_option('--local-data',
                      dest='local_data',
                      action='append',
                      help='add file/directory with local data')

    parser.add_option('--local-stations',
                      dest='local_stations',
                      action='append',
                      help='add local stations file')

    parser.add_option('--selection',
                      dest='selection_file',
                      action='append',
                      help='add station selection file')

    parser.add_option(
        '--local-responses-resp',
        dest='local_responses_resp',
        action='append',
        help='add file/directory with local responses in RESP format')

    parser.add_option('--local-responses-pz',
                      dest='local_responses_pz',
                      action='append',
                      help='add file/directory with local pole-zero responses')

    parser.add_option(
        '--local-responses-stationxml',
        dest='local_responses_stationxml',
        help='add file with local response information in StationXML format')

    parser.add_option(
        '--window',
        dest='window',
        default='full',
        help='set time window to choose [full, p, "<time-start>,<time-end>"'
        '] (time format is YYYY-MM-DD HH:MM:SS)')

    parser.add_option(
        '--out-components',
        choices=['enu', 'rtu'],
        dest='out_components',
        default='rtu',
        help='set output component orientations to radial-transverse-up [rtu] '
        '(default) or east-north-up [enu]')

    parser.add_option('--out-units',
                      choices=['M', 'M/S', 'M/S**2'],
                      dest='output_units',
                      default='M',
                      help='set output units to displacement "M" (default),'
                      ' velocity "M/S" or acceleration "M/S**2"')

    parser.add_option(
        '--padding-factor',
        type=float,
        default=3.0,
        dest='padding_factor',
        help='extend time window on either side, in multiples of 1/<fmin_hz> '
        '(default: %default)')

    parser.add_option(
        '--zero-padding',
        dest='zero_pad',
        action='store_true',
        default=False,
        help='extend traces by zero-padding if clean restitution requires '
        'longer windows')

    parser.add_option(
        '--credentials',
        dest='user_credentials',
        action='append',
        default=[],
        metavar='SITE,USER,PASSWD',
        help='user credentials for specific site to access restricted data '
        '(this option can be repeated)')

    parser.add_option(
        '--token',
        dest='auth_tokens',
        metavar='SITE,FILENAME',
        action='append',
        default=[],
        help='user authentication token for specific site to access '
        'restricted data (this option can be repeated)')

    parser.add_option(
        '--sites',
        dest='sites',
        metavar='SITE1,SITE2,...',
        default='geofon,iris,orfeus',
        help='sites to query (available: %s, default: "%%default")' %
        ', '.join(g_sites_available))

    parser.add_option(
        '--band-codes',
        dest='priority_band_code',
        metavar='V,L,M,B,H,S,E,...',
        default='B,H',
        help='select and prioritize band codes (default: %default)')

    parser.add_option(
        '--instrument-codes',
        dest='priority_instrument_code',
        metavar='H,L,G,...',
        default='H,L',
        help='select and prioritize instrument codes (default: %default)')

    parser.add_option('--radius-min',
                      dest='radius_min',
                      metavar='VALUE',
                      default=0.0,
                      type=float,
                      help='minimum radius [km]')

    parser.add_option('--nstations-wanted',
                      dest='nstations_wanted',
                      metavar='N',
                      type=int,
                      help='number of stations to select initially')

    (options, args) = parser.parse_args(sys.argv[1:])

    print('Parsed arguments:', args)
    if len(args) not in (10, 7, 6):
        parser.print_help()
        sys.exit(1)

    if options.debug:
        util.setup_logging(program_name, 'debug')
    else:
        util.setup_logging(program_name, 'info')

    if options.local_responses_pz and options.local_responses_resp:
        logger.critical('cannot use local responses in PZ and RESP '
                        'format at the same time')
        sys.exit(1)

    n_resp_opt = 0
    for resp_opt in (options.local_responses_pz, options.local_responses_resp,
                     options.local_responses_stationxml):

        if resp_opt:
            n_resp_opt += 1

    if n_resp_opt > 1:
        logger.critical('can only handle local responses from either PZ or '
                        'RESP or StationXML. Cannot yet merge different '
                        'response formats.')
        sys.exit(1)

    if options.local_responses_resp and not options.local_stations:
        logger.critical('--local-responses-resp can only be used '
                        'when --stations is also given.')
        sys.exit(1)

    try:
        ename = ''
        magnitude = None
        mt = None
        if len(args) == 10:
            time = util.str_to_time(args[1] + ' ' + args[2])
            lat = float(args[3])
            lon = float(args[4])
            depth = float(args[5]) * km
            iarg = 6

        elif len(args) == 7:
            if args[2].find(':') == -1:
                sname_or_date = None
                lat = float(args[1])
                lon = float(args[2])
                event = None
                time = None
            else:
                sname_or_date = args[1] + ' ' + args[2]

            iarg = 3

        elif len(args) == 6:
            sname_or_date = args[1]
            iarg = 2

        if len(args) in (7, 6) and sname_or_date is not None:
            events = get_events_by_name_or_date([sname_or_date],
                                                catalog=geofon)
            if len(events) == 0:
                logger.critical('no event found')
                sys.exit(1)
            elif len(events) > 1:
                logger.critical('more than one event found')
                sys.exit(1)

            event = events[0]
            time = event.time
            lat = event.lat
            lon = event.lon
            depth = event.depth
            ename = event.name
            magnitude = event.magnitude
            mt = event.moment_tensor

        radius = float(args[iarg]) * km
        fmin = float(args[iarg + 1])
        sample_rate = float(args[iarg + 2])

        eventname = args[iarg + 3]
        cwd = str(sys.argv[1])
        event_dir = op.join(cwd, 'data', 'events', eventname)
        output_dir = op.join(event_dir, 'waveforms')
    except (ValueError, IndexError):
        parser.print_help()
        sys.exit(1)

    if options.force and op.isdir(event_dir):
        if not options.continue_:
            shutil.rmtree(event_dir)

    if op.exists(event_dir) and not options.continue_:
        logger.critical(
            'directory "%s" exists. Delete it first or use the --force option'
            % event_dir)
        sys.exit(1)

    util.ensuredir(output_dir)

    if time is not None:
        event = model.Event(time=time,
                            lat=lat,
                            lon=lon,
                            depth=depth,
                            name=ename,
                            magnitude=magnitude,
                            moment_tensor=mt)

    if options.window == 'full':
        if event is None:
            logger.critical('need event for --window=full')
            sys.exit(1)

        low_velocity = 1500.
        timewindow = VelocityWindow(low_velocity,
                                    tpad=options.padding_factor / fmin)

        tmin, tmax = timewindow(time, radius, depth)

    elif options.window == 'p':
        if event is None:
            logger.critical('need event for --window=p')
            sys.exit(1)

        phases = list(map(cake.PhaseDef, 'P p'.split()))
        emod = cake.load_model()

        tpad = options.padding_factor / fmin
        timewindow = PhaseWindow(emod, phases, -tpad, tpad)

        arrivaltimes = []
        for dist in num.linspace(0, radius, 20):
            try:
                arrivaltimes.extend(timewindow(time, dist, depth))
            except NoArrival:
                pass

        if not arrivaltimes:
            logger.error('required phase arrival not found')
            sys.exit(1)

        tmin = min(arrivaltimes)
        tmax = max(arrivaltimes)

    else:
        try:
            stmin, stmax = options.window.split(',')
            tmin = util.str_to_time(stmin.strip())
            tmax = util.str_to_time(stmax.strip())

            timewindow = FixedWindow(tmin, tmax)

        except ValueError:
            logger.critical('invalid argument to --window: "%s"' %
                            options.window)
            sys.exit(1)

    if event is not None:
        event.name = eventname

    tfade = tfade_factor / fmin

    tpad = tfade

    tmin -= tpad
    tmax += tpad

    tinc = None

    priority_band_code = options.priority_band_code.split(',')
    for s in priority_band_code:
        if len(s) != 1:
            logger.critical('invalid band code: %s' % s)

    priority_instrument_code = options.priority_instrument_code.split(',')
    for s in priority_instrument_code:
        if len(s) != 1:
            logger.critical('invalid instrument code: %s' % s)

    station_query_conf = dict(latitude=lat,
                              longitude=lon,
                              minradius=options.radius_min * km * cake.m2d,
                              maxradius=radius * cake.m2d,
                              channel=','.join('%s??' % s
                                               for s in priority_band_code))

    target_sample_rate = sample_rate

    fmax = target_sample_rate

    # target_sample_rate = None
    # priority_band_code = ['H', 'B', 'M', 'L', 'V', 'E', 'S']

    priority_units = ['M/S', 'M', 'M/S**2']

    # output_units = 'M'

    sites = [x.strip() for x in options.sites.split(',') if x.strip()]

    for site in sites:
        if site not in g_sites_available:
            logger.critical('unknown FDSN site: %s' % site)
            sys.exit(1)

    for s in options.user_credentials:
        try:
            site, user, passwd = s.split(',')
            g_user_credentials[site] = user, passwd
        except ValueError:
            logger.critical('invalid format for user credentials: "%s"' % s)
            sys.exit(1)

    for s in options.auth_tokens:
        try:
            site, token_filename = s.split(',')
            with open(token_filename, 'r') as f:
                g_auth_tokens[site] = f.read()
        except (ValueError, OSError, IOError):
            logger.critical('cannot get token from file: %s' % token_filename)
            sys.exit(1)

    fn_template0 = \
        'data_%(network)s.%(station)s.%(location)s.%(channel)s_%(tmin)s.mseed'

    fn_template_raw = op.join(output_dir, 'raw', fn_template0)
    fn_stations_raw = op.join(output_dir, 'stations.raw.txt')
    fn_template_rest = op.join(output_dir, 'rest', fn_template0)
    fn_commandline = op.join(output_dir, 'beatdown.command')

    ftap = (ffade_factors[0] * fmin, fmin, fmax, ffade_factors[1] * fmax)

    # chapter 1: download

    sxs = []
    for site in sites:
        try:
            extra_args = {
                'iris': dict(matchtimeseries=True),
            }.get(site, {})

            extra_args.update(station_query_conf)

            if site == 'geonet':
                extra_args.update(starttime=tmin, endtime=tmax)
            else:
                extra_args.update(startbefore=tmax,
                                  endafter=tmin,
                                  includerestricted=(site in g_user_credentials
                                                     or site in g_auth_tokens))

            logger.info('downloading channel information (%s)' % site)
            sx = fdsn.station(site=site,
                              format='text',
                              level='channel',
                              **extra_args)

        except fdsn.EmptyResult:
            logger.error('No stations matching given criteria. (%s)' % site)
            sx = None

        # append also None, so that ``sxs`` stays aligned with ``sites``
        # in the zip() loops below
        sxs.append(sx)

    if all(sx is None for sx in sxs) and not options.local_data:
        sys.exit(1)

    nsl_to_sites = defaultdict(list)
    nsl_to_station = {}

    if options.selection_file:
        logger.info('using station selection from file')
        stations = []
        for fn in options.selection_file:
            stations.extend(model.load_stations(fn))

        nsls_selected = set(s.nsl() for s in stations)
    else:
        nsls_selected = None

    for sx, site in zip(sxs, sites):
        if sx is None:
            continue

        site_stations = sx.get_pyrocko_stations()
        for s in site_stations:
            nsl = s.nsl()

            nsl_to_sites[nsl].append(site)
            if nsl not in nsl_to_station:
                if nsls_selected:
                    if nsl in nsls_selected:
                        nsl_to_station[nsl] = s
                else:
                    # use the first site that provides this station
                    nsl_to_station[nsl] = s

        logger.info('number of stations found: %i' % len(nsl_to_station))

    # station weeding
    if options.nstations_wanted:
        nsls_selected = None
        stations_all = [
            nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())
        ]

        for s in stations_all:
            s.set_event_relative_data(event)

        stations_selected = weeding.weed_stations(stations_all,
                                                  options.nstations_wanted)[0]

        nsls_selected = set(s.nsl() for s in stations_selected)
        logger.info('number of stations selected: %i' % len(nsls_selected))

    if tinc is None:
        tinc = 3600.

    have_data = set()

    if options.continue_:
        fns = glob.glob(fn_template_raw % starfill())
        p = pile.make_pile(fns)
    else:
        fns = []

    have_data_site = {}
    could_have_data_site = {}
    for site in sites:
        have_data_site[site] = set()
        could_have_data_site[site] = set()

    available_through = defaultdict(set)
    it = 0
    nt = int(math.ceil((tmax - tmin) / tinc))
    for it in range(nt):
        tmin_win = tmin + it * tinc
        tmax_win = min(tmin + (it + 1) * tinc, tmax)
        logger.info('time window %i/%i (%s - %s)' %
                    (it + 1, nt, util.tts(tmin_win), util.tts(tmax_win)))

        have_data_this_window = set()
        if options.continue_:
            trs_avail = p.all(tmin=tmin_win, tmax=tmax_win, load_data=False)
            for tr in trs_avail:
                have_data_this_window.add(tr.nslc_id)
        for site, sx in zip(sites, sxs):
            if sx is None:
                continue

            selection = []
            channels = sx.choose_channels(
                target_sample_rate=target_sample_rate,
                priority_band_code=priority_band_code,
                priority_units=priority_units,
                priority_instrument_code=priority_instrument_code,
                timespan=(tmin_win, tmax_win))

            for nslc in sorted(channels.keys()):
                if nsls_selected is not None and nslc[:3] not in nsls_selected:
                    continue

                could_have_data_site[site].add(nslc)

                if nslc not in have_data_this_window:
                    channel = channels[nslc]
                    if event:
                        lat_, lon_ = event.lat, event.lon
                    else:
                        lat_, lon_ = lat, lon
                    try:
                        dist = orthodrome.distance_accurate50m_numpy(
                            lat_, lon_, channel.latitude.value,
                            channel.longitude.value)
                    except AttributeError:
                        # latitude/longitude may be plain floats here
                        dist = orthodrome.distance_accurate50m_numpy(
                            lat_, lon_, channel.latitude, channel.longitude)

                    if event:
                        depth_ = event.depth
                        time_ = event.time
                    else:
                        depth_ = None
                        time_ = None

                    tmin_, tmax_ = timewindow(time_, dist, depth_)

                    tmin_this = tmin_ - tpad
                    tmax_this = float(tmax_ + tpad)

                    tmin_req = max(tmin_win, tmin_this)
                    tmax_req = min(tmax_win, tmax_this)
                    if channel.sample_rate:
                        try:
                            deltat = 1.0 / float(channel.sample_rate.value)
                        except AttributeError:
                            # sample_rate may be a plain number here
                            deltat = 1.0 / float(channel.sample_rate)
                    else:
                        deltat = 1.0

                    if tmin_req < tmax_req:
                        logger.debug('deltat %f' % deltat)
                        # extend the time window by some samples, because
                        # otherwise gaps are sometimes produced; apparently
                        # the web services are only sensitive to full seconds,
                        # so round to avoid gaps and increase the safety window
                        selection.append(nslc +
                                         (math.floor(tmin_req - deltat * 20.0),
                                          math.ceil(tmax_req + deltat * 20.0)))
            if options.dry_run:
                # only the channel codes are needed here; unpacking the
                # window times would shadow the outer tmin/tmax
                for (net, sta, loc, cha, _, _) in selection:
                    available_through[net, sta, loc, cha].add(site)

            else:
                neach = 100
                i = 0
                nbatches = ((len(selection) - 1) // neach) + 1
                while i < len(selection):
                    selection_now = selection[i:i + neach]
                    f = tempfile.NamedTemporaryFile()
                    try:
                        sbatch = ''
                        if nbatches > 1:
                            sbatch = ' (batch %i/%i)' % (
                                (i // neach) + 1, nbatches)

                        logger.info('downloading data (%s)%s' % (site, sbatch))
                        data = fdsn.dataselect(site=site,
                                               selection=selection_now,
                                               **get_user_credentials(site))

                        while True:
                            buf = data.read(1024)
                            if not buf:
                                break
                            f.write(buf)

                        f.flush()

                        trs = io.load(f.name)
                        for tr in trs:
                            tr.fix_deltat_rounding_errors()
                            logger.debug('cutting window: %f - %f' %
                                         (tmin_win, tmax_win))
                            logger.debug(
                                'available window: %f - %f, nsamples: %g' %
                                (tr.tmin, tr.tmax, tr.ydata.size))
                            try:
                                logger.debug('tmin before snap %f' % tr.tmin)
                                tr.snap(interpolate=True)
                                logger.debug('tmin after snap %f' % tr.tmin)
                                tr.chop(tmin_win,
                                        tmax_win,
                                        snap=(math.floor, math.ceil),
                                        include_last=True)
                                logger.debug(
                                    'cut window: %f - %f, nsamples: %g' %
                                    (tr.tmin, tr.tmax, tr.ydata.size))
                                have_data.add(tr.nslc_id)
                                have_data_site[site].add(tr.nslc_id)
                            except trace.NoData:
                                pass

                        fns2 = io.save(trs, fn_template_raw)
                        for fn in fns2:
                            if fn in fns:
                                logger.warning('overwriting file %s', fn)
                        fns.extend(fns2)

                    except fdsn.EmptyResult:
                        pass

                    except HTTPError:
                        logger.warning(
                            'an error occurred while downloading data '
                            'for channels \n  %s' %
                            '\n  '.join('.'.join(x[:4])
                                        for x in selection_now))

                    f.close()
                    i += neach

    if options.dry_run:
        nslcs = sorted(available_through.keys())

        all_channels = defaultdict(set)
        all_stations = defaultdict(set)

        def plural_s(x):
            return '' if x == 1 else 's'

        for nslc in nslcs:
            sites = tuple(sorted(available_through[nslc]))
            logger.info('selected: %s.%s.%s.%s from site%s %s' %
                        (nslc + (plural_s(len(sites)), '+'.join(sites))))

            all_channels[sites].add(nslc)
            all_stations[sites].add(nslc[:3])

        nchannels_all = 0
        nstations_all = 0
        for sites in sorted(all_channels.keys(),
                            key=lambda sites: (-len(sites), sites)):

            nchannels = len(all_channels[sites])
            nstations = len(all_stations[sites])
            nchannels_all += nchannels
            nstations_all += nstations
            logger.info('selected (%s): %i channel%s (%i station%s)' %
                        ('+'.join(sites), nchannels, plural_s(nchannels),
                         nstations, plural_s(nstations)))

        logger.info('selected total: %i channel%s (%i station%s)' %
                    (nchannels_all, plural_s(nchannels_all), nstations_all,
                     plural_s(nstations_all)))

        logger.info('dry run done.')
        sys.exit(0)

    for nslc in have_data:
        # if we are in continue mode, we have to guess where the data came from
        if not any(nslc in have_data_site[site] for site in sites):
            for site in sites:
                if nslc in could_have_data_site[site]:
                    have_data_site[site].add(nslc)

    sxs = {}
    for site in sites:
        selection = []
        for nslc in sorted(have_data_site[site]):
            selection.append(nslc + (tmin - tpad, tmax + tpad))

        if selection:
            logger.info('downloading response information (%s)' % site)
            sxs[site] = fdsn.station(site=site,
                                     level='response',
                                     selection=selection)

            sxs[site].dump_xml(filename=op.join(output_dir, 'stations.%s.xml' %
                                                site))

    # chapter 1.5: inject local data

    if options.local_data:
        have_data_site['local'] = set()
        plocal = pile.make_pile(options.local_data, fileformat='detect')
        logger.info(
            'Importing local data from %s between %s (%f) and %s (%f)' %
            (options.local_data, util.time_to_str(tmin), tmin,
             util.time_to_str(tmax), tmax))
        for traces in plocal.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                             tmin=tmin,
                                             tmax=tmax,
                                             tinc=tinc):

            for tr in traces:
                if tr.nslc_id not in have_data:
                    fns.extend(io.save(traces, fn_template_raw))
                    have_data_site['local'].add(tr.nslc_id)
                    have_data.add(tr.nslc_id)

        sites.append('local')

    if options.local_responses_pz:
        sxs['local'] = epz.make_stationxml(
            epz.iload(options.local_responses_pz))

    if options.local_responses_resp:
        local_stations = []
        for fn in options.local_stations:
            local_stations.extend(model.load_stations(fn))

        sxs['local'] = resp.make_stationxml(
            local_stations, resp.iload(options.local_responses_resp))

    if options.local_responses_stationxml:
        sxs['local'] = stationxml.load_xml(
            filename=options.local_responses_stationxml)

    # chapter 1.6: dump raw data stations file

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    stations = [nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

    util.ensuredirs(fn_stations_raw)
    model.dump_stations(stations, fn_stations_raw)

    dump_commandline(sys.argv, fn_commandline)

    # chapter 2: restitution

    if not fns:
        logger.error('no data available')
        sys.exit(1)

    p = pile.make_pile(fns, show_progress=False)
    # otinc = nice_seconds_floor(p.get_deltatmin() * 500000.)
    otinc = 3600.
    otmin = math.floor(p.tmin / otinc) * otinc
    otmax = math.ceil(p.tmax / otinc) * otinc
    otpad = tpad * 2

    fns = []
    rest_traces_b = []
    win_b = None
    for traces_a in p.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                      tmin=otmin,
                                      tmax=otmax,
                                      tinc=otinc,
                                      tpad=otpad):

        rest_traces_a = []
        win_a = None
        for tr in traces_a:
            win_a = tr.wmin, tr.wmax

            if win_b and win_b[0] >= win_a[0]:
                fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))
                rest_traces_b = []
                win_b = None

            response = None
            failure = []
            for site in sites:
                try:
                    if site not in sxs:
                        continue
                    logger.debug('Getting response for %s' % tr.__str__())
                    response = sxs[site].get_pyrocko_response(
                        tr.nslc_id,
                        timespan=(tr.tmin, tr.tmax),
                        fake_input_units=options.output_units)

                    break

                except stationxml.NoResponseInformation:
                    failure.append('%s: no response information' % site)

                except stationxml.MultipleResponseInformation:
                    failure.append('%s: multiple response information' % site)

            if response is None:
                failure = ', '.join(failure)

            else:
                failure = ''
                try:
                    if tr.tmin > tmin and options.zero_pad:
                        logger.warning(
                            'Trace too short for clean restitution in '
                            'desired frequency band -> zero-padding!')
                        tr.extend(tr.tmin - tfade, tr.tmax + tfade, 'repeat')

                    rest_tr = tr.transfer(tfade, ftap, response, invert=True)
                    rest_traces_a.append(rest_tr)

                except (trace.TraceTooShort, trace.NoData):
                    failure = 'trace too short'

            if failure:
                logger.warning(
                    'failed to restitute trace %s.%s.%s.%s (%s)' %
                    (tr.nslc_id + (failure, )))

        if rest_traces_b:
            rest_traces = trace.degapper(rest_traces_b + rest_traces_a,
                                         deoverlap='crossfade_cos')

            fns.extend(cut_n_dump(rest_traces, win_b, fn_template_rest))
            rest_traces_a = []
            if win_a:
                for tr in rest_traces:
                    try:
                        rest_traces_a.append(
                            tr.chop(win_a[0], win_a[1] + otpad, inplace=False))
                    except trace.NoData:
                        pass

        rest_traces_b = rest_traces_a
        win_b = win_a

    fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))

    # chapter 3: rotated restituted traces for inspection

    if not event:
        sys.exit(0)

    fn_template1 = \
        'DISPL.%(network)s.%(station)s.%(location)s.%(channel)s'

    fn_waveforms = op.join(output_dir, 'prepared', fn_template1)
    fn_stations = op.join(output_dir, 'stations.prepared.txt')
    fn_event = op.join(event_dir, 'event.txt')
    fn_event_yaml = op.join(event_dir, 'event.yaml')

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    p = pile.make_pile(fns, show_progress=False)

    deltat = None
    if sample_rate is not None:
        deltat = 1.0 / sample_rate

    traces_beat = []
    used_stations = []
    for nsl, s in nsl_to_station.items():
        s.set_event_relative_data(event)
        traces = p.all(trace_selector=lambda tr: tr.nslc_id[:3] == nsl)

        if options.out_components == 'rtu':
            pios = s.guess_projections_to_rtu(out_channels=('R', 'T', 'Z'))
        elif options.out_components == 'enu':
            pios = s.guess_projections_to_enu(out_channels=('E', 'N', 'Z'))
        else:
            assert False

        for (proj, in_channels, out_channels) in pios:

            proc = trace.project(traces, proj, in_channels, out_channels)
            for tr in proc:
                tr_beat = heart.SeismicDataset.from_pyrocko_trace(tr)
                traces_beat.append(tr_beat)
                for ch in out_channels:
                    if ch.name == tr.channel:
                        s.add_channel(ch)

            if proc:
                io.save(proc, fn_waveforms)
                used_stations.append(s)

    stations = list(used_stations)
    util.ensuredirs(fn_stations)
    model.dump_stations(stations, fn_stations)
    model.dump_events([event], fn_event)

    from pyrocko.guts import dump
    dump([event], filename=fn_event_yaml)

    utility.dump_objects(op.join(cwd, 'seismic_data.pkl'),
                         outlist=[stations, traces_beat])
    logger.info('prepared waveforms from %i stations' % len(stations))
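
Stripped of batching and gap handling, the restitution step follows a
common pyrocko pattern: ask each site's StationXML for a response, then
deconvolve it from the trace. A sketch using the same names as above:

def restitute(tr, sxs, tfade, ftap, output_units='M'):
    # try the StationXML documents of all sites in turn
    for sx in sxs.values():
        try:
            response = sx.get_pyrocko_response(
                tr.nslc_id,
                timespan=(tr.tmin, tr.tmax),
                fake_input_units=output_units)

            return tr.transfer(tfade, ftap, response, invert=True)

        except stationxml.NoResponseInformation:
            continue

    return None  # no site provided a response for this trace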
Example 12
            selection.append(nslc + (tmin - tpad, tmax + tpad))

        if selection and download_xml is True:
            logger.info('downloading response information (%s)' % site)
            sxs[site] = fdsn.station(site=site,
                                     level='response',
                                     selection=selection)
            if site == "http://192.168.11.220:8080":
                sited = "bgr_internal"
            else:
                sited = site
            sxs[site].dump_xml(filename=op.join(output_dir, 'stations.%s.xml' %
                                                sited))
        if selection and download_xml is False:
            sited = site
            sxs[site] = stationxml.load_xml(
                filename=op.join('stations.%s.xml' % sited))
    # chapter 1.5: inject local data

    if options.local_data:
        have_data_site['local'] = set()
        plocal = pile.make_pile(options.local_data, fileformat='detect')
        for traces in plocal.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                             tmin=tmin,
                                             tmax=tmax,
                                             tinc=tinc):

            for tr in traces:
                if tr.nslc_id not in have_data:
                    fns.extend(io.save(traces, fn_template_raw))
                    have_data_site['local'].add(tr.nslc_id)
                    have_data.add(tr.nslc_id)
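
The cache-or-download logic of this variant, in isolation; output_dir and
the fdsn module are assumed to be available as in the surrounding script:

import os.path as op
from pyrocko.io import stationxml

def get_response_xml(site, selection, output_dir, download=True):
    fn = op.join(output_dir, 'stations.%s.xml' % site)
    if download:
        sx = fdsn.station(site=site, level='response', selection=selection)
        sx.dump_xml(filename=fn)
    else:
        sx = stationxml.load_xml(filename=fn)
    return sx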
Example 13
    def test_read_big(self):
        for site in ['iris']:
            fpath = common.test_data_file('%s_1014-01-01_all.xml' % site)
            stationxml.load_xml(filename=fpath)
Example 15
def gen_dataset(scenarios,
                projdir,
                store_id,
                modelled_channel_codes,
                magmin,
                magmax,
                depmin,
                depmax,
                latmin,
                latmax,
                lonmin,
                lonmax,
                stations_file,
                gf_store_superdirs,
                shakemap=True,
                add_noise=True,
                t_station_dropout=False,
                simple_induced=True,
                seiger=True,
                generate_scenario_type="full",
                event_list=None,
                responses="responses_bgr.xml"):

    if seiger is True:
        times_kuper, pressure_kuper, temp_kuper, rate_kuper = \
            get_kuperkoch_data()
        mean_pressure = num.mean(pressure_kuper)
        mean_temp = num.mean(temp_kuper)
        mean_rate = num.mean(rate_kuper)

    if gf_store_superdirs is None:
        engine = gf.LocalEngine(use_config=True)
    else:
        engine = gf.LocalEngine(store_superdirs=[gf_store_superdirs])
    if t_station_dropout is True:
        # random station dropout: time-dependent station availability is
        # read from the StationXML response file
        from pyrocko.io import stationxml
        station_xml = stationxml.load_xml(filename=responses)

    for scenario in range(scenarios):
        generated_scenario = False
        while generated_scenario is False:
            try:
                base_event = None
                choice = None
                if seiger is True:
                    if generate_scenario_type == "catalog":
                        choice = num.random.choice(len(event_list))
                        base_event = event_list[choice]

                    source, event = get_source_seiger(
                        generate_scenario_type,
                        magmin,
                        magmax,
                        depmin,
                        depmax,
                        latmin,
                        lonmin,
                        latmax,
                        lonmax,
                        simple_induced,
                        use_pressure=use_pressure,
                        event=base_event,
                        store_id=store_id)
                else:
                    source, event = get_source()

                savedir = projdir + '/scenario_' + str(scenario) + '/'
                if not os.path.exists(savedir):
                    os.makedirs(savedir)

                if stations_file is not None:
                    stations = model.load_stations(projdir + "/" +
                                                   stations_file)
                    targets = []
                    for st in stations:
                        for cha in st.channels:
                            target = Target(lat=st.lat,
                                            lon=st.lon,
                                            store_id=store_id,
                                            interpolation='multilinear',
                                            quantity='displacement',
                                            codes=st.nsl() + (cha.name, ))
                            targets.append(target)

                else:
                    # note: this branch assumes 'stations' is available in
                    # scope when no stations_file is given
                    targets = []
                    for st in stations:
                        for cha in modelled_channel_codes:
                            target = Target(lat=st.lat,
                                            lon=st.lon,
                                            store_id=store_id,
                                            interpolation='multilinear',
                                            quantity='displacement',
                                            codes=st.nsl() + (cha, ))
                            targets.append(target)
                if shakemap is True:
                    shakemap_fwd.make_shakemap(engine,
                                               source,
                                               store_id,
                                               savedir,
                                               stations=stations)
                gen_loop = True
                response = engine.process(source, targets)
                synthetic_traces = response.pyrocko_traces()
                if t_station_dropout is True:
                    station_time_dict = load_time_dependent_stations(
                        event, stations, station_xml)
                    for tr in synthetic_traces:
                        for st in station_time_dict:
                            if tr.station == st.station:
                                tr.ydata = tr.ydata * 0.
                if choice == 2:
                    synthetic_traces = gen_white_noise(synthetic_traces)
                    event.tags = ["no_event"]
                if add_noise is True and choice != 2:
                    add_white_noise(synthetic_traces)
                noise_events = gen_noise_events(targets, synthetic_traces,
                                                engine)

                events = [event]
                save(synthetic_traces,
                     events,
                     stations,
                     savedir,
                     noise_events=noise_events)
                generated_scenario = True
            except gf.seismosizer.SeismosizerError:
                pass
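
A hedged invocation sketch for gen_dataset; every value below is a
placeholder (the GF store id, geographic bounds and file names are
assumptions, and stations_file is resolved relative to projdir, as in the
code above):

    gen_dataset(
        scenarios=10,
        projdir='scenarios',
        store_id='insheim_100hz',        # hypothetical GF store id
        modelled_channel_codes=['E', 'N', 'Z'],
        magmin=1.0, magmax=3.5,
        depmin=2.0, depmax=12.0,
        latmin=49.0, latmax=49.4,
        lonmin=8.0, lonmax=8.4,
        stations_file='stations.txt',    # hypothetical, inside projdir
        gf_store_superdirs='gf_stores',  # hypothetical
        seiger=False,
        t_station_dropout=False)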
Example n. 16
def read_xml(self, xml_format):
    """
    Read station meta data and response file. Current version only uses xml created obspy format
    which is slightly different from xml created with pyrocko. We haven't yet tested with xml created
    by other applications/codes so it may work for those too.

    Parameters:
    xml_format (int): input_file_type, 1--obspy; 2--pyrocko;

    Returns:
    --------
    stationxml (): station metadata, e.g. location and name network, and response file.
    stationlist (dict): two-level dictionary contains the stations info.
        [station_name][station_info],
                      station_info:
                      'lat':latitude
                      'lon':longitude
                      'elev':elevation
                      'pre_filt': pre-defined filtering frequency range for response correction

    """
    # load the StationXML file
    stationlist = {}
    if xml_format == 1:
        from obspy import read_inventory
        stationxml = read_inventory(self.maindir + '/input/stations.xml',
                                    format='STATIONXML')
        for j in stationxml._networks:
            for i in j:
                stationlist[i._code] = {}
                stationlist[i._code]['lat'] = i._latitude
                stationlist[i._code]['lon'] = i._longitude
                stationlist[i._code]['elev'] = i._elevation / 1000.
                stationlist[i._code]['pre_filt'] = []
        self.stationxml = stationxml
        self.stationlist = stationlist
    elif xml_format == 2:
        from pyrocko.io import stationxml as sxml
        pyrocko_stations = sxml.load_xml(
            filename=self.maindir +
            '/input/stations.xml').get_pyrocko_stations()
        for i in pyrocko_stations:
            j = i.station
            stationlist[j] = {}
            stationlist[j]['lat'] = i.lat
            stationlist[j]['lon'] = i.lon
            stationlist[j]['elev'] = i.elevation / 1000.
            stationlist[j]['pre_filt'] = []
        self.stationlist = stationlist
        self.stationxml = pyrocko_stations
    else:
        self.stationxml = None
        self.stationlist = None

    return None
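
A minimal usage sketch, assuming read_xml is bound to a class that carries a
maindir attribute and that maindir + '/input/stations.xml' exists; the class
and station names below are hypothetical stand-ins:

    # proc = Preprocessor(maindir='project')   # hypothetical owner class
    # proc.read_xml(xml_format=2)              # 2: Pyrocko-created XML
    # print(proc.stationlist['STA01']['lat'])  # 'STA01' is a placeholder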
Example n. 17
def main():
    parser = OptionParser(usage=usage, description=description)

    parser.add_option(
        "--force",
        dest="force",
        action="store_true",
        default=False,
        help="allow recreation of output <directory>",
    )

    parser.add_option(
        "--debug",
        dest="debug",
        action="store_true",
        default=False,
        help="print debugging information to stderr",
    )

    parser.add_option(
        "--dry-run",
        dest="dry_run",
        action="store_true",
        default=False,
        help="show available stations/channels and exit "
        "(do not download waveforms)",
    )

    parser.add_option(
        "--continue",
        dest="continue_",
        action="store_true",
        default=False,
        help="continue download after a accident",
    )

    parser.add_option(
        "--local-data",
        dest="local_data",
        action="append",
        help="add file/directory with local data",
    )

    parser.add_option(
        "--local-stations",
        dest="local_stations",
        action="append",
        help="add local stations file",
    )

    parser.add_option(
        "--local-responses-resp",
        dest="local_responses_resp",
        action="append",
        help="add file/directory with local responses in RESP format",
    )

    parser.add_option(
        "--local-responses-pz",
        dest="local_responses_pz",
        action="append",
        help="add file/directory with local pole-zero responses",
    )

    parser.add_option(
        "--local-responses-stationxml",
        dest="local_responses_stationxml",
        help="add file with local response information in StationXML format",
    )

    parser.add_option(
        "--window",
        dest="window",
        default="full",
        help='set time window to choose [full, p, "<time-start>,<time-end>"'
        "] (time format is YYYY-MM-DD HH:MM:SS)",
    )

    parser.add_option(
        "--out-components",
        choices=["enu", "rtu"],
        dest="out_components",
        default="rtu",
        help="set output component orientations to radial-transverse-up [rtu] "
        "(default) or east-north-up [enu]",
    )

    parser.add_option(
        "--padding-factor",
        type=float,
        default=3.0,
        dest="padding_factor",
        help="extend time window on either side, in multiples of 1/<fmin_hz> "
        "(default: 5)",
    )

    parser.add_option(
        "--credentials",
        dest="user_credentials",
        action="append",
        default=[],
        metavar="SITE,USER,PASSWD",
        help="user credentials for specific site to access restricted data "
        "(this option can be repeated)",
    )

    parser.add_option(
        "--token",
        dest="auth_tokens",
        metavar="SITE,FILENAME",
        action="append",
        default=[],
        help="user authentication token for specific site to access "
        "restricted data (this option can be repeated)",
    )

    parser.add_option(
        "--sites",
        dest="sites",
        metavar="SITE1,SITE2,...",
        default="http://ws.gpi.kit.edu,bgr,http://188.246.25.142:8080",
        help='sites to query (available: %s, default: "%%default")' %
        ", ".join(g_sites_available),
    )

    parser.add_option(
        "--band-codes",
        dest="priority_band_code",
        metavar="V,L,M,B,H,S,E,...",
        default="V,L,M,B,H,E",
        help="select and prioritize band codes (default: %default)",
    )

    parser.add_option(
        "--instrument-codes",
        dest="priority_instrument_code",
        metavar="H,L,G,...",
        default="H,L,O,",
        help="select and prioritize instrument codes (default: %default)",
    )

    parser.add_option(
        "--radius-min",
        dest="radius_min",
        metavar="VALUE",
        default=0.0,
        type=float,
        help="minimum radius [km]",
    )

    parser.add_option(
        "--tinc",
        dest="tinc",
        metavar="VALUE",
        default=3600.0 * 12.0,
        type=float,
        help="length of seperate saved files in s",
    )

    parser.add_option(
        "--nstations-wanted",
        dest="nstations_wanted",
        metavar="N",
        type=int,
        help="number of stations to select initially",
    )

    (options, args) = parser.parse_args(sys.argv[1:])
    if len(args) not in (9, 6, 5):
        parser.print_help()
        sys.exit(1)

    if options.debug:
        util.setup_logging(program_name, "debug")
    else:
        util.setup_logging(program_name, "info")

    if options.local_responses_pz and options.local_responses_resp:
        logger.critical("cannot use local responses in PZ and RESP "
                        "format at the same time")
        sys.exit(1)

    n_resp_opt = 0
    for resp_opt in (
            options.local_responses_pz,
            options.local_responses_resp,
            options.local_responses_stationxml,
    ):

        if resp_opt:
            n_resp_opt += 1

    if n_resp_opt > 1:
        logger.critical("can only handle local responses from either PZ or "
                        "RESP or StationXML. Cannot yet merge different "
                        "response formats.")
        sys.exit(1)

    if options.local_responses_resp and not options.local_stations:
        logger.critical("--local-responses-resp can only be used "
                        "when --stations is also given.")
        sys.exit(1)

    try:
        ename = ""
        magnitude = None
        mt = None
        if len(args) == 9:
            time = util.str_to_time(args[0] + " " + args[1])
            lat = float(args[2])
            lon = float(args[3])
            depth = float(args[4]) * km
            iarg = 5

        elif len(args) == 6:
            if args[1].find(":") == -1:
                sname_or_date = None
                lat = float(args[0])
                lon = float(args[1])
                event = None
                time = None
            else:
                sname_or_date = args[0] + " " + args[1]

            iarg = 2

        elif len(args) == 5:
            sname_or_date = args[0]
            iarg = 1

        if len(args) in (6, 5) and sname_or_date is not None:
            events = get_events_by_name_or_date([sname_or_date],
                                                catalog=geofon)
            if len(events) == 0:
                logger.critical("no event found")
                sys.exit(1)
            elif len(events) > 1:
                logger.critical("more than one event found")
                sys.exit(1)

            event = events[0]
            time = event.time
            lat = event.lat
            lon = event.lon
            depth = event.depth
            ename = event.name
            magnitude = event.magnitude
            mt = event.moment_tensor

        radius = float(args[iarg]) * km
        fmin = float(args[iarg + 1])
        sample_rate = float(args[iarg + 2])

        eventname = args[iarg + 3]
        event_dir = op.join("data", "events", eventname)
        output_dir = op.join(event_dir, "waveforms")
    except Exception:
        parser.print_help()
        sys.exit(1)

    if options.force and op.isdir(event_dir):
        if not options.continue_:
            shutil.rmtree(event_dir)

    if op.exists(event_dir) and not options.continue_:
        logger.critical(
            'directory "%s" exists. Delete it first or use the --force option'
            % event_dir)
        sys.exit(1)

    util.ensuredir(output_dir)

    if time is not None:
        event = model.Event(
            time=time,
            lat=lat,
            lon=lon,
            depth=depth,
            name=ename,
            magnitude=magnitude,
            moment_tensor=mt,
        )

    if options.window == "full":
        if event is None:
            logger.critical("need event for --window=full")
            sys.exit(1)

        low_velocity = 1500.0
        timewindow = VelocityWindow(low_velocity,
                                    tpad=options.padding_factor / fmin)

        tmin, tmax = timewindow(time, radius, depth)

    elif options.window == "p":
        if event is None:
            logger.critical("need event for --window=p")
            sys.exit(1)

        phases = list(map(cake.PhaseDef, "P p".split()))
        emod = cake.load_model()

        tpad = options.padding_factor / fmin
        timewindow = PhaseWindow(emod, phases, -tpad, tpad)

        arrivaltimes = []
        for dist in num.linspace(0, radius, 20):
            try:
                arrivaltimes.extend(timewindow(time, dist, depth))
            except NoArrival:
                pass

        if not arrivaltimes:
            logger.error("required phase arrival not found")
            sys.exit(1)

        tmin = min(arrivaltimes)
        tmax = max(arrivaltimes)

    else:
        try:
            stmin, stmax = options.window.split(",")
            tmin = util.str_to_time(stmin.strip())
            tmax = util.str_to_time(stmax.strip())

            timewindow = FixedWindow(tmin, tmax)

        except ValueError:
            logger.critical('invalid argument to --window: "%s"' %
                            options.window)
            sys.exit(1)

    if event is not None:
        event.name = eventname

    tlen = tmax - tmin
    tfade = tfade_factor / fmin

    tpad = tfade

    tmin -= tpad
    tmax += tpad

    priority_band_code = options.priority_band_code.split(",")
    for s in priority_band_code:
        if len(s) != 1:
            logger.critical("invalid band code: %s" % s)

    priority_instrument_code = options.priority_instrument_code.split(",")
    for s in priority_instrument_code:
        if len(s) != 1:
            logger.critical("invalid instrument code: %s" % s)

    station_query_conf = dict(
        latitude=lat,
        longitude=lon,
        minradius=options.radius_min * km * cake.m2d,
        maxradius=radius * cake.m2d,
        channel=",".join("?%s?" % s for s in priority_band_code),
    )

    target_sample_rate = sample_rate

    fmax = target_sample_rate


    priority_units = ["M/S", "M", "M/S**2"]

    output_units = "M"

    sites = [x.strip() for x in options.sites.split(",") if x.strip()]
    tinc = options.tinc

    for s in options.user_credentials:
        try:
            site, user, passwd = s.split(",")
            g_user_credentials[site] = user, passwd
        except ValueError:
            logger.critical('invalid format for user credentials: "%s"' % s)
            sys.exit(1)

    for s in options.auth_tokens:
        try:
            site, token_filename = s.split(",")
            with open(token_filename, "r") as f:
                g_auth_tokens[site] = f.read()
        except (ValueError, OSError, IOError):
            logger.critical("cannot get token from file: %s" % token_filename)
            sys.exit(1)

    fn_template0 = (
        "data_%(network)s.%(station)s.%(location)s.%(channel)s_%(tmin)s.mseed")

    fn_template_raw = op.join(output_dir, "raw", fn_template0)
    fn_template_raw_folder = op.join(output_dir, "raw/", "traces.mseed")
    fn_stations_raw = op.join(output_dir, "stations.raw.txt")
    fn_template_rest = op.join(output_dir, "rest", fn_template0)
    fn_commandline = op.join(output_dir, "seigerdown.command")

    ftap = (ffade_factors[0] * fmin, fmin, fmax, ffade_factors[1] * fmax)

    # chapter 1: download

    sxs = []
    for site in sites:
        try:
            extra_args = {
                "iris": dict(matchtimeseries=True),
            }.get(site, {})

            extra_args.update(station_query_conf)

            if site == "geonet":
                extra_args.update(starttime=tmin, endtime=tmax)
            else:
                extra_args.update(
                    startbefore=tmax,
                    endafter=tmin,
                    includerestricted=(site in g_user_credentials
                                       or site in g_auth_tokens),
                )

            logger.info("downloading channel information (%s)" % site)
            sx = fdsn.station(site=site,
                              format="text",
                              level="channel",
                              **extra_args)

        except fdsn.EmptyResult:
            logger.error("No stations matching given criteria. (%s)" % site)
            sx = None

        # keep a placeholder for failed sites so that sxs stays aligned
        # with the sites list for the zip() iterations below
        sxs.append(sx)

    if all(sx is None for sx in sxs) and not options.local_data:
        sys.exit(1)

    nsl_to_sites = defaultdict(list)
    nsl_to_station = {}
    for sx, site in zip(sxs, sites):
        if sx is None:
            continue

        site_stations = sx.get_pyrocko_stations()
        for s in site_stations:
            nsl = s.nsl()
            nsl_to_sites[nsl].append(site)
            if nsl not in nsl_to_station:
                nsl_to_station[nsl] = s  # using first site with this station
    logger.info("number of stations found: %i" % len(nsl_to_station))

    # station weeding

    nsls_selected = None
    if options.nstations_wanted:
        stations_all = [
            nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())
        ]

        for s in stations_all:
            s.set_event_relative_data(event)

        stations_selected = weeding.weed_stations(stations_all,
                                                  options.nstations_wanted)[0]

        nsls_selected = set(s.nsl() for s in stations_selected)
        logger.info("number of stations selected: %i" % len(nsls_selected))

    have_data = set()

    if options.continue_:
        fns = glob.glob(fn_template_raw % starfill())
        p = pile.make_pile(fns)
    else:
        fns = []

    have_data_site = {}
    could_have_data_site = {}
    for site in sites:
        have_data_site[site] = set()
        could_have_data_site[site] = set()

    available_through = defaultdict(set)
    nt = int(math.ceil((tmax - tmin) / tinc))
    for it in range(nt):
        tmin_win = tmin + it * tinc
        tmax_win = min(tmin + (it + 1) * tinc, tmax)
        logger.info("time window %i/%i (%s - %s)" %
                    (it + 1, nt, util.tts(tmin_win), util.tts(tmax_win)))

        have_data_this_window = set()
        if options.continue_:
            trs_avail = p.all(tmin=tmin_win, tmax=tmax_win, load_data=False)
            for tr in trs_avail:
                have_data_this_window.add(tr.nslc_id)
        for site, sx in zip(sites, sxs):
            if sx is None:
                continue

            selection = []
            channels = sx.choose_channels(
                target_sample_rate=target_sample_rate,
                priority_band_code=priority_band_code,
                priority_units=priority_units,
                priority_instrument_code=priority_instrument_code,
                timespan=(tmin_win, tmax_win),
            )

            for nslc in sorted(channels.keys()):
                if nsls_selected is not None and nslc[:3] not in nsls_selected:
                    continue

                could_have_data_site[site].add(nslc)

                if nslc not in have_data_this_window:
                    channel = channels[nslc]
                    if event:
                        lat_, lon_ = event.lat, event.lon
                    else:
                        lat_, lon_ = lat, lon

                    dist = orthodrome.distance_accurate50m_numpy(
                        lat_, lon_, channel.latitude.value,
                        channel.longitude.value)

                    if event:
                        depth_ = event.depth
                        time_ = event.time
                    else:
                        depth_ = None
                        time_ = None

                    tmin_, tmax_ = timewindow(time_, dist, depth_)

                    tmin_this = tmin_ - tpad
                    tmax_this = tmax_ + tpad

                    tmin_req = max(tmin_win, tmin_this)
                    tmax_req = min(tmax_win, tmax_this)

                    if channel.sample_rate:
                        deltat = 1.0 / channel.sample_rate.value
                    else:
                        deltat = 1.0

                    if tmin_req < tmax_req:
                        # extend time window by some samples because otherwise
                        # sometimes gaps are produced
                        selection.append(nslc + (tmin_req - deltat * 10.0,
                                                 tmax_req + deltat * 10.0))

            if options.dry_run:
                # note: do not unpack into tmin/tmax here, that would
                # shadow the global time window used further below
                for (net, sta, loc, cha, _, _) in selection:
                    available_through[net, sta, loc, cha].add(site)

            else:
                neach = 100
                i = 0
                nbatches = ((len(selection) - 1) // neach) + 1
                while i < len(selection):
                    selection_now = selection[i:i + neach]

                    f = tempfile.NamedTemporaryFile()
                    try:
                        sbatch = ""
                        if nbatches > 1:
                            sbatch = " (batch %i/%i)" % (
                                (i // neach) + 1, nbatches)

                        logger.info("downloading data (%s)%s" % (site, sbatch))
                        data = fdsn.dataselect(site=site,
                                               selection=selection_now,
                                               **get_user_credentials(site))

                        while True:
                            buf = data.read(1024)
                            if not buf:
                                break
                            f.write(buf)

                        f.flush()

                        trs = io.load(f.name)
                        for tr in trs:
                            if tr.station == "7869":
                                tr.station = "MOER"
                                tr.network = "LE"
                                tr.location = ""
                            try:
                                tr.chop(tmin_win, tmax_win)
                                have_data.add(tr.nslc_id)
                                have_data_site[site].add(tr.nslc_id)
                            except trace.NoData:
                                pass

                        fns2 = io.save(trs, fn_template_raw)
                        io.save(trs, fn_template_raw_folder)
                        for fn in fns2:
                            if fn in fns:
                                logger.warn("overwriting file %s", fn)
                        fns.extend(fns2)

                    except fdsn.EmptyResult:
                        pass

                    except HTTPError:
                        logger.warn("an error occurred while downloading data "
                                    "for channels \n  %s" %
                                    "\n  ".join(".".join(x[:4])
                                                for x in selection_now))

                    f.close()
                    i += neach

    if options.dry_run:
        nslcs = sorted(available_through.keys())

        all_channels = defaultdict(set)
        all_stations = defaultdict(set)

        def plural_s(x):
            return "" if x == 1 else "s"

        for nslc in nslcs:
            sites = tuple(sorted(available_through[nslc]))
            logger.info("selected: %s.%s.%s.%s from site%s %s" %
                        (nslc + (plural_s(len(sites)), "+".join(sites))))

            all_channels[sites].add(nslc)
            all_stations[sites].add(nslc[:3])

        nchannels_all = 0
        nstations_all = 0
        for sites in sorted(all_channels.keys(),
                            key=lambda sites: (-len(sites), sites)):

            nchannels = len(all_channels[sites])
            nstations = len(all_stations[sites])
            nchannels_all += nchannels
            nstations_all += nstations
            logger.info("selected (%s): %i channel%s (%i station%s)" % (
                "+".join(sites),
                nchannels,
                plural_s(nchannels),
                nstations,
                plural_s(nstations),
            ))

        logger.info("selected total: %i channel%s (%i station%s)" % (
            nchannels_all,
            plural_s(nchannels_all),
            nstations_all,
            plural_s(nstations_all),
        ))

        logger.info("dry run done.")
        sys.exit(0)

    for nslc in have_data:
        # if we are in continue mode, we have to guess where the data came from
        if not any(nslc in have_data_site[site] for site in sites):
            for site in sites:
                if nslc in could_have_data_site[site]:
                    have_data_site[site].add(nslc)

    sxs = {}
    for site in sites:
        selection = []
        for nslc in sorted(have_data_site[site]):
            selection.append(nslc + (tmin - tpad, tmax + tpad))

        if selection:
            logger.info("downloading response information (%s)" % site)
            sxs[site] = fdsn.station(site=site,
                                     level="response",
                                     selection=selection)
            sited = site
            if site == "http://192.168.11.220:8080":
                sited = "bgr_internal"
            elif site == "http://ws.gpi.kit.edu":
                sited = "kit"
            elif site == "http://188.246.25.142:8080":
                sited = "moer"

            sxs[site].dump_xml(filename=op.join(output_dir, "stations.%s.xml" %
                                                sited))

    # chapter 1.5: inject local data

    if options.local_data:
        have_data_site["local"] = set()
        plocal = pile.make_pile(options.local_data, fileformat="detect")
        for traces in plocal.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                             tmin=tmin,
                                             tmax=tmax,
                                             tinc=tinc):

            for tr in traces:
                if tr.station == "7869":
                    tr.station = "MOER"
                    tr.network = "LE"
                    tr.location = ""
                if tr.nslc_id not in have_data:
                    fns.extend(io.save(traces, fn_template_raw))
                    have_data_site["local"].add(tr.nslc_id)
                    have_data.add(tr.nslc_id)

        sites.append("local")

    if options.local_responses_pz:
        sxs["local"] = epz.make_stationxml(
            epz.iload(options.local_responses_pz))

    if options.local_responses_resp:
        local_stations = []
        for fn in options.local_stations:
            local_stations.extend(model.load_stations(fn))

        sxs["local"] = resp.make_stationxml(
            local_stations, resp.iload(options.local_responses_resp))

    if options.local_responses_stationxml:
        sxs["local"] = stationxml.load_xml(
            filename=options.local_responses_stationxml)

    # chapter 1.6: dump raw data stations file

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    stations = [nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

    util.ensuredirs(fn_stations_raw)
    model.dump_stations(stations, fn_stations_raw)

    dump_commandline(sys.argv, fn_commandline)

    # chapter 2: restitution

    if not fns:
        logger.error("no data available")
        sys.exit(1)

    p = pile.make_pile(fns, show_progress=False)
    otinc = 3600.0
    otmin = math.floor(p.tmin / otinc) * otinc
    otmax = math.ceil(p.tmax / otinc) * otinc
    otpad = tpad * 2

    fns = []
    rest_traces_b = []
    win_b = None
    for traces_a in p.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                      tmin=otmin,
                                      tmax=otmax,
                                      tinc=otinc,
                                      tpad=otpad):

        rest_traces_a = []
        win_a = None
        for tr in traces_a:
            if tr.station == "7869":
                tr.station = "MOER"
                tr.network = "LE"
                tr.location = ""
            win_a = tr.wmin, tr.wmax

            if win_b and win_b[0] >= win_a[0]:
                fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))
                rest_traces_b = []
                win_b = None

            response = None
            failure = []
            for site in sites:
                try:
                    if site not in sxs:
                        continue
                    response = sxs[site].get_pyrocko_response(
                        tr.nslc_id,
                        timespan=(tr.tmin, tr.tmax),
                        fake_input_units=output_units,
                    )

                    break

                except stationxml.NoResponseInformation:
                    failure.append("%s: no response information" % site)

                except stationxml.MultipleResponseInformation:
                    failure.append("%s: multiple response information" % site)

            if response is None:
                failure = ", ".join(failure)

            else:
                failure = ""
                try:
                    rest_tr = tr.transfer(tfade, ftap, response, invert=True)
                    rest_traces_a.append(rest_tr)

                except (trace.TraceTooShort, trace.NoData):
                    failure = "trace too short"

            if failure:
                logger.warn("failed to restitute trace %s.%s.%s.%s (%s)" %
                            (tr.nslc_id + (failure, )))

        if rest_traces_b:
            rest_traces = trace.degapper(rest_traces_b + rest_traces_a,
                                         deoverlap="crossfade_cos")

            fns.extend(cut_n_dump(rest_traces, win_b, fn_template_rest))
            rest_traces_a = []
            if win_a:
                for tr in rest_traces:
                    if tr.station == "7869":
                        tr.station = "MOER"
                        tr.network = "LE"
                        tr.location = ""
                    try:
                        rest_traces_a.append(
                            tr.chop(win_a[0], win_a[1] + otpad, inplace=False))
                    except trace.NoData:
                        pass

        rest_traces_b = rest_traces_a
        win_b = win_a

    fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))

    # chapter 3: rotated restituted traces for inspection

    if not event:
        sys.exit(0)

    fn_template1 = "DISPL.%(network)s.%(station)s.%(location)s.%(channel)s"

    fn_waveforms = op.join(output_dir, "prepared", fn_template1)
    fn_stations = op.join(output_dir, "stations.prepared.txt")
    fn_event = op.join(event_dir, "event.txt")

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    p = pile.make_pile(fns, show_progress=False)

    deltat = None
    if sample_rate is not None:
        deltat = 1.0 / sample_rate

    used_stations = []
    for nsl, s in nsl_to_station.items():
        s.set_event_relative_data(event)
        traces = p.all(trace_selector=lambda tr: tr.nslc_id[:3] == nsl)

        if deltat is not None:
            keep = []
            for tr in traces:
                try:
                    tr.downsample_to(deltat, snap=True, allow_upsample_max=5)
                    keep.append(tr)
                except util.UnavailableDecimation as e:
                    logger.warn("Cannot downsample %s.%s.%s.%s: %s" %
                                (tr.nslc_id + (e, )))

            # use only the traces which could be downsampled
            traces = keep

        if options.out_components == "rtu":
            pios = s.guess_projections_to_rtu(out_channels=("R", "T", "Z"))
        elif options.out_components == "enu":
            pios = s.guess_projections_to_enu(out_channels=("E", "N", "Z"))
        else:
            assert False

        for (proj, in_channels, out_channels) in pios:

            proc = trace.project(traces, proj, in_channels, out_channels)
            for tr in proc:
                for ch in out_channels:
                    if ch.name == tr.channel:
                        s.add_channel(ch)

            if proc:
                io.save(proc, fn_waveforms)
                used_stations.append(s)

    stations = list(used_stations)
    util.ensuredirs(fn_stations)
    model.dump_stations(stations, fn_stations)
    model.dump_events([event], fn_event)

    logger.info("prepared waveforms from %i stations" % len(stations))
Example n. 18
def station(
        site=g_default_site,
        url=g_url,
        majorversion=1,
        timeout=g_timeout,
        check=True,
        selection=None,
        parsed=True,
        **kwargs):

    '''
    Query FDSN web service for station metadata.

    :param site:
        :ref:`Registered site name <registered-site-names>` or full base URL of
        the service (e.g. ``'https://geofon.gfz-potsdam.de'``).
    :type site: str, optional
    :param url:
        URL template (default should work in 99% of cases).
    :type url: str, optional
    :param majorversion:
        Major version of the service to query (always ``1`` at the time of
        writing).
    :type majorversion: int, optional
    :param timeout:
        Network timeout in [s]. Global default timeout can be configured in
        Pyrocko's configuration file under ``fdsn_timeout``.
    :type timeout: float, optional
    :param check:
        If ``True`` arguments are checked against self-description (WADL) of
        the queried web service if available or FDSN specification.
    :type check: bool, optional
    :param selection:
        If given, selection to be queried as a list of tuples
        ``(network, station, location, channel, tmin, tmax)``. Useful for
        detailed queries.
    :type selection: list of tuples, optional
    :param parsed:
        If ``True`` parse received content into
        :py:class:`~pyrocko.io.stationxml.FDSNStationXML`
        object, otherwise return open file handle to raw data stream.
    :type parsed: bool, optional
    :param \\*\\*kwargs:
        Parameters passed to the server (see `FDSN web services specification
        <https://www.fdsn.org/webservices>`_).

    :returns:
        See description of ``parsed`` argument above.

    :raises:
        On failure, :py:exc:`~pyrocko.util.DownloadError` or one of its
        sub-types defined in the :py:mod:`~pyrocko.client.fdsn` module is
        raised.
    '''

    service = 'station'

    if check:
        check_params(service, site, url, majorversion, timeout, **kwargs)

    params = fix_params(kwargs)

    url = fillurl(service, site, url, majorversion, 'query')
    if selection:
        lst = []
        for k, v in params.items():
            lst.append('%s=%s' % (k, v))

        for (network, station, location, channel, tmin, tmax) in selection:
            if location == '':
                location = '--'

            lst.append(' '.join((network, station, location, channel,
                                 sdatetime(tmin), sdatetime(tmax))))

        post = '\n'.join(lst)
        params = dict(post=post.encode())

    if parsed:
        from pyrocko.io import stationxml
        format = kwargs.get('format', 'xml')
        if format == 'text':
            if kwargs.get('level', 'station') == 'channel':
                return stationxml.load_channel_table(
                    stream=_request(url, timeout=timeout, **params))
            else:
                raise InvalidRequest('if format="text" shall be parsed, '
                                     'level="channel" is required')

        elif format == 'xml':
            return stationxml.load_xml(
                stream=_request(url, timeout=timeout, **params))
        else:
            raise InvalidRequest('format must be "xml" or "text"')
    else:
        return _request(url, timeout=timeout, **params)
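
A short usage sketch for the function above; the GEOFON site and the GE
network are example choices, not requirements:

    from pyrocko.client import fdsn

    # channel-level metadata, parsed into an FDSNStationXML object
    sx = fdsn.station(site='geofon', network='GE', level='channel')
    stations = sx.get_pyrocko_stations()
    print('got %i stations' % len(stations))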
Example n. 19
        if tr.station == st.station and tr.location == st.location:
            stations_real_iris.append(st)
            gaps.append(st.station)

# drop stations whose code appears more than once, without mutating the
# list while iterating over it
remove = [x for x in gaps if gaps.count(x) > 1]
stations_real_iris = [
    st for st in stations_real_iris if st.station not in remove]
model.dump_stations(stations_real_iris,
                    os.path.join(sdspath, 'stations_iris.txt'))

request_response = fdsn.station(
    site=site, selection=selection_iris, level='response')
# save the response in YAML and StationXML format
request_response.dump(filename=os.path.join(sdspath,'responses_iris.yml'))
request_response.dump_xml(filename=os.path.join(sdspath,'responses_iris.xml'))
sx = stationxml.load_xml(filename=os.path.join(sdspath,'responses_iris.xml'))
pyrocko_stations = sx.get_pyrocko_stations()

# Loop through retrieved waveforms and request meta information
# for each trace
event_origin = gf.Source(
    lat=event.lat,
    lon=event.lon)

traces_iris = io.load(os.path.join(sdspath,'traces_iris.mseed'))


displacement_iris = []
stations_disp_iris = []
for tr in traces_iris:
Example n. 20
from pyrocko.io import stationxml
from pyrocko.example import get_example_data

# Download example StationXML file
get_example_data('responses.xml')

# load the StationXML downloaded data file
sx = stationxml.load_xml(filename='responses.xml')

comp_to_azi_dip = {
    'X': (0., 0.),
    'Y': (90., 0.),
    'Z': (0., -90.),
}

# step through all the networks within the data file
for network in sx.network_list:

    # step through all the stations per network
    for station in network.station_list:

        # step through all the channels per station
        for channel in station.channel_list:
            azi, dip = comp_to_azi_dip[channel.code[-1]]

            # set the azimuth and dip of the channel according to its
            # component code
            channel.azimuth.value = azi
            channel.dip.value = dip

            # set the instrument input units to meters ('M')
            channel.response.instrument_sensitivity.input_units.name = 'M'
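
To persist these edits, the modified tree can be written back to disk with
dump_xml (the same method used in Example n. 19 above); the output file name
is arbitrary:

    # save the modified StationXML under a new name
    sx.dump_xml(filename='responses_rotated.xml')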
Example n. 21
    def test_read_big(self):
        for site in ['iris']:
            fpath = common.test_data_file('%s_1014-01-01_all.xml' % site)
            stationxml.load_xml(filename=fpath)
Example n. 22
    def test_conversions(self):

        from pyrocko import model
        from pyrocko.io import resp, enhanced_sacpz
        from pyrocko.io import stationxml

        t = util.str_to_time('2014-01-01 00:00:00')
        codes = 'GE', 'EIL', '', 'BHZ'

        resp_fpath = common.test_data_file('test1.resp')
        stations = [model.Station(
            *codes[:3],
            lat=29.669901,
            lon=34.951199,
            elevation=210.0,
            depth=0.0)]

        sx_resp = resp.make_stationxml(
            stations, resp.iload_filename(resp_fpath))

        sx_resp.validate()

        assert sx_resp.network_list[0].station_list[0].channel_list[0] \
            .dip is None

        stations[0].set_channels_by_name('BHE', 'BHN', 'BHZ')

        sx_resp2 = resp.make_stationxml(
            stations, resp.iload_filename(resp_fpath))

        sx_resp2.validate()

        assert sx_resp2.network_list[0].station_list[0].channel_list[0] \
            .dip.value == -90.0

        pr_sx_resp = sx_resp.get_pyrocko_response(
            codes, time=t, fake_input_units='M/S')
        pr_evresp = trace.Evalresp(
            resp_fpath, nslc_id=codes, target='vel', time=t)

        sacpz_fpath = common.test_data_file('test1.sacpz')
        sx_sacpz = enhanced_sacpz.make_stationxml(
            enhanced_sacpz.iload_filename(sacpz_fpath))
        pr_sx_sacpz = sx_sacpz.get_pyrocko_response(
            codes, time=t, fake_input_units='M/S')
        pr_sacpz = trace.PoleZeroResponse(*pz.read_sac_zpk(sacpz_fpath))
        try:
            pr_sacpz.zeros.remove(0.0j)
        except ValueError:
            pr_sacpz.poles.append(0.0j)

        sx_sacpz_resp = \
            sx_sacpz.network_list[0].station_list[0].channel_list[0].response
        sx_sacpz_resp2 = pz.read_to_stationxml_response(
            input_unit=sx_sacpz_resp.instrument_sensitivity.input_units.name,
            output_unit=sx_sacpz_resp.instrument_sensitivity.output_units.name,
            normalization_frequency=10.,
            filename=sacpz_fpath)
        pr_sx_sacpz2 = sx_sacpz_resp2.get_pyrocko_response(codes)
        try:
            pr_sx_sacpz2.responses[0].zeros.remove(0.0j)
        except ValueError:
            pr_sx_sacpz2.responses[0].poles.append(0.0j)

        sxml_geofon_fpath = common.test_data_file('test1.stationxml')
        sx_geofon = stationxml.load_xml(filename=sxml_geofon_fpath)
        pr_sx_geofon = sx_geofon.get_pyrocko_response(
            codes, time=t, fake_input_units='M/S')

        sxml_iris_fpath = common.test_data_file('test2.stationxml')
        sx_iris = stationxml.load_xml(filename=sxml_iris_fpath)
        pr_sx_iris = sx_iris.get_pyrocko_response(
            codes, time=t, fake_input_units='M/S')

        freqs = num.logspace(num.log10(0.001), num.log10(1.0), num=1000)
        tf_ref = pr_evresp.evaluate(freqs)
        for pr in [pr_sx_resp, pr_sx_sacpz, pr_sacpz, pr_sx_geofon,
                   pr_sx_iris, pr_sx_sacpz2]:
            tf = pr.evaluate(freqs)
            # plot_tfs(freqs, [tf_ref, tf])
            assert cnumeqrel(tf_ref, tf, 0.01)