Example 1
def get_stations(site,
                 lat,
                 lon,
                 rmin,
                 rmax,
                 tmin,
                 tmax,
                 channel_pattern='BH?'):
    from pyrocko.fdsn import ws

    extra = {}
    if site == 'iris':
        extra.update(matchtimeseries=True)

    sx = ws.station(site=site,
                    latitude=lat,
                    longitude=lon,
                    minradius=rmin,
                    maxradius=rmax,
                    startbefore=tmin,
                    endafter=tmax,
                    channel=channel_pattern,
                    format='text',
                    level='channel',
                    includerestricted=False,
                    **extra)

    return sx.get_pyrocko_stations()
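
A minimal usage sketch for get_stations (hypothetical coordinates, radii and
time window; assumes a working pyrocko installation and network access):

from pyrocko import util

# Hypothetical query: BH? channels between 1 and 10 degrees from a point,
# operational throughout January 2014.
stations = get_stations(
    site='geofon',
    lat=52.5, lon=13.4,
    rmin=1.0, rmax=10.0,
    tmin=util.str_to_time('2014-01-01 00:00:00'),
    tmax=util.str_to_time('2014-02-01 00:00:00'))

for station in stations:
    print(station.nsl())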
Example 2
    def test_retrieve(self):
        for site in ['geofon', 'iris']:
            fsx = fdsn_ws.station(site=site,
                                  network='GE',
                                  station='EIL',
                                  level='channel')

            assert len(fsx.get_pyrocko_stations(
                time=stt('2010-01-15 10:00:00'))) == 1
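
The test above relies on an stt shorthand for parsing time strings; given how
it is called, it is presumably an alias like the following (an assumption, not
shown in this excerpt):

from pyrocko import util

# Assumed alias: stt('2010-01-15 10:00:00') -> system time as float
stt = util.str_to_time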
Example 3
def combi_get_responses(stations, time, fn_template):
    from pyrocko.fdsn import ws
    from pyrocko.fdsn import station as fdsnstation

    def fn(net, sta, loc, cha):
        return fn_template % dict(
            network=net, station=sta, location=loc, channel=cha)

    def iter_nslcs(site=None, ignore=None):
        for station in stations:
            if site is not None and site not in station.datacenters:
                continue

            for channel in station.get_channels():
                nslc = station.nsl() + (channel.name, )
                if ignore is None or nslc not in ignore:
                    yield nslc

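    # First pass: reuse any responses already pickled to disk by earlier runs.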
    responses = {}
    for nslc in iter_nslcs():
        if os.path.exists(fn(*nslc)):
            responses[nslc] = pload(fn(*nslc))

    for site in ['geofon', 'iris']:
        selection = []
        for nslc in iter_nslcs(site=site, ignore=responses):
            selection.append(nslc + (time, time + 1.0))

        if selection:
            logger.info('downloading response information (%s)' % site)
            sxs = ws.station(site=site, level='response', selection=selection)

            for nslc_tspan in selection:
                nslc = nslc_tspan[:4]
                timespan = nslc_tspan[4:]
                try:
                    response = sxs.get_pyrocko_response(nslc,
                                                        timespan=timespan,
                                                        fake_input_units='M')

                    util.ensuredirs(fn(*nslc))
                    pdump(response, fn(*nslc))
                    responses[nslc] = response

                except (fdsnstation.NoResponseInformation,
                        fdsnstation.MultipleResponseInformation):
                    pass

    for station in stations:
        for channel in station.get_channels():
            nslc = station.nsl() + (channel.name, )
            if nslc in responses:
                channel.response = responses[nslc]
            else:
                channel.response = None
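
A sketch of a plausible fn_template argument for combi_get_responses above
(hypothetical path; pload and pdump are assumed to be pickle load/dump helpers
from the surrounding module, as their usage suggests):

# One pickled response per channel, keyed by the codes that fn() substitutes:
fn_template = 'responses/%(network)s.%(station)s.%(location)s.%(channel)s.pickle'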
Example 4
def gen_real_stations(tmin=util.stt('2014-01-01 16:10:00.000'),
                      tmax=util.stt('2014-01-01 16:39:59.000')):

    stations = []
    selection = [
        ('*', '*', '*', 'BH*', tmin, tmax),
    ]
    request_response = ws.station(site='iris',
                                  selection=selection,
                                  level='response')

    request_response.dump_xml(filename='stations.xml')
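    # Round-trip through StationXML on disk, so 'stations.xml' is kept as a
    # side effect of the query.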
    sx = fs.load_xml(filename='stations.xml')

    for station in sx.get_pyrocko_stations():
        stations.append(station)

    return stations
Example 5
    def call(self):
        '''Main work routine of the snuffling.'''

        self.cleanup()

        view = self.get_viewer()

        tmin, tmax = view.get_time_range()
        if self.useevent:
            markers = view.selected_markers()
            if len(markers) != 1:
                self.fail('Exactly one marker must be selected.')
            marker = markers[0]
            if not isinstance(marker, EventMarker):
                self.fail('An event marker must be selected.')

            ev = marker.get_event()

            lat, lon = ev.lat, ev.lon
        else:
            lat, lon = self.lat, self.lon

        site = self.datacenter.lower()
        try:
            kwargs = {}
            if site == 'iris':
                kwargs['matchtimeseries'] = True

            sx = fdsn_ws.station(
                site=site, latitude=lat, longitude=lon,
                minradius=self.minradius, maxradius=self.maxradius,
                startbefore=tmin, endafter=tmax, channel=self.channel_pattern,
                format='text', level='channel', includerestricted=False,
                **kwargs)

        except fdsn_ws.EmptyResult:
            self.fail('No stations matching given criteria.')

        stations = sx.get_pyrocko_stations()
        networks = set([s.network for s in stations])

        t2s = util.time_to_str
        dir = self.tempdir()
        fns = []
        for net in networks:
            nstations = [s for s in stations if s.network == net]
            selection = fdsn_ws.make_data_selection(nstations, tmin, tmax)
            if selection:
                for x in selection:
                    logger.info(
                        'Adding data selection: %s.%s.%s.%s %s - %s'
                        % (tuple(x[:4]) + (t2s(x[4]), t2s(x[5]))))

                try:
                    d = fdsn_ws.dataselect(site=site, selection=selection)
                    fn = pjoin(dir, 'data-%s.mseed' % net)
                    with open(fn, 'wb') as f:
                        f.write(d.read())
                    fns.append(fn)

                except fdsn_ws.EmptyResult:
                    pass

        all_traces = []
        for fn in fns:
            try:
                traces = list(io.load(fn))

                all_traces.extend(traces)

            except io.FileLoadError as e:
                logger.warning('File load error, %s' % e)
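
For reference, each entry produced by make_data_selection and consumed by
dataselect above is a (network, station, location, channel, tmin, tmax) tuple,
as the logging call unpacks; a hand-built equivalent with hypothetical codes
would be:

selection = [('GE', 'EIL', '', 'BHZ', tmin, tmax)]  # hypothetical entry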
Example 6
    def download(self, event, directory='array_data', timing=None, length=None,
                 want='all', force=False, prefix=False, dump_config=False,
                 get_responses=False):
        """:param want: either 'all', an ID string, or a list of ID strings.
        """
        use = []
        unit = 'M'
        if timing is None and length is None:
            raise Exception('Define one of "timing" and "length"')
        prefix = prefix or ''
        directory = pjoin(prefix, directory)
        if not os.path.isdir(directory):
            os.mkdir(directory)
        pzresponses = {}
        logger.info('download data: %s at %sN %sE' % (
            event.name, event.lat, event.lon))
        for site, array_data_provider in self.providers.items():
            logger.info('requesting data from site %s' % site)
            for array_id, codes in array_data_provider.items():
                if want != 'all' and want != ['all'] and array_id not in want:
                    continue
                sub_directory = pjoin(directory, array_id)
                logger.info("%s" % array_id)
                if not isinstance(codes, list):
                    codes = [codes]
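                # The initial selection spans a short window after the origin
                # time; it is only used to discover the array's stations.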
                selection = [
                    c + tuple((event.time, event.time+1000.)) for c in codes]
                logger.debug('selection: %s' % selection)
                try:
                #    if site=='bgr':
                #        st = ws.station(url='http://eida.bgr.de/', selection=selection)
                #    else:
                #        st = ws.station(site=site, selection=selection)
                    st = ws.station(site=site, selection=selection)
                except ws.EmptyResult as e:
                    logger.error('No results: %s %s, skipping.' % (e, array_id))
                    continue
                except ValueError as e:
                    logger.error(e)
                    logger.error('...skipping...')
                    continue

                stations = st.get_pyrocko_stations()
                min_dist = min(
                    [ortho.distance_accurate50m(s, event) for s in stations])
                max_dist = max(
                    [ortho.distance_accurate50m(s, event) for s in stations])

                mod = cake.load_model(crust2_profile=(event.lat, event.lon))
                if length:
                    tstart = 0.
                    tend = length
                elif timing:
                    tstart = timing[0].t(mod, (event.depth, min_dist))
                    tend = timing[1].t(mod, (event.depth, max_dist))
                selection = [
                    c + tuple((event.time + tstart, event.time + tend)
                              ) for c in codes]
                try:
                    d = ws.dataselect(site=site, selection=selection)
                    store.remake_dir(sub_directory, force)
                    store.remake_dir(pjoin(sub_directory, 'responses'), force)
                    fn = pjoin(sub_directory, 'traces.mseed')
                    with open(fn, 'wb') as f:
                        f.write(d.read())
                    if get_responses:
                        trs = io.load(fn, getdata=False)
                        logger.info('Request responses from %s' % site)
                        if progressbar:
                            pb = progressbar.ProgressBar(maxval=len(trs)).start()
                        for i_tr, tr in enumerate(trs):
                            try:
                                st = ws.station(
                                    site=site, selection=selection, level='response')
                                pzresponse = st.get_pyrocko_response(
                                    nslc=tr.nslc_id,
                                    timespan=(tr.tmin, tr.tmax),
                                    fake_input_units=unit)
                                pzresponse.regularize()
                            except fdsnstation.NoResponseInformation as e:
                                logger.warn("no response information: %s" % e)
                                pzresponse = None
                            except fdsnstation.MultipleResponseInformation as e:
                                logger.warn("MultipleResponseInformation: %s" % e)
                                pzresponse = None
                            pzresponses[tr.nslc_id] = pzresponse
                            if pzresponse is not None:
                                pzresponse.dump(filename=pjoin(
                                    sub_directory,
                                    'responses',
                                    'resp_%s.yaml' % '.'.join(tr.nslc_id)))
                            if progressbar:
                                pb.update(i_tr)
                        if progressbar:
                            pb.finish()
                    model.dump_stations(
                        stations, pjoin(sub_directory, 'stations.pf'))

                    if timing:
                        t = Timings(list(timing))
                        self.timings[array_id] = t
                    if array_id not in use and array_id not in self.use:
                        use.append(array_id)
                except ws.EmptyResult as e:
                    logger.error('%s on %s' % (e, array_id))

        self.use.extend(use)
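
The loop above implies that self.providers maps a site name to array IDs, each
holding a list of (network, station, location, channel) code tuples; a
hypothetical instance, inferred from how codes is concatenated with a time
window, might look like:

providers = {
    'iris': {
        'YKA': [('CN', 'YKA*', '', 'SHZ')],  # hypothetical array entry
    },
}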
Example 7
def main():
    parser = OptionParser(
        usage=usage,
        description=description)

    parser.add_option(
        '--force',
        dest='force',
        action='store_true',
        default=False,
        help='allow recreation of output <directory>')

    parser.add_option(
        '--debug',
        dest='debug',
        action='store_true',
        default=False,
        help='print debugging information to stderr')

    parser.add_option(
        '--dry-run',
        dest='dry_run',
        action='store_true',
        default=False,
        help='show available stations/channels and exit '
             '(do not download waveforms)')

    parser.add_option(
        '--continue',
        dest='continue_',
        action='store_true',
        default=False,
        help='continue an interrupted download')

    parser.add_option(
        '--local-data',
        dest='local_data',
        action='append',
        help='add file/directory with local data')

    parser.add_option(
        '--local-stations',
        dest='local_stations',
        action='append',
        help='add local stations file')

    parser.add_option(
        '--local-responses-resp',
        dest='local_responses_resp',
        action='append',
        help='add file/directory with local responses in RESP format')

    parser.add_option(
        '--local-responses-pz',
        dest='local_responses_pz',
        action='append',
        help='add file/directory with local pole-zero responses')

    parser.add_option(
        '--local-responses-stationxml',
        dest='local_responses_stationxml',
        help='add file with local response information in StationXML format')

    parser.add_option(
        '--window',
        dest='window',
        default='full',
        help='set time window to choose [full, p, "<time-start>,<time-end>"'
             '] (time format is YYYY-MM-DD HH:MM:SS)')

    parser.add_option(
        '--out-components',
        choices=['enu', 'rtu'],
        dest='out_components',
        default='rtu',
        help='set output component orientations to radial-transverse-up [rtu] '
             '(default) or east-north-up [enu]')

    parser.add_option(
        '--padding-factor',
        type=float,
        default=6.0,
        dest='padding_factor',
        help='extend time window on either side, in multiples of 1/<fmin_hz> '
             '(default: %default)')

    parser.add_option(
        '--credentials',
        dest='user_credentials',
        action='append',
        default=[],
        metavar='SITE,USER,PASSWD',
        help='user credentials for specific site to access restricted data '
             '(this option can be repeated)')

    parser.add_option(
        '--token',
        dest='auth_tokens',
        metavar='SITE,FILENAME',
        action='append',
        default=[],
        help='user authentication token for specific site to access '
             'restricted data (this option can be repeated)')

    parser.add_option(
        '--sites',
        dest='sites',
        metavar='SITE1,SITE2,...',
        default='geofon,iris',
        help='sites to query (available: %s, default: "%%default"'
        % ', '.join(g_sites_available))

    parser.add_option(
        '--band-codes',
        dest='priority_band_code',
        metavar='V,L,M,B,H,S,E,...',
        default='B,H',
        help='select and prioritize band codes (default: %default)')

    parser.add_option(
        '--instrument-codes',
        dest='priority_instrument_code',
        metavar='H,L,G,...',
        default='H,L',
        help='select and prioritize instrument codes (default: %default)')

    parser.add_option(
        '--radius-min',
        dest='radius_min',
        metavar='VALUE',
        default=0.0,
        type=float,
        help='minimum radius [km]')

    parser.add_option(
        '--nstations-wanted',
        dest='nstations_wanted',
        metavar='N',
        type=int,
        help='number of stations to select initially')

    parser.add_option(
        '--magmin',
        dest='magmin',
        metavar='VALUE',
        default=6.0,
        type=float,
        help='minimum magnitude of events')

    parser.add_option(
        '--minlen',
        dest='minlen',
        metavar='VALUE',
        default=2100.0,
        type=float,
        help='minimum length of traces')

    parser.add_option(
        '--selection',
        dest='selection_file',
        action='append',
        help='add file with station selection')


    (options, args) = parser.parse_args(sys.argv[1:])
    magmin = options.magmin
    minlen = options.minlen
    if len(args) not in (10, 7, 6):
        parser.print_help()
        sys.exit(1)

    if options.debug:
        util.setup_logging(program_name, 'debug')
    else:
        util.setup_logging(program_name, 'info')

    if options.local_responses_pz and options.local_responses_resp:
        logger.critical('cannot use local responses in PZ and RESP '
                        'format at the same time')
        sys.exit(1)

    n_resp_opt = 0
    for resp_opt in (
            options.local_responses_pz,
            options.local_responses_resp,
            options.local_responses_stationxml):

        if resp_opt:
            n_resp_opt += 1

    if n_resp_opt > 1:
        logger.critical('can only handle local responses from either PZ or '
                        'RESP or StationXML. Cannot yet merge different '
                        'response formats.')
        sys.exit(1)

    if options.local_responses_resp and not options.local_stations:
        logger.critical('--local-responses-resp can only be used '
                        'when --stations is also given.')
        sys.exit(1)

    try:
        ename = ''
        magnitude = None
        mt = None
        if len(args) == 10:
            time = util.str_to_time(args[1] + ' ' + args[2])
            lat = float(args[3])
            lon = float(args[4])
            depth = float(args[5]) * km
            iarg = 6

        elif len(args) == 7:
            if args[2].find(':') == -1:
                sname_or_date = None
                lat = float(args[1])
                lon = float(args[2])
                event = None
                time = None
            else:
                sname_or_date = args[1] + ' ' + args[2]

            iarg = 2

        elif len(args) == 6:
            sname_or_date = args[1]
            iarg = 2

        if len(args) in (7, 6) and sname_or_date is not None:
            events = get_events_by_name_or_date([sname_or_date],
                                                catalog=geofon, magmin=magmin)
            if len(events) == 0:
                logger.critical('no event found')
                sys.exit(1)
            elif len(events) > 1:
                logger.critical('more than one event found')
                sys.exit(1)

            event = events[0]
            time = event.time
            lat = event.lat
            lon = event.lon
            depth = event.depth
            ename = event.name
            magnitude = event.magnitude
            mt = event.moment_tensor

        radius = float(args[iarg])*km
        fmin = float(args[iarg+1])
        sample_rate = float(args[iarg+2])

        eventname = args[iarg+3]
        cwd = str(sys.argv[1])
        event_dir = op.join('events', cwd, 'data')
        output_dir = event_dir
    except Exception:
        parser.print_help()
        sys.exit(1)

    if options.force and op.isdir(event_dir):
        if not options.continue_:
            shutil.rmtree(event_dir)

    if op.exists(event_dir) and not options.continue_:
        logger.critical(
            'directory "%s" exists. Delete it first or use the --force option'
            % event_dir)
        sys.exit(1)

    util.ensuredir(output_dir)

    if time is not None:
        event = model.Event(
            time=time, lat=lat, lon=lon, depth=depth, name=ename,
            magnitude=magnitude, moment_tensor=mt)

    if options.window == 'full':
        if event is None:
            logger.critical('need event for --window=full')
            sys.exit(1)

        low_velocity = 1500.
        timewindow = VelocityWindow(
            low_velocity, tpad=options.padding_factor/fmin)

        tmin, tmax = timewindow(time, radius, depth)

    elif options.window == 'p':
        if event is None:
            logger.critical('need event for --window=p')
            sys.exit(1)

        phases = list(map(cake.PhaseDef, 'P p'.split()))
        emod = cake.load_model()

        tpad = options.padding_factor / fmin
        timewindow = PhaseWindow(emod, phases, -tpad, tpad)

        arrivaltimes = []
        for dist in num.linspace(0, radius, 20):
            try:
                arrivaltimes.extend(timewindow(time, dist, depth))
            except NoArrival:
                pass

        if not arrivaltimes:
            logger.error('required phase arrival not found')
            sys.exit(1)

        tmin = min(arrivaltimes)
        tmax = max(arrivaltimes)

    else:
        try:
            stmin, stmax = options.window.split(',')
            tmin = util.str_to_time(stmin.strip())
            tmax = util.str_to_time(stmax.strip())

            timewindow = FixedWindow(tmin, tmax)

        except ValueError:
            logger.critical('invalid argument to --window: "%s"'
                            % options.window)
            sys.exit(1)

    if event is not None:
        event.name = eventname

    tlen = tmax - tmin
    tfade = tfade_factor / fmin

    tpad = tfade

    tmin -= tpad+1000.
    tmax += tpad+1000.

    tinc = None

    priority_band_code = options.priority_band_code.split(',')
    for s in priority_band_code:
        if len(s) != 1:
            logger.critical('invalid band code: %s' % s)

    priority_instrument_code = options.priority_instrument_code.split(',')
    for s in priority_instrument_code:
        if len(s) != 1:
            logger.critical('invalid instrument code: %s' % s)

    station_query_conf = dict(
        latitude=lat,
        longitude=lon,
        minradius=options.radius_min*km*cake.m2d,
        maxradius=radius*cake.m2d,
        channel=','.join('?%s?' % s for s in priority_band_code))

    target_sample_rate = sample_rate

    fmax = target_sample_rate

    # target_sample_rate = None
    # priority_band_code = ['H', 'B', 'M', 'L', 'V', 'E', 'S']

    priority_units = ['M/S', 'M', 'M/S**2']

    output_units = 'M'

    sites = [x.strip() for x in options.sites.split(',') if x.strip()]

    for site in sites:
        if site not in g_sites_available:
            logger.critical('unknown FDSN site: %s' % site)
            sys.exit(1)

    for s in options.user_credentials:
        try:
            site, user, passwd = s.split(',')
            g_user_credentials[site] = user, passwd
        except ValueError:
            logger.critical('invalid format for user credentials: "%s"' % s)
            sys.exit(1)

    for s in options.auth_tokens:
        try:
            site, token_filename = s.split(',')
            with open(token_filename, 'r') as f:
                g_auth_tokens[site] = f.read()
        except (ValueError, OSError, IOError):
            logger.critical('cannot get token from file: %s' % token_filename)
            sys.exit(1)

    fn_template0 = \
        'data_%(network)s.%(station)s.%(location)s.%(channel)s_%(tmin)s.mseed'

    fn_template_raw = op.join(output_dir, 'raw',  fn_template0)
    fn_stations_raw = op.join(output_dir, 'stations.txt')
    fn_template_rest = op.join(output_dir, 'rest',  fn_template0)
    fn_commandline = op.join(output_dir, 'palantiriown.command')

    ftap = (ffade_factors[0]*fmin, fmin, fmax, ffade_factors[1]*fmax)
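    # ftap holds the four corner frequencies of the frequency-domain cosine
    # taper applied later by tr.transfer() during restitution.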

    # chapter 1: download

    sxs = []
    for site in sites:
        try:
            extra_args = {
                'iris': dict(matchtimeseries=True),
            }.get(site, {})

            extra_args.update(station_query_conf)

            if site == 'geonet':
                extra_args.update(
                    starttime=tmin,
                    endtime=tmax)
            else:
                extra_args.update(
                    startbefore=tmax,
                    endafter=tmin,
                    includerestricted=(
                        site in g_user_credentials or site in g_auth_tokens))

            logger.info('downloading channel information (%s)' % site)
            sx = ws.station(
                site=site,
                format='text',
                level='channel',
                **extra_args)

        except ws.EmptyResult:
            logger.error('No stations matching given criteria.')
            sx = None

        sxs.append(sx)

    if all(sx is None for sx in sxs) and not options.local_data:
        sys.exit(1)

    nsl_to_sites = defaultdict(list)
    nsl_to_station = {}

    if options.selection_file:
        logger.info('using stations from stations file!')
        stations = []
        for fn in options.selection_file:
            stations.extend(model.load_stations(fn))

        nsls_selected = set(s.nsl() for s in stations)
    else:
        nsls_selected = None

    for sx, site in zip(sxs, sites):
        if sx is None:
            continue

        site_stations = sx.get_pyrocko_stations()
        for s in site_stations:
            nsl = s.nsl()

            nsl_to_sites[nsl].append(site)
            if nsl not in nsl_to_station:
                if nsls_selected:
                    if nsl in nsls_selected:
                        nsl_to_station[nsl] = s
                else:
                    nsl_to_station[nsl] = s  # using first site with this station

        logger.info('number of stations found: %i' % len(nsl_to_station))
    # station weeding

    nsls_selected = None
    if options.nstations_wanted:
        stations_all = [
            nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

        for s in stations_all:
            s.set_event_relative_data(event)

        stations_selected = weeding.weed_stations(
            stations_all, options.nstations_wanted)[0]

        nsls_selected = set(s.nsl() for s in stations_selected)
        logger.info('number of stations selected: %i' % len(nsls_selected))

    if tinc is None:
        tinc = 3600.

    have_data = set()

    if options.continue_:
        fns = glob.glob(fn_template_raw % starfill())
        p = pile.make_pile(fns)
    else:
        fns = []

    have_data_site = {}
    could_have_data_site = {}
    for site in sites:
        have_data_site[site] = set()
        could_have_data_site[site] = set()

    available_through = defaultdict(set)
    it = 0
    nt = int(math.ceil((tmax - tmin) / tinc))
    for it in range(nt):
        tmin_win = tmin + it * tinc
        tmax_win = min(tmin + (it + 1) * tinc, tmax)
        logger.info('time window %i/%i (%s - %s)' % (it+1, nt,
                                                     util.tts(tmin_win),
                                                     util.tts(tmax_win)))

        have_data_this_window = set()
        if options.continue_:
            trs_avail = p.all(tmin=tmin_win, tmax=tmax_win, load_data=False)
            for tr in trs_avail:
                have_data_this_window.add(tr.nslc_id)

        for site, sx in zip(sites, sxs):
            if sx is None:
                continue

            selection = []
            channels = sx.choose_channels(
                target_sample_rate=target_sample_rate,
                priority_band_code=priority_band_code,
                priority_units=priority_units,
                priority_instrument_code=priority_instrument_code,
                timespan=(tmin_win, tmax_win))

            for nslc in sorted(channels.keys()):
                if nsls_selected is not None and nslc[:3] not in nsls_selected:
                    continue

                could_have_data_site[site].add(nslc)

                if nslc not in have_data_this_window:
                    channel = channels[nslc]
                    if event:
                        lat_, lon_ = event.lat, event.lon
                    else:
                        lat_, lon_ = lat, lon

                    dist = orthodrome.distance_accurate50m_numpy(
                        lat_, lon_,
                        channel.latitude.value, channel.longitude.value)

                    if event:
                        depth_ = event.depth
                        time_ = event.time
                    else:
                        depth_ = None
                        time_ = None
                    try:
                        tmin_, tmax_ = timewindow(time_, dist, depth_)

                        tmin_this = tmin_ - tpad
                        tmax_this = tmax_ + tpad

                        tmin_req = max(tmin_win, tmin_this)
                        tmax_req = min(tmax_win, tmax_this)

                        if channel.sample_rate:
                            deltat = 1.0 / channel.sample_rate.value
                        else:
                            deltat = 1.0

                        if tmin_req < tmax_req:
                            # extend time window by some samples because otherwise
                            # sometimes gaps are produced
                            selection.append(
                                nslc + (
                                    tmin_req-deltat*10.0,
                                    tmax_req+deltat*10.0))
                    except Exception:
                        pass
            if options.dry_run:
                for (net, sta, loc, cha, _, _) in selection:
                    available_through[net, sta, loc, cha].add(site)

            else:
                neach = 100
                i = 0
                nbatches = ((len(selection)-1) // neach) + 1
                while i < len(selection):
                    selection_now = selection[i:i+neach]

                    f = tempfile.NamedTemporaryFile()
                    try:
                        sbatch = ''
                        if nbatches > 1:
                            sbatch = ' (batch %i/%i)' % (
                                (i//neach) + 1, nbatches)

                        logger.info('downloading data (%s)%s' % (site, sbatch))

                        data = ws.dataselect(
                            site=site, selection=selection_now,
                            **get_user_credentials(site))

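                        # Stream the miniSEED payload to the temporary file in
                        # 1 KiB chunks to keep memory use bounded.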
                        while True:
                            buf = data.read(1024)
                            if not buf:
                                break
                            f.write(buf)

                        f.flush()

                        trs = io.load(f.name)
                        for tr in trs:
                            try:
                                tr.chop(tmin_win, tmax_win)
                                have_data.add(tr.nslc_id)
                                have_data_site[site].add(tr.nslc_id)
                            except trace.NoData:
                                pass

                        fns2 = io.save(trs, fn_template_raw)
                        for fn in fns2:
                            if fn in fns:
                                logger.warn('overwriting file %s', fn)
                        fns.extend(fns2)

                    except ws.EmptyResult:
                        pass

                    except HTTPError:
                        logger.warn(
                            'an error occurred while downloading data '
                            'for channels \n  %s' % '\n  '.join(
                                '.'.join(x[:4]) for x in selection_now))

                    f.close()
                    i += neach

    if options.dry_run:
        nslcs = sorted(available_through.keys())

        all_channels = defaultdict(set)
        all_stations = defaultdict(set)

        def plural_s(x):
            return '' if x == 1 else 's'

        for nslc in nslcs:
            sites = tuple(sorted(available_through[nslc]))
            logger.info('selected: %s.%s.%s.%s from site%s %s' % (
                nslc + (plural_s(len(sites)), '+'.join(sites))))

            all_channels[sites].add(nslc)
            all_stations[sites].add(nslc[:3])

        nchannels_all = 0
        nstations_all = 0
        for sites in sorted(
                all_channels.keys(),
                key=lambda sites: (-len(sites), sites)):

            nchannels = len(all_channels[sites])
            nstations = len(all_stations[sites])
            nchannels_all += nchannels
            nstations_all += nstations
            logger.info(
                'selected (%s): %i channel%s (%i station%s)' % (
                    '+'.join(sites),
                    nchannels,
                    plural_s(nchannels),
                    nstations,
                    plural_s(nstations)))

        logger.info(
            'selected total: %i channel%s (%i station%s)' % (
                nchannels_all,
                plural_s(nchannels_all),
                nstations_all,
                plural_s(nstations_all)))

        logger.info('dry run done.')
        sys.exit(0)

    for nslc in have_data:
        # if we are in continue mode, we have to guess where the data came from
        if not any(nslc in have_data_site[site] for site in sites):
            for site in sites:
                if nslc in could_have_data_site[site]:
                    have_data_site[site].add(nslc)

    sxs = {}
    for site in sites:
        selection = []
        for nslc in sorted(have_data_site[site]):
            selection.append(nslc + (tmin-tpad, tmax+tpad))

        if selection:
            logger.info('downloading response information (%s)' % site)
            sxs[site] = ws.station(
                site=site, level='response', selection=selection)

            sxs[site].dump_xml(
                filename=op.join(output_dir, 'stations.%s.xml' % site))

    # chapter 1.5: inject local data

    if options.local_data:
        have_data_site['local'] = set()
        plocal = pile.make_pile(options.local_data, fileformat='detect')
        for traces in plocal.chopper_grouped(
                gather=lambda tr: tr.nslc_id,
                tmin=tmin,
                tmax=tmax,
                tinc=tinc):

            for tr in traces:
                if tr.nslc_id not in have_data:
                    fns.extend(io.save(traces, fn_template_raw))
                    have_data_site['local'].add(tr.nslc_id)
                    have_data.add(tr.nslc_id)

        sites.append('local')

    if options.local_responses_pz:
        sxs['local'] = epz.make_stationxml(
            epz.iload(options.local_responses_pz))

    if options.local_responses_resp:
        local_stations = []
        for fn in options.local_stations:
            local_stations.extend(
                model.load_stations(fn))

        sxs['local'] = resp.make_stationxml(
            local_stations, resp.iload(options.local_responses_resp))

    if options.local_responses_stationxml:
        sxs['local'] = station.load_xml(
            filename=options.local_responses_stationxml)

    # chapter 1.6: dump raw data stations file

    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    stations = [
        nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

    util.ensuredirs(fn_stations_raw)
    model.dump_stations(stations, fn_stations_raw)

    dump_commandline(sys.argv, fn_commandline)

    # chapter 2: restitution

    if not fns:
        logger.error('no data available')
        sys.exit(1)

    p = pile.make_pile(fns, show_progress=False)

    otinc = 3600.
    otmin = math.floor(p.tmin / otinc) * otinc
    otmax = math.ceil(p.tmax / otinc) * otinc
    otpad = tpad*2

    fns = []
    rest_traces_b = []
    win_b = None
    for traces_a in p.chopper_grouped(
            gather=lambda tr: tr.nslc_id,
            tmin=otmin,
            tmax=otmax,
            tinc=otinc,
            tpad=otpad):

        rest_traces_a = []
        win_a = None
        for tr in traces_a:
            win_a = tr.wmin, tr.wmax

            if win_b and win_b[0] >= win_a[0]:
                fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))
                rest_traces_b = []
                win_b = None

            response = None
            failure = []
            for site in sites:
                try:
                    if site not in sxs:
                        continue
                    response = sxs[site].get_pyrocko_response(
                        tr.nslc_id,
                        timespan=(tr.tmin, tr.tmax),
                        fake_input_units=output_units)

                    break

                except station.NoResponseInformation:
                    failure.append('%s: no response information' % site)

                except station.MultipleResponseInformation:
                    failure.append('%s: multiple response information' % site)

            if response is None:
                failure = ', '.join(failure)

            else:
                failure = ''
                try:
                    rest_tr = tr.transfer(tfade, ftap, response, invert=True)
                    rest_traces_a.append(rest_tr)

                except (trace.TraceTooShort, trace.NoData, trace.InfiniteResponse):
                    failure = 'trace too short'

            if failure:
                logger.warn('failed to restitute trace %s.%s.%s.%s (%s)' %
                            (tr.nslc_id + (failure,)))

        if rest_traces_b:
            try:
                rest_traces = trace.degapper(rest_traces_b + rest_traces_a,
                                             deoverlap='crossfade_cos')

                fns.extend(cut_n_dump(rest_traces, win_b, fn_template_rest))
                rest_traces_a = []
                if win_a:
                    for tr in rest_traces:
                        try:
                            rest_traces_a.append(
                                tr.chop(win_a[0], win_a[1]+otpad,
                                        inplace=False))
                        except trace.NoData:
                            pass
            except Exception:
                pass

        rest_traces_b = rest_traces_a
        win_b = win_a

    fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))

    # chapter 3: rotated restituted traces for inspection

    if not event:
        sys.exit(0)

    fn_template1 = \
        'DISPL.%(network)s.%(station)s.%(location)s.%(channel)s'

    fn_waveforms = op.join(output_dir, 'prepared',  fn_template1)
    fn_stations_prep = op.join(output_dir, 'stations_disp.txt')
    fn_event = op.join(event_dir, 'event.txt')

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    p = pile.make_pile(fns, show_progress=False)

    deltat = None
    if sample_rate is not None:
        deltat = 1.0 / sample_rate

    used_stations = []
    for nsl, s in nsl_to_station.items():
        s.set_event_relative_data(event)
        traces = p.all(trace_selector=lambda tr: tr.nslc_id[:3] == nsl)
        traces = trace.degapper(traces, maxgap=50, maxlap=50)
        keep = []
        for tr in traces:
            if deltat is not None:
                try:
                    tr.downsample_to(deltat, snap=True, allow_upsample_max=5)
                    keep.append(tr)
                except util.UnavailableDecimation as e:
                    logger.warn('Cannot downsample %s.%s.%s.%s: %s'
                                % (tr.nslc_id + (e,)))
                    continue

        if options.out_components == 'rtu':
            pios = s.guess_projections_to_rtu(out_channels=('R', 'T', 'Z'))
        elif options.out_components == 'enu':
            pios = s.guess_projections_to_enu(out_channels=('E', 'N', 'Z'))
        else:
            assert False
        trss = []
        for (proj, in_channels, out_channels) in pios:

            proc = trace.project(traces, proj, in_channels, out_channels)
            for tr in proc:
                for ch in out_channels:
                    if ch.name == tr.channel:
                        s.add_channel(ch)
                        trss.append(tr)
            if proc:
                if tr.tmax - tr.tmin >= minlen:
                    io.save(proc, fn_waveforms)
                    used_stations.append(s)

        gaps = []

    prep_stations = list(used_stations)
    prep_stations_one = []
    cluster_stations_one = []
    prep_stations_cluster = prep_stations.copy()
    for st in prep_stations:
        for channel in ['BHE', 'BHN', 'BHZ', 'BH1', 'BH2']:
            try:
                st.remove_channel_by_name(channel)
            except Exception:
                pass
        prep_stations_one.append(st)

    for st in prep_stations_cluster:
        for channel in ['R', 'T']:
            try:
                st.remove_channel_by_name(channel)
            except Exception:
                pass
        cluster_stations_one.append(st)

    util.ensuredirs(fn_stations_prep)
    model.dump_events([event], fn_event)
    from subprocess import call
    script = "cat"+" "+ output_dir+"/rest/*.mseed" +"> "+ output_dir+"/traces.mseed"
    call(script, shell=True)
    script = "cat"+" "+ output_dir+"/prepared/*..*" +"> "+ output_dir+"/traces_rotated.mseed"
    call(script, shell=True)


    traces = io.load(output_dir+"/traces_rotated.mseed")

    for tr in traces:
        tr.ydata = num.diff(tr.ydata)
    io.save(traces, output_dir+"/traces_velocity.mseed")
    cluster_stations_ones = []
    for st in cluster_stations_one:
        add = 0
        for tr in traces:
            if st.station == tr.station:
                add = 1
                for stx in cluster_stations_ones:
                    if stx.station == st.station:
                        add = 0
                if add == 1:
                    cluster_stations_ones.append(st)

    prep_stations_ones = []
    for st in prep_stations_one:
        add = 0
        for tr in traces:
            if st.station == tr.station:
                add = 1
                for stx in prep_stations_ones:
                    if stx.station == st.station:
                        add = 0
                if add == 1:
                    prep_stations_ones.append(st)
    gaps = []
    remove = []

    for tr in traces:
        for st in cluster_stations_ones:
            for channel in st.channels:
                if (tr.network == st.network and tr.station == st.station
                        and tr.location == st.location
                        and channel.name == tr.channel):
                    gaps.append(st.station)


    remove = [x for x in gaps if gaps.count(x) > 1]
    for name in remove:
        for st in list(cluster_stations_ones):
            if st.station == name:
                cluster_stations_ones.remove(st)

    gaps = []
    remove = []
    for tr in traces:
        for st in prep_stations_ones:
            for channel in st.channels:
                if (tr.network == st.network and tr.station == st.station
                        and tr.location == st.location
                        and channel.name == tr.channel):
                    gaps.append(st.station)
    remove = [x for x in gaps if gaps.count(x) > 1]
    for name in remove:
        for st in list(prep_stations_ones):
            if st.station == name:
                prep_stations_ones.remove(st)

    fn_stations_cluster = op.join(output_dir, 'stations_cluster.txt')

    model.dump_stations(prep_stations_ones, fn_stations_prep)
    model.dump_stations(cluster_stations_ones, fn_stations_cluster)


    logger.info('prepared waveforms from %i stations' % len(prep_stations_ones))
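
Going by the positional-argument handling above, a full invocation with an
explicit origin takes ten positional arguments, roughly (hypothetical values;
the shorter forms pass an event name or date instead):

# <directory> <date> <time> <lat> <lon> <depth-km> <radius-km> <fmin-hz>
#     <sample-rate-hz> <eventname>
# e.g.: myevent 2014-01-01 16:10:00 52.5 13.4 10.0 1000.0 0.01 2.0 myevent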
Example 8
    def OFF_test_response(self):
        tmin = stt('2014-01-01 00:00:00')
        tmax = stt('2014-01-02 00:00:00')
        sx = fdsn_ws.station(site='iris',
                             network='II',
                             channel='?HZ',
                             startbefore=tmin,
                             endafter=tmax,
                             level='channel',
                             format='text',
                             matchtimeseries=True)

        for nslc in sx.nslc_code_list:
            print(nslc)
            net, sta, loc, cha = nslc
            sxr = fdsn_ws.station(site='iris',
                                  network=net,
                                  station=sta,
                                  location=loc,
                                  channel=cha,
                                  startbefore=tmin,
                                  endafter=tmax,
                                  level='response',
                                  matchtimeseries=True)

            fi = iris_ws.ws_resp(network=net,
                                 station=sta,
                                 location=loc,
                                 channel=cha,
                                 tmin=tmin,
                                 tmax=tmax)

            _, fn = tempfile.mkstemp()
            fo = open(fn, 'w')
            while True:
                d = fi.read(1024)
                if not d:
                    break

                fo.write(d)

            fo.close()

            resp_sx = sxr.get_pyrocko_response(nslc,
                                               timespan=(tmin, tmax),
                                               fake_input_units='M/S')

            resp_er = trace.Evalresp(fn, target='vel', nslc_id=nslc, time=tmin)
            fmin = 0.001
            fmax = 100.

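            # Cap the comparison band at the channel's Nyquist frequency.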
            for _, _, channel in sxr.iter_network_station_channels(
                    net, sta, loc, cha, timespan=(tmin, tmax)):
                if channel.response:
                    fmax = channel.sample_rate.value * 0.5

            f = num.exp(num.linspace(num.log(fmin), num.log(fmax), 500))
            try:
                t_sx = resp_sx.evaluate(f)
                t_er = resp_er.evaluate(f)
                import pylab as lab

                abs_dif = num.abs(num.abs(t_sx) - num.abs(t_er)) / num.max(
                    num.abs(t_er))

                mda = num.mean(abs_dif[f < 0.5 * fmax])

                pha_dif = num.abs(num.angle(t_sx) - num.angle(t_er))

                mdp = num.mean(pha_dif[f < 0.5 * fmax])

                print(mda, mdp)

                if mda > 0.03 or mdp > 0.04:
                    lab.gcf().add_subplot(2, 1, 1)
                    lab.plot(f, num.abs(t_sx), color='black')
                    lab.plot(f, num.abs(t_er), color='red')
                    lab.xscale('log')
                    lab.yscale('log')

                    lab.gcf().add_subplot(2, 1, 2)
                    lab.plot(f, num.angle(t_sx), color='black')
                    lab.plot(f, num.angle(t_er), color='red')
                    lab.xscale('log')
                    lab.show()

                else:
                    print('ok')
            except Exception:
                print('failed:', nslc)
Example 9
    sxs = []
    for site in sites:
        try:
            extra_args = {
                'iris': dict(matchtimeseries=True),
            }.get(site, {})

            extra_args.update(station_query_conf)

            if site == 'geonet':
                extra_args.update(starttime=tmin, endtime=tmax)
            else:
                extra_args.update(startbefore=tmax,
                                  endafter=tmin,
                                  includerestricted=(site in g_user_credentials
                                                     or site in g_auth_tokens))

            logger.info('downloading channel information (%s)' % site)
            sx = ws.station(site=site,
                            format='text',
                            level='channel',
                            **extra_args)

        except ws.EmptyResult:
            logger.error('No stations matching given criteria.')
            sx = None

        sxs.append(sx)

    if all(sx is None for sx in sxs) and not options.local_data:
        sys.exit(1)
    try:
        nsl_to_sites = defaultdict(list)
        nsl_to_station = {}
        for sx, site in zip(sxs, sites):
            site_stations = sx.get_pyrocko_stations()
Example 10
    def OFF_test_response(self):
        tmin = stt("2014-01-01 00:00:00")
        tmax = stt("2014-01-02 00:00:00")
        sx = fdsn_ws.station(
            site="iris",
            network="IU",
            channel="?HZ",
            startbefore=tmin,
            endafter=tmax,
            level="channel",
            format="text",
            matchtimeseries=True,
        )

        for nslc in sx.nslc_code_list:
            net, sta, loc, cha = nslc
            sxr = fdsn_ws.station(
                site="iris",
                network=net,
                station=sta,
                location=loc,
                channel=cha,
                startbefore=tmin,
                endafter=tmax,
                level="response",
                matchtimeseries=True,
            )

            fi = iris_ws.ws_resp(network=net, station=sta, location=loc, channel=cha, tmin=tmin, tmax=tmax)

            _, fn = tempfile.mkstemp()
            fo = open(fn, "w")
            while True:
                d = fi.read(1024)
                if not d:
                    break

                fo.write(d)

            fo.close()

            resp_sx = sxr.get_pyrocko_response(nslc, timespan=(tmin, tmax))
            resp_er = trace.Evalresp(fn, target="vel", nslc_id=nslc, time=tmin)
            fmin = 0.001
            fmax = 100.0

            for _, _, channel in sxr.iter_network_station_channels(net, sta, loc, cha, timespan=(tmin, tmax)):
                if channel.response:
                    fmax = channel.sample_rate.value * 0.5

            f = num.exp(num.linspace(num.log(fmin), num.log(fmax), 500))
            try:
                t_sx = resp_sx.evaluate(f)
                t_er = resp_er.evaluate(f)
                import pylab as lab

                abs_dif = num.abs(num.abs(t_sx) - num.abs(t_er)) / num.max(num.abs(t_er))

                mda = num.mean(abs_dif[f < 0.5 * fmax])

                pha_dif = num.abs(num.angle(t_sx) - num.angle(t_er))

                mdp = num.mean(pha_dif[f < 0.5 * fmax])

                print(mda, mdp)

                if mda > 0.03 or mdp > 0.04:
                    lab.gcf().add_subplot(2, 1, 1)
                    lab.plot(f, num.abs(t_sx), color="black")
                    lab.plot(f, num.abs(t_er), color="red")
                    lab.xscale("log")
                    lab.yscale("log")

                    lab.gcf().add_subplot(2, 1, 2)
                    lab.plot(f, num.angle(t_sx), color="black")
                    lab.plot(f, num.angle(t_er), color="red")
                    lab.xscale("log")
                    lab.show()

                else:
                    print("ok")
            except Exception:
                print("failed:", nslc)