    def download(self, event, directory='array_data', timing=None, length=None,
                 want='all', force=False, prefix=False, dump_config=False,
                 get_responses=False):
        """:param want: either 'all' or ID as string or list of IDs as strings
        """
        use = []
        unit = 'M'
        if timing is None and length is None:
            raise Exception('Define one of "timing" and "length"')
        if isinstance(want, str):
            want = [want]
        prefix = prefix or ''
        directory = pjoin(prefix, directory)
        if not os.path.isdir(directory):
            os.makedirs(directory)
        pzresponses = {}
        logger.info('download data: %s at %sN %sE' % (
            event.name, event.lat, event.lon))
        for site, array_data_provider in self.providers.items():
            logger.info('requesting data from site %s' % site)
            for array_id, codes in array_data_provider.items():
                if 'all' not in want and array_id not in want:
                    continue
                sub_directory = pjoin(directory, array_id)
                logger.info("%s" % array_id)
                if not isinstance(codes, list):
                    codes = [codes]
                selection = [
                    c + tuple((event.time, event.time+1000.)) for c in codes]
                logger.debug('selection: %s' % selection)
                try:
                    st = ws.station(site=site, selection=selection)
                except ws.EmptyResult as e:
                    logger.error('No results: %s %s. skip' % (e, array_id))
                    continue
                except ValueError as e:
                    logger.error(e)
                    logger.error('...skipping...')
                    continue

                stations = st.get_pyrocko_stations()
                dists = [ortho.distance_accurate50m(s, event) for s in stations]
                min_dist = min(dists)
                max_dist = max(dists)

                mod = cake.load_model(crust2_profile=(event.lat, event.lon))
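                # choose the extraction window: a fixed *length* after the origin
                # time, or phase-based start/end times derived from *timing*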
                if length:
                    tstart = 0.
                    tend = length
                elif timing:
                    tstart = timing[0].t(mod, (event.depth, min_dist))
                    tend = timing[1].t(mod, (event.depth, max_dist))
                selection = [
                    c + tuple((event.time + tstart, event.time + tend)
                              ) for c in codes]
                try:
                    d = ws.dataselect(site=site, selection=selection)
                    store.remake_dir(sub_directory, force)
                    store.remake_dir(pjoin(sub_directory, 'responses'), force)
                    fn = pjoin(sub_directory, 'traces.mseed')
                    with open(fn, 'wb') as f:
                        f.write(d.read())
                    if get_responses:
                        trs = io.load(fn, getdata=False)
                        logger.info('Request responses from %s' % site)
                        if progressbar:
                            pb = progressbar.ProgressBar(maxval=len(trs)).start()
                        # request response meta data once per array instead of
                        # once per trace
                        response_st = ws.station(
                            site=site, selection=selection, level='response')
                        for i_tr, tr in enumerate(trs):
                            try:
                                pzresponse = response_st.get_pyrocko_response(
                                    nslc=tr.nslc_id,
                                    timespan=(tr.tmin, tr.tmax),
                                    fake_input_units=unit)
                                pzresponse.regularize()
                            except fdsnstation.NoResponseInformation as e:
                                logger.warning("no response information: %s" % e)
                                pzresponse = None
                            except fdsnstation.MultipleResponseInformation as e:
                                logger.warning(
                                    "MultipleResponseInformation: %s" % e)
                                pzresponse = None
                            pzresponses[tr.nslc_id] = pzresponse
                            if pzresponse is not None:
                                pzresponse.dump(filename=pjoin(
                                    sub_directory, 'responses',
                                    'resp_%s.yaml' % '.'.join(tr.nslc_id)))
                            if progressbar:
                                pb.update(i_tr)
                        if progressbar:
                            pb.finish()
                    model.dump_stations(
                        stations, pjoin(sub_directory, 'stations.pf'))

                    if timing:
                        t = Timings(list(timing))
                        self.timings[array_id] = t
                    if array_id not in use and array_id not in self.use:
                        use.append(array_id)
                except ws.EmptyResult as e:
                    logger.error('%s on %s' % (e, array_id))

        self.use.extend(use)
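
# Hedged usage sketch (not part of the original source): the enclosing class is not
# shown above, so the class name `ArrayDataRequest` and its constructor are
# assumptions. `providers` is assumed to map an FDSN site name to
# {array_id: [(net, sta, loc, cha), ...]}, matching how `selection` is assembled in
# download().
#
#     from pyrocko import model
#
#     event = model.load_events('event.pf')[0]
#     request = ArrayDataRequest(
#         providers={'geofon': {'YKA': [('CN', 'YKA*', '', 'SHZ')]}})
#     request.download(event, directory='array_data', length=600.,
#                      want=['YKA'], force=True, get_responses=True)
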
def propose_stores(distances,
                   models,
                   superdir,
                   source_depth_min=0.,
                   source_depth_max=15.,
                   source_depth_delta=1.,
                   sample_rate=10.,
                   force=False,
                   numdists=2,
                   run_ttt=False,
                   simplify=False,
                   phases=['P'],
                   classic=True,
                   distance_delta_max=None):
    '''Propose a fomosto store configuration for P-pP array beam forming.

    :param distances: dict mapping (station_id, key_station, key_event) keys to
        (dist_min, dist_max) ranges in meters
    :param models: dict mapping the station and event keys to CRUST2.0 profiles
    :param superdir: directory in which the stores are created
    :param source_depth_min: minimum source depth in km (default 0)
    :param source_depth_max: maximum source depth in km (default 15)
    :param source_depth_delta: source depth increment in km
    :param sample_rate: sample rate in Hz
    :param force: overwrite potentially existing stores
    :param simplify: simplify the source-side earth model
    :param phases: phase names used to estimate the slowness window
    :param run_ttt: generate travel time tables right away'''

    modelling_code_id = 'qseis.2006a'

    configs = []
    if classic:
        define_method = cake.PhaseDef
    else:
        define_method = cake.PhaseDef.classic

    wanted = [define_method(ph) for ph in phases]

    global_model = cake.load_model()
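    # global 1D background model; its crust is replaced per store with the
    # source-side CRUST2.0 profile below (replaced_crust)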
    remake_dir(superdir, force)
    for (station_id, key_station, key_event), (dist_min,
                                               dist_max) in distances.items():

        configid = '%s_%s_%s' % (station_id, key_station, key_event)
        distance_delta = dist_max - dist_min
        if distance_delta_max is not None:
            while distance_delta > distance_delta_max:
                distance_delta /= 2.
        config = ConfigTypeA(id=configid,
                             source_depth_min=source_depth_min * km,
                             source_depth_max=source_depth_max * km,
                             source_depth_delta=source_depth_delta * km,
                             distance_min=dist_min,
                             distance_max=dist_max,
                             distance_delta=distance_delta,
                             sample_rate=sample_rate,
                             ncomponents=10)

        station_crust = models[key_station]
        config.earthmodel_receiver_1d = cake.LayeredModel.from_scanlines(
            cake.from_crust2x2_profile(station_crust))

        config.earthmodel_1d = global_model.replaced_crust(
            crust2_profile=models[key_event])

        if simplify:
            config.earthmodel_1d = config.earthmodel_1d.simplify(
                max_rel_error=0.002)
        adjust_earthmodel_receiver_depth(config)
        configs.append(config)
        dest_dir = pjoin(superdir, config.id)
        remake_dir(dest_dir, force)
        logger.info('Created store: %s' % dest_dir)

        mean_z = num.mean((config.source_depth_min, config.source_depth_max))
        mean_dist = num.mean((config.distance_min, config.distance_max))
        arrivals = config.earthmodel_1d.arrivals(
            phases=wanted, distances=[mean_dist * cake.m2d], zstart=mean_z)
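        # if no ray arrives at the mean distance/depth, fall back to a broad
        # slowness taper and take the deepest model layer as turning depth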
        if len(arrivals) == 0:
            logger.warning(
                NoRay('d: %s, z: %s, %s phases: %s' %
                      (mean_dist * cake.m2d, mean_z,
                       'classic' if classic else '', "|".join(phases))))
            slow = 0.1
            slowness_taper = (0., 0., 1.3 * slow, 1.5 * slow)
            z_turn = num.max(config.earthmodel_1d.profile('z'))
        else:
            slow = arrivals[0].p / (cake.r2d * cake.d2m / km)
            slowness_taper = (0.3 * slow, 0.5 * slow, 1.5 * slow, 1.7 * slow)
            z_turn = num.max(arrivals[0].zxt_path_subdivided()[0])

        zmax = max(z_turn * 1.1,
                   config.earthmodel_receiver_1d.profile('z')[-1])

        config.earthmodel_1d = config.earthmodel_1d.extract(depth_max=zmax)
        begin_phase_defs = 'P,P\\,PP'
        if model_has_cmb(config.earthmodel_1d):
            begin_phase_defs += ',Pv_(cmb)p'
        config.modelling_code_id = modelling_code_id
        config.tabulated_phases = [
            TPDef(id='begin', definition=begin_phase_defs),
            TPDef(id='end', definition='2.5'),
            TPDef(id='PP', definition='PP'),
            TPDef(id='P', definition='P')
        ]

        qs = qseis.QSeisConfig()
        qs.qseis_version = config.modelling_code_id.split('.')[1]
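        # model roughly 55 s of signal on either side of the first 'begin'
        # arrival; the raw time region gets an extra 10 % of padding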
        half_lapse_time = 55
        qs.time_region = (Timing('begin-%s' % (half_lapse_time * 1.1)),
                          Timing('begin+%s' % (half_lapse_time * 1.1)))
        qs.cut = (Timing('begin-%s' % half_lapse_time),
                  Timing('begin+%s' % half_lapse_time))
        qs.slowness_window = slowness_taper
        qs.wavelet_duration_samples = 0.001
        qs.sw_flat_earth_transform = 1
        qs.filter_shallow_paths = 1
        qs.filter_shallow_paths_depth = float(z_turn * 0.2)
        qs.sw_algorithm = 1
        Store.create_editables(dest_dir, config=config, extra={'qseis': qs})
        if run_ttt:
            st = Store(dest_dir)
            st.make_ttt()

    config_ids = [c.id for c in configs]
    return config_ids
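
# Hedged usage sketch (not part of the original source): the key names, coordinates
# and distance range below are illustrative assumptions. `distances` maps
# (station_id, key_station, key_event) to a (dist_min, dist_max) range in meters and
# `models` maps the same keys to CRUST2.0 profiles, matching how both dicts are
# consumed in the loop above.
#
#     from pyrocko import crust2x2
#
#     models = {
#         'rec': crust2x2.get_profile(62.5, -114.6),   # receiver-side crust
#         'src': crust2x2.get_profile(38.3, 142.4),    # source-side crust
#     }
#     distances = {('YKA', 'rec', 'src'): (7300. * km, 7450. * km)}
#     store_ids = propose_stores(distances, models, superdir='gf_stores',
#                                sample_rate=20., run_ttt=False)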