Example #1
def get_params():

    params = {}
    # the same station file serves as full set, sources and receivers here
    params['stations'] = model.load_stations('stationsBO.txt')
    params['sources'] = model.load_stations('stationsBO.txt')
    params['receivers'] = model.load_stations('stationsBO.txt')

    params['bindirA'] = '../NL-TA/bins/TA-NL/cc_average/'
    params['bindirB'] = '../NL-TA/bins/TA-NL/cc_average/'
    #params['bindirA']                = '../NLTA2/bins/cc_average/'
    #params['bindirB']                = '../NLTA2/bins/cc_average/'
    params['df'] = 20.
    params['appvel'] = 1.2
    params['Xcoda'] = 2.
    params['coda_len'] = 410
    params['distThreshold'] = 100
    params['shortWin'] = 40
    params['overlap'] = 0.6
    params['onebit'] = False
    params['stack_method'] = 'linear'

    params['freqmax'] = 9.5  #float(get_config(db, "preprocess_lowpass"))
    params['freqmin'] = 0.01  #float(get_config(db, "preprocess_highpass"))
    #    params['format_out']            = 'MSEED'
    #    params['nthreads']              = 8

    return params
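
A minimal usage sketch for the function above, assuming 'stationsBO.txt' is a pyrocko stations file in the working directory:

params = get_params()
print('%d stations, df = %.1f Hz' % (len(params['stations']), params['df']))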
Example #2
    def readpyrockostations(self, phases, test=False):
        try:
            stations = model.load_stations(self.eventpath +
                                           '/data/stations_cluster.txt')
        except Exception:
            stations = model.load_stations(self.eventpath +
                                           '/data/stations_disp.txt')
        MetaL = []
        for phase in phases:
            if phase == 'P':
                desired = 'Z'
            if phase == 'S':
                desired = 'T'
            for sl in stations:
                count_channel = 0
                for channel in sl.channels:
                    if channel.name[-1] == desired \
                            and channel.name != 'HHZ':
                        MetaL.append(
                            Station(str(sl.network), str(sl.station),
                                    str(sl.location),
                                    str(channel)[:3], str(sl.lat), str(sl.lon),
                                    str(sl.elevation), str(channel.dip),
                                    str(channel.azimuth), str(channel.gain)))
                    count_channel = count_channel + 1

        FML = self.checkMetaInfoFile(MetaL)

        return FML
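
The phase-to-component mapping can be written as a dict lookup that fails loudly on unsupported phase names; a minimal sketch:

DESIRED_BY_PHASE = {'P': 'Z', 'S': 'T'}

for phase in ['P', 'S']:
    desired = DESIRED_BY_PHASE[phase]   # raises KeyError on unknown phases
    print(phase, '->', desired)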
Example #3
def process(args, scenario_folder, n_tests=1, show=True):
    nstart = 8
    array_centers = []
    from .guesstimate_depth_v02 import PlotSettings, plot

    events = []
    stations = []
    mod = insheim_layered_model()

    for i in range(nstart, nstart + 1):
        # hard-coded to scenario 8; overrides the scenario_folder argument
        i = 8
        scenario_folder = "scenarios/"
        print("%s/scenario_%s/event.txt" % (scenario_folder, i))

        events.append(model.load_events("%s/scenario_%s/event.txt" % (scenario_folder, i))[0])
        stations.append(model.load_stations("%s/scenario_%s/stations.pf" % (scenario_folder, i)))
        traces = io.load(pjoin("%sscenario_%s/" % (scenario_folder, i), 'traces.mseed'))

        event = events[0]
        stations = stations[0]
        min_dist = min(
            [ortho.distance_accurate50m(s, event) for s in stations])
        max_dist = max(
            [ortho.distance_accurate50m(s, event) for s in stations])
        tmin = CakeTiming(phase_selection='first(p|P|PP)-10', fallback_time=0.001)
        tmax = CakeTiming(phase_selection='first(p|P|PP)+52', fallback_time=1000.)
        timing = (tmin, tmax)

        fns = ['.']


        array_id = "INS"


        settings_fn = pjoin("%sscenario_%s/" % (scenario_folder, i), 'plot_settings.yaml')
        settings = PlotSettings.from_argument_parser(args)

        if not settings.trace_filename:
            settings.trace_filename = pjoin("%sscenario_%s/" % (scenario_folder, i), 'beam.mseed')
        if not settings.station_filename:
            fn_array_center = pjoin("%sscenario_%s/" % (scenario_folder, i), 'array_center.pf')
            settings.station_filename = fn_array_center
            station = model.load_stations(fn_array_center)

            settings.store_id = 'landau_100hz'


        settings.event_filename = pjoin("%sscenario_%s/" % (scenario_folder, i), "event.txt")
        settings.save_as = pjoin("%sscenario_%s/" % (scenario_folder, i), "depth_%(array-id)s.png")
        plot(settings)
        if args.overwrite_settings:
            settings.dump(filename=settings_fn)
        if show is True:
            plt.show()
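
The path templates above mix '%s/scenario_%s' and '%sscenario_%s' and only agree because scenario_folder ends with a slash; a sketch of a separator-safe variant using os.path.join:

from os.path import join as pjoin

scenario_folder, i = 'scenarios', 8
scenario_dir = pjoin(scenario_folder, 'scenario_%s' % i)
settings_fn = pjoin(scenario_dir, 'plot_settings.yaml')
fn_beam = pjoin(scenario_dir, 'beam.mseed')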
Example #4
def get_params():
    stations = model.load_stations('sources.txt')
    receivers = model.load_stations('receivers.txt')
    # Get Configuration
    params = {}
#    params['path_to_data']          = '/Volumes/VOLC_DATA/LASTARRIA/raw'#'/Users/Zack/Desktop/corrpy/RAW'
    params['path_to_data']          = '/Users/Hugh/Documents/data/SoCal'
    params['output_folder']         = '/Users/Hugh/Documents/CORR'
    params['architecture']          = 'IDDS'
    params['cc_sampling_rate']      = 5 #float(get_config(db, "cc_sampling_rate"))
    params['analysis_duration']     = 86400 # amount of time to process as one batch; changing this is not implemented yet
    params['overlap']               = 0.5 
    params['maxlag']                = 1000 # even smaller window if needed
    params['corr_duration']         = 1800 # slicing the 86400 in small windows
    params['npts']                  = params['corr_duration'] * params['cc_sampling_rate']
    params['temp_norm']             = 0 # 0: remove earthquakes with param 'clipping'; -1: 1-bit normalization; 1: winsorizing with param 'clipping'
    params['clipping']              = 8 # clipping earthquakes or winsorizing at 3 * std(trace)
    params['resampling_method']     = "Resample" #"Decimate"
    params['decimation_factor']     = int(5)
    params['preprocess_lowpass']    = 2.00#float(get_config(db, "preprocess_lowpass"))
    params['preprocess_highpass']   = 0.05#float(get_config(db, "preprocess_highpass"))
    params['keep_all']              = False#get_config(db, 'keep_all', isbool=True)
    params['keep_days']             = True#get_config(db, 'keep_days', isbool=True)
    params['components_to_compute'] = ['ZZ']#get_components_to_compute(db)
    params['sources_to_corr']       = ['%s.%s'%(sta.network,sta.station) for sta in stations]
    params['receivers_to_corr']     = ['%s.%s'%(sta.network,sta.station) for sta in receivers]
    params['starttime']             = '2014-01-01'
    params['endtime']               = '2014-12-31'
    params['export_format']         = 'sac'
    params['sac_format']            = 'doublets' # format for SAC stacks: [doublets]/clarke
    params['crosscorr']             = True
    params['deconvolution']         = True
    params['cross-coherence']       = False#True
    params['nthreads']              = 3

    filter1 = {} 
    filter1['ref']          = 1
    filter1['low']          = 0.05               # The lower frequency bound of the Whiten function (in Hz) 
    filter1['high']         = 2.00                  # The upper frequency bound of the Whiten function (in Hz)
    filter1['rms_threshold'] = 3
#    filter2 = {} 
#    filter2['ref']      = 2
#    filter2['low']      = 0.01               # The lower frequency bound of the Whiten function (in Hz) 
#    filter2['high']     = 8                  # The upper frequency bound of the Whiten function (in Hz)

    filters = {}
    filters['1']=filter1
#    filters['2']=filter2
    
    return params, filters
Example #5
def readpyrockostations(path, disp):

    if disp is True:
        stations = model.load_stations(path + '/data/stations_cluster.txt')
    else:
        # note: both branches currently load the same cluster file
        stations = model.load_stations(path + '/data/stations_cluster.txt')
    MetaL = []
    for sl in stations:
        channel = sl.channels[0]
        MetaL.append(
            Station(str(sl.network), str(sl.station), str(sl.location),
                    str(sl.channels[0].name), str(sl.lat), str(sl.lon),
                    str(sl.elevation), str(channel.dip), str(channel.azimuth),
                    str(channel.gain)))
    return MetaL
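
A call sketch, with a hypothetical event directory that contains data/stations_cluster.txt:

meta = readpyrockostations('events/ev_2014_001', disp=True)
print('%d station entries' % len(meta))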
Example #6
    def get_receivers(self):
        '''Aggregate receivers from different sources.'''

        fp = self.expand_path

        if self._receivers is None:
            self._receivers = list(self.receivers)
            if self.stations_path:
                for station in model.load_stations(fp(self.stations_path)):
                    self._receivers.append(
                        receiver.Receiver(codes=station.nsl(),
                                          lat=station.lat,
                                          lon=station.lon,
                                          z=station.depth))

            if self.stations_stationxml_path:
                sx = stationxml.load_xml(
                    filename=fp(self.stations_stationxml_path))
                for station in sx.get_pyrocko_stations():
                    self._receivers.append(
                        receiver.Receiver(codes=station.nsl(),
                                          lat=station.lat,
                                          lon=station.lon,
                                          z=station.depth))

        return self._receivers
Example #7
    def setup(self):
        self.sources = guts.load(filename=self.fn_sources)
        self.targets = []

        if self.fn_targets:
            self.targets.extend(guts.load(filename=self.fn_targets))

        if self.fn_stations:
            stats = load_stations(self.fn_stations)
            self.targets.extend(self.cast_stations_to_targets(stats))

        if self.store_id:
            for t in self.targets:
                t.store_id = self.store_id

        if self.center_sources:
            self.move_sources_to_station_center()

        self.config.channels = [t.codes for t in self.targets]
        store_ids = [t.store_id for t in self.targets]
        store_id = set(store_ids)
        assert len(store_id) == 1, \
            'More than one store used. Not implemented yet.'

        self.store = self.engine.get_store(store_id.pop())

        self.sources = filter_oob(self.sources, self.targets, self.store.config)

        dt = self.config.deltat_want or self.store.config.deltat
        self.n_samples = int((self.config.sample_length + self.config.tpad) / dt)
Example #8
    def add_stations(self,
                     stations=None,
                     pyrocko_stations_filename=None,
                     stationxml_filenames=None):

        if stations is not None:
            for station in stations:
                self.stations[station.nsl()] = station

        if pyrocko_stations_filename is not None:
            logger.debug('loading stations from file %s' %
                         pyrocko_stations_filename)

            for station in model.load_stations(pyrocko_stations_filename):
                self.stations[station.nsl()] = station

        if stationxml_filenames is not None and len(stationxml_filenames) > 0:

            for stationxml_filename in stationxml_filenames:
                logger.debug('loading stations from StationXML file %s' %
                             stationxml_filename)

                sx = fs.load_xml(filename=stationxml_filename)
                for station in sx.get_pyrocko_stations():
                    channels = station.get_channels()
                    if len(channels) == 1 and channels[0].name.endswith('Z'):
                        logger.warning(
                            'Station %s has vertical component'
                            ' information only, adding mocked channels.' %
                            station.nsl_string())
                        station.add_channel(model.Channel('N'))
                        station.add_channel(model.Channel('E'))

                    self.stations[station.nsl()] = station
Example #9
def load_and_blacklist_stations(datadir, blacklist):
    '''
    Load stations from autokiwi output and apply blacklist
    '''

    stations = model.load_stations(datadir + 'stations.txt')
    return utility.apply_station_blacklist(stations, blacklist)
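
A call sketch; the plain string concatenation means datadir must end with a path separator, and the blacklist entries are whatever utility.apply_station_blacklist expects (assumed here to be station codes):

stations = load_and_blacklist_stations('autokiwi_output/', ['STA1', 'STA2'])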
Example #10
    def add_stations(
            self,
            stations=None,
            pyrocko_stations_filename=None,
            stationxml_filenames=None):

        if stations is not None:
            for station in stations:
                self.stations[station.nsl()] = station

        if pyrocko_stations_filename is not None:
            logger.debug(
                'Loading stations from file %s' %
                pyrocko_stations_filename)

            for station in model.load_stations(pyrocko_stations_filename):
                self.stations[station.nsl()] = station

        if stationxml_filenames is not None and len(stationxml_filenames) > 0:

            for stationxml_filename in stationxml_filenames:
                logger.debug(
                    'Loading stations from StationXML file %s' %
                    stationxml_filename)

                sx = cached_load_stationxml(stationxml_filename)
                for station in sx.get_pyrocko_stations():
                    self.stations[station.nsl()] = station
Example #11
    def readpyrockostations(self):
        try:
            stations = model.load_stations(self.eventpath+'/data/stations_cluster.txt')
        except Exception:
            stations = model.load_stations(self.eventpath+'/data/stations_disp.txt')
        MetaL = []
        for sl in stations:
            for channel in sl.channels:
                #channel = sl.channels[0]
                MetaL.append(Station(
                    str(sl.network), str(sl.station), str(sl.location),
                    str(sl.channels[0])[:3], str(sl.lat), str(sl.lon),
                    str(sl.elevation), str(channel.dip), str(channel.azimuth),
                    str(channel.gain)))

        FML = self.checkMetaInfoFile(MetaL)

        return FML
Example #12
    def __init__(self, *args, **kwargs):
        super(PinkyConfig, self).__init__(*args, **kwargs)
        stations = load_stations(self.fn_stations)
        self.targets = stations_to_targets(stations)

        if not self.reference_target:
            targets_by_code = {'.'.join(t.codes[:3]): t for t in self.targets}
            self.reference_target = targets_by_code[self.reference_station]
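
The reference-target lookup keys targets by 'NET.STA.LOC'; a standalone sketch of the same mapping, with a namedtuple standing in for a pyrocko target and hypothetical codes:

from collections import namedtuple

T = namedtuple('T', 'codes')
targets = [T(('GE', 'STU', '', 'BHZ')), T(('GE', 'MTE', '', 'BHZ'))]

targets_by_code = {'.'.join(t.codes[:3]): t for t in targets}
print(targets_by_code['GE.STU.'])   # the empty location code leaves a trailing dot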
Example #13
def get_params():
    # Get Configuration
    params = {}
    params['stations'] = model.load_stations('stations.txt')  # all stations
    params['receivers'] = model.load_stations('receivers.txt')  # stations to analyze
    params['datdir'] = '/Users/tzompantli/DAS/Stanford/BF/bins/'
    params['outdir'] = 'out/'
    params['nameout'] = 'DS.2018'
    params['df'] = 50.
    params['freq2analyze'] = [1., 3., 5., 8., 10, 12, 15, 20]
    params['slowmin'] = 0.05
    params['slowmax'] = 4.55
    params['slowstep'] = 0.05
    params['div_fact'] = 1  # div fact for downsampling
    params['nprep'] = True
    params['nbeam'] = True
    params['plot'] = True
    params['saveplot'] = True  #'ttt'
    params['show'] = True
    # params['saveresults']   = True
    params['nsta'] = len(model.load_stations('receivers.txt'))

    params['fftpower'] = 12  #precision
    params['freq_int'] = (0.5, 21)

    params['nhours'] = 1  # Input data is cut into chunks of length nhours.
    params['ndays'] = 1  #number of days to compute as a bunch
    params['threshold_std'] = 0.5
    params['onebit'] = True
    params['tempfilter'] = False  #True
    params['specwhite'] = True
    params['timenorm'] = False  #True

    params['src_param'] = [(90, 800000)]  # location and velocity of source for array response

    sl = arange(params['slowmin'], params['slowmax'], params['slowstep'])
    params['slowness'] = sl.reshape(1, sl.size)

    try:
        os.makedirs(params['outdir'])
    except OSError:
        pass  # output directory already exists

    return params
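
The slowness grid at the end is a plain numpy row vector; a standalone equivalent (the example imports arange from numpy):

import numpy as np

sl = np.arange(0.05, 4.55, 0.05)    # slowmin, slowmax, slowstep from above
slowness = sl.reshape(1, sl.size)   # row vector
print(slowness.shape)               # (1, 90)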
Example #14
    def testIOStations(self):
        tempdir = tempfile.mkdtemp()
        fn = pjoin(tempdir, 'stations.txt')

        ne = model.Channel('NE', azimuth=45., dip=0.)
        se = model.Channel('SE', azimuth=135., dip=0.)
        stations = [
            model.Station('', sta, '', 0., 0., 0., channels=[ne, se])
            for sta in ['STA1', 'STA2']]
        model.dump_stations(stations, fn)
        stations = model.load_stations(fn)

        shutil.rmtree(tempdir)
Example #15
def readcolosseostations(scenario_path):
    stations = model.load_stations(scenario_path + '/meta/stations.txt')
    MetaL = []
    for sl in stations:
        channel = sl.channels[2]
        MetaL.append(
            Station(str(sl.network), str(sl.station), str(sl.location),
                    str(sl.channels[2])[:3], str(sl.lat), str(sl.lon),
                    str(sl.elevation), str(channel.dip), str(channel.azimuth),
                    str(channel.gain)))
    return MetaL
Example #16
    def testIOStations(self):
        tempdir = self.make_tempdir()
        fn = pjoin(tempdir, 'stations.txt')

        ne = model.Channel('NE', azimuth=45., dip=0.)
        se = model.Channel('SE', azimuth=135., dip=0.)
        stations = [
            model.Station('', sta, '', 0., 0., 0., channels=[ne, se])
            for sta in ['STA1', 'STA2']]

        model.dump_stations(stations, fn)
        stations = model.load_stations(fn)
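
A hedged extension of the round trip above, checking that the reloaded stations match what was dumped:

assert [s.station for s in stations] == ['STA1', 'STA2']
assert sorted(c.name for c in stations[0].channels) == ['NE', 'SE']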
Example #17
def readpyrockostations(path, disp, cfg):

    if disp is True:
        stations = model.load_stations(path + '/data/stations_cluster.txt')
    else:
        stations = model.load_stations(path + '/data/stations_cluster.txt')
    MetaL = []
    phases = cfg.config.ttphases
    for phase in phases:
        if phase == 'P':
            desired = 'Z'
        if phase == 'S':
            desired = 'T'
    for sl in stations:
        for channel in sl.channels:
            if channel.name[-1] == desired and len(channel.name) < 3:
                MetaL.append(
                    Station(str(sl.network), str(sl.station), str(sl.location),
                            str(channel.name), str(sl.lat), str(sl.lon),
                            str(sl.elevation), str(channel.dip),
                            str(channel.azimuth), str(channel.gain)))
    return MetaL
Example #18
    def testIOStations(self):
        tempdir = tempfile.mkdtemp(prefix='pyrocko-model')
        fn = pjoin(tempdir, 'stations.txt')

        ne = model.Channel('NE', azimuth=45., dip=0.)
        se = model.Channel('SE', azimuth=135., dip=0.)
        stations = [
            model.Station('', sta, '', 0., 0., 0., channels=[ne, se])
            for sta in ['STA1', 'STA2']
        ]

        model.dump_stations(stations, fn)
        stations = model.load_stations(fn)

        shutil.rmtree(tempdir)
Example #19
    def __init__(self):

        self._stations = model.load_stations('DURCAL_stations_combined.txt')
        self._theoPhases = [cake.PhaseDef('p'), cake.PhaseDef('s'), cake.PhaseDef('Sv(cmb)s')]
        self._myStationBox = myStationBox()
        
        # Dataset from Orfeus and Mediterranean Network (*MDD*)
        # Note: combined files may have some doubled markers, due to repeated
        # picking. All multiple picks of one station are averaged, so that
        # there is one mean pick in the end.
        self.Infiles = glob.glob('markers/*')
        for markerfile in self.Infiles:
            readandsplit(markerfile, self._myStationBox, self._stations)
        
        # debug: print all cards to terminal
        self._myStationBox.printCards()
Example #20
    def add_stations(self,
                     stations=None,
                     pyrocko_stations_filename=None,
                     stationxml_filenames=None):

        if stations is not None:
            for station in stations:
                self.stations[station.nsl()] = station

        if pyrocko_stations_filename is not None:
            logger.debug('Loading stations from file "%s"...' %
                         pyrocko_stations_filename)

            for station in model.load_stations(pyrocko_stations_filename):
                self.stations[station.nsl()] = station

        if stationxml_filenames is not None and len(stationxml_filenames) > 0:

            for stationxml_filename in stationxml_filenames:
                if not op.exists(stationxml_filename):
                    continue

                logger.debug('Loading stations from StationXML file "%s"...' %
                             stationxml_filename)

                sx = fs.load_xml(filename=stationxml_filename)
                ev = self.get_event()
                stations = sx.get_pyrocko_stations(time=ev.time)
                if len(stations) == 0:
                    logger.warning(
                        'No stations found for time %s in file "%s".' %
                        (util.time_to_str(ev.time), stationxml_filename))

                for station in stations:
                    logger.debug('Adding station: %s.%s.%s' % station.nsl())
                    channels = station.get_channels()
                    if len(channels) == 1 and channels[0].name.endswith('Z'):
                        logger.warning(
                            'Station "%s" has vertical component'
                            ' information only, adding mocked channels.' %
                            station.nsl_string())
                        station.add_channel(
                            model.Channel(channels[0].name[:-1] + 'N'))
                        station.add_channel(
                            model.Channel(channels[0].name[:-1] + 'E'))

                    self.stations[station.nsl()] = station
Example #21
def assoicate_single(ev, data_dir, store_id, store,
                     stations=None, pre=0.5,
                     post=3, reference_event=None, min_len=420,
                     pick_sigma=0.02):
    events = []
    waveforms = []
    labels = []
    gf_freq = store.config.sample_rate
    mod = store.config.earthmodel_1d
    found = False
    pathlist = Path(data_dir).glob('ev_*/')
    for path in sorted(pathlist):
        targets = []
        path = str(path)+"/"
        try:
            event = model.load_events(path+"event.txt")[0]
            if ev.time-10 < event.time and ev.time+10 > event.time:
                traces_loaded = io.load(path+"/waveforms/rest/traces.mseed")
                stations_unsorted = model.load_stations(data_dir+"stations.pf")
                for st in stations_unsorted:
                    st.dist = orthodrome.distance_accurate50m(st.lat, st.lon,
                                                              event.lat,
                                                              event.lon)
                    st.azi = orthodrome.azimuth(st.lat, st.lon, event.lat,
                                                event.lon)
                stations = sorted(stations_unsorted, key=lambda x: x.dist,
                                  reverse=True)

                traces_processed = []
                traces = wp.check_traces(traces_loaded, stations, min_len=min_len)

                traces_processed, nsamples = wp.process_loaded_waveforms(traces,
                                                                         stations,
                                                                         ev,
                                                                         gf_freq,
                                                                         mod,
                                                                         pre,
                                                                         post)
                if found is False:
                    events.append(event)
                    waveforms.append(traces_processed)
                    found = True
        except Exception:
            pass
    data_events, nsamples = wp.prepare_waveforms(waveforms)
    return data_events, nsamples, event
Example #22
def raw_open(file, dt=0.05, shape=None, test=False, plot=False, saveplot=None):
    """Open a binary or npy file. If it has no header it will 
    reshape it as a `numpy.ndarray`, generaly 2D matrix. If shape is not 
    specified, the function try to reshape the input array
    based on the lenght of the stations.txt in the folder.
    
    :param file: path and file name
    :type file: str
    :param dt: deltat
    :type dt: float
    :param shape: shape of the output `numpy.ndarray`
    :type shape: tulpe 
    :params test: wether or not to print the size of the array in order to difine shape
    :type test: bool
    :param plot: whether or not to plot the output
    :type plot: bool
    
    :rtype: :class:`numpy.ndarray`
    :returns: reshaped bin
    """
    if test:
        print(np.shape(np.fromfile(file, dtype=np.float32)))
        return
    if file.endswith('.npy'):
        bin = np.load(file)
        x = np.shape(bin)[0]
        y = np.shape(bin)[1]
        shape = (x, y)
    else:
        bin = np.fromfile(file, dtype=np.float32)
        if type(shape) is tuple:
            pass
        elif shape is None and os.path.isfile('stations.txt'):
            x = len(model.load_stations('stations.txt'))
            y = len(bin) // x
            shape = (x, y)
        else:
            raise ShapeError('No stations.txt found and no shape specified!')
        bin = np.reshape(bin, shape)

    if plot:
        raw_plotshot(shape[0], shape[1], bin, dt, show=plot, save=saveplot)

    return bin
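
Call sketches for raw_open, with a hypothetical binary file name:

data = raw_open('shot_0001.bin', dt=0.05)               # shape inferred from stations.txt
data = raw_open('shot_0001.bin', shape=(2602, 12000))   # explicit (ntraces, nsamples)
raw_open('shot_0001.bin', test=True)                    # only prints the flat array size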
Example #23
def prep_data_batch(data_dir, store_id, stations=None, pre=0.5,
                    post=3, reference_event=None, min_len=420,
                    pick_sigma=0.02):
    engine = LocalEngine(store_superdirs=['/home/asteinbe/gf_stores'])
    store = engine.get_store(store_id)
    mod = store.config.earthmodel_1d
    gf_freq = store.config.sample_rate
    cake_phase = cake.PhaseDef("P")
    phase_list = [cake_phase]
    events = []
    waveforms = []
    waveforms_shifted = []
    events = scedc_util.scedc_fm_to_pyrocko(file)
    labels = labels_from_events(events)
    pathlist = Path(data_dir).glob('ev_0/')
    for path in sorted(pathlist):
        try:
            targets = []
            path = str(path)+"/"
            event = model.load_events(path+"event.txt")[0]
            traces_loaded = io.load(path+"traces.mseed")
            stations_unsorted = model.load_stations(data_dir+"stations.pf")
            for st in stations_unsorted:
                st.dist = orthodrome.distance_accurate50m(st.lat, st.lon,
                                                          event.lat,
                                                          event.lon)
                st.azi = orthodrome.azimuth(st.lat, st.lon, event.lat,
                                            event.lon)
            stations = sorted(stations_unsorted, key=lambda x: x.dist,
                              reverse=True)

            traces_processed = []
            traces = check_traces(traces_loaded, stations, min_len=min_len)
            traces_processed, nsamples = wp.process_loaded_waveforms(
                traces, stations, event, gf_freq, mod, pre, post)
            events.append(event)
            waveforms.append(traces_processed)
        except Exception:
            pass
    return waveforms, nsamples, events, waveforms_shifted
Example #24
def sorted_plotshot(x,
                    y,
                    z,
                    dt,
                    sourcesta,
                    cmap=cm.gray,
                    show=True,
                    save=None):
    """to be continued"""
    from pyrocko import model, orthodrome
    from obspy.geodetics.base import gps2dist_azimuth as gps2dist
    dists = []
    dists2 = []
    stations = model.load_stations('stations.txt')
    for a in stations:
        if a.station == sourcesta:
            break
#  for s in stations[2602:]:
    for b in stations[:2602]:
        d = gps2dist(a.lat, a.lon, b.lat, b.lon)[0] / 1000.
        #    di = orthodrome.distance_accurate50m(a, b)/1000.
        #        print d[0],di
        dists.append(d)
    #    dists2.append(di)
#    dists = np.array(dists)[0]

    import sys
    np.set_printoptions(threshold=sys.maxsize)  # print arrays in full; np.nan is rejected by modern numpy
    #print  np.max(dists), len(dists)
    a = np.argsort(dists)
    #b = np.argsort(dists2)
    #print a[:100],b[:100]

    z = z[a]
    fmin = 0.1
    fmax = 0.25
    #for u in z:
    #    u = bandpass(u,fmin,fmax,1./dt)
    #    plt.plot(u)
    ##plt.plot(z[111])
    #    plt.show()
    np.save('tmp.npy', z)
    raw_plotshot(x, y, z, dt, save='tmp')
Example #25
def get_params():
    # Get Configuration
    params = {}
    ######################
    # params for BINFILE #
    ######################
    params['stations']  = model.load_stations('stationsBO.txt')
#    params['receivers']  = model.load_stations('receivers.txt')
    params['WORKDIR']               = os.getcwd()
    params['downsampling']          = False#True
    params['df']                    = 20. 
    params['dt']                    = 1./params['df']
    params['bin_duration']          = (3600.*24)#/4
    params['percentfill']           = 10
    params['freqmax']               = 9.8
    params['freqmin']               = 0.01
    params['stop']                  = 0 # important param but default = 0
    params['sizeout']               = 415 #size of the output matrix (sizein - stop)
    ###################
    # params for CORR #
    ###################
    params['rotation']              = False
    params['corrType']              = 'crosscoherence'
    params['analysis_duration']     = (3600.*24)#/24 # amount of time to process as one batch; changing this is not implemented yet
    params['temp_norm']             = 0 # 0: remove earthquakes with param 'clipping'; -1: 1-bit normalization; 1: winsorizing with param 'clipping'
    params['clipping']              = 3. # clipping earthquakes or winsorizing at 3 * std(trace)
    params['overlap']               = 0.4 
    params['corr_duration']         = 250. # slicing the 86400 in small windows
    params['npts']                  = params['corr_duration'] * params['df']
    params['nthreads']              = 12 
    params['outname']               = 'BO.BO' #net1.net2
    ##################
    # params for PAZ #
    ##################
    #"""/!\ hard coded in CorrelBinSingle.py. Sta a or sta b"""
    paz = corn_freq_2_paz(15.0, damp=0.57)
    #paz['sensitivity']=(76*16/16)/5.9605e-7
    params['paz'] = paz
    params['pre_filt'] = (0.05, 0.055, 9.0, 9.5)
    
    return params
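
corn_freq_2_paz comes from obspy.signal.invsim and builds a poles-and-zeros dictionary for a given corner frequency and damping; a minimal standalone sketch:

from obspy.signal.invsim import corn_freq_2_paz

paz = corn_freq_2_paz(15.0, damp=0.57)
print(sorted(paz))  # expected keys include 'poles', 'zeros' and 'gain'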
Example #26
def run_example(project_name, config_path, quick_config_path, event_name):
    project_dir = project_name
    if os.path.exists(project_dir):
        shutil.rmtree(project_dir)

    grond('init', project_name, project_dir)
    with chdir(project_dir):
        assert os.path.isdir('config')

        common.link_test_data('events/%s/' % event_name,
                              'data/events/%s/' % event_name)

        env = Environment([config_path, event_name])
        conf = env.get_config()

        store_ids = conf.get_elements('target_groups[:].store_id')
        for store_id in store_ids:
            store_path = 'gf_stores/%s/' % store_id
            if not os.path.exists(store_path):
                common.link_test_data(store_path)

        problem_name = env.get_problem().name
        rundir_path = expand_template(conf.rundir_template,
                                      dict(problem_name=problem_name))

        grond('check', config_path, event_name,
              '--save-stations-used=used_stations.txt')

        sorted(s.station for s in model.load_stations('used_stations.txt'))

        mod_conf = conf.clone()
        mod_conf.set_elements('analyser_configs[:].niterations', 100)
        mod_conf.set_elements('optimiser_config.sampler_phases[:].niterations',
                              100)
        mod_conf.set_elements('optimiser_config.nbootstrap', 10)
        mod_conf.set_basepath(conf.get_basepath())
        config.write_config(mod_conf, quick_config_path)
        grond('go', quick_config_path, event_name)
        grond('harvest', '--force', '--export-fits=best,mean', rundir_path)
        grond('report', rundir_path)
Example #27
def ropen(file, dt=0.05, shape=None):#, plot=False, saveplot=None):
    """Same as raw_open but return all arguments (shape, dt) for plot facilities.
    Open a binary or npy file. If it has no header it will 
    reshape it as a `numpy.ndarray`, generaly 2D matrix. If shape is not 
    specified, the function try to reshape the input array
    based on the lenght of the stations.txt in the folder.

    :param file: path and file name
    :type file: str
    :param dt: deltat
    :type dt: float
    :param shape: shape of the output `numpy.ndarray`
    :type shape: tulpe 
    #:param plot: whether or not to plot the output
    #:type plot: bool

    :rtype: :class:`numpy.ndarray`
    :returns: reshaped bin, shape1, shape2, dt
    """
    if file.endswith('.npy'):
        bin = np.load(file)
        #x = np.shape(bin)[0]
        #y = np.shape(bin)[1]
        #shape = (x, y)
    else:
        bin = np.fromfile(file, dtype=np.float32)
        if type(shape) is tuple:
            pass
        elif shape is None and os.path.isfile('stations.txt'):
            x = len(model.load_stations('stations.txt'))
            y = len(bin) // x
            shape = (x, y)
        else:
            raise Exception('No stations.txt found and no shape specified!')
        
        bin = np.reshape(bin, shape)
    #if plot:
    #    raw_plotshot(shape[0], shape[1], bin, dt, show=plot, save=saveplot )

    return bin  # , shape[0], shape[1], dt
Example #28
def beam(scenario_folder, n_tests=1, show=False):
    nstart = 8
    array_centers = []

    events = []
    stations = []
    mod = insheim_layered_model()

    for i in range(nstart, n_tests):
        print("%s/scenario_%s/event.txt" % (scenario_folder, i))

        events.append(model.load_events("%s/scenario_%s/event.txt" % (scenario_folder, i))[0])
        stations.append(model.load_stations("%s/scenario_%s/stations.pf" % (scenario_folder, i)))
        traces = io.load(pjoin("%sscenario_%s/" % (scenario_folder, i), 'traces.mseed'))

        event = events[0]
        stations = stations[0]
        min_dist = min(
            [ortho.distance_accurate50m(s, event) for s in stations])
        max_dist = max(
            [ortho.distance_accurate50m(s, event) for s in stations])
        tmin = CakeTiming(phase_selection='first(p|P|PP)-10', fallback_time=0.001)
        tmax = CakeTiming(phase_selection='first(p|P|PP)+52', fallback_time=1000.)
        timing = (tmin, tmax)
        tstart = timing[0].t(mod, (event.depth, min_dist))
        tend = timing[1].t(mod, (event.depth, max_dist))

        normalize = True
        bf = BeamForming(stations, traces, normalize=normalize)
        bf.process(event=event,
                   timing=tmin,
                   fn_dump_center=pjoin("%sscenario_%s/" % (scenario_folder, i), 'array_center.pf'),
                   fn_beam=pjoin("%sscenario_%s/" % (scenario_folder, i), 'beam.mseed'),
                   station="INS")
        if show is True:
            bf.plot(fn=pjoin("%sscenario_%s/" % (scenario_folder, i), 'beam_shifts.png'))

        array_centers.append(bf.station_c)
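
min_dist and max_dist walk the station list twice (here and in Example #3); a sketch that computes the distances once, assuming stations and event as loaded above:

from pyrocko import orthodrome as ortho

dists = [ortho.distance_accurate50m(s, event) for s in stations]
min_dist, max_dist = min(dists), max(dists)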
Example #29
        height=30.,
        show_grid=False,
        show_topo=True,
        color_dry=(238, 236, 230),
        topo_cpt_wet='light_sea_uniform',
        topo_cpt_dry='light_land_uniform',
        illuminate=True,
        illuminate_factor_ocean=0.15,
        show_rivers=False,
        show_plates=True)

# Draw some larger cities covered by the map area
m.draw_cities()

# Generate lists with latitude, longitude and labels of the stations
stations = model.load_stations('stations_deadsea.pf')
lats = [s.lat for s in stations]
lons = [s.lon for s in stations]
labels = ['.'.join(s.nsl()) for s in stations]

# Stations as black triangles. Genuine GMT commands can be passed via the
# map's gmt attribute. The last argument of the psxy function call pipes the
# map's projection system.
m.gmt.psxy(in_columns=(lons, lats), S='t20p', G='black', *m.jxyr)

# Station labels
for i in range(len(stations)):
    m.add_label(lats[i], lons[i], labels[i])

# Load events from catalog file (generated using catalog.GlobalCMT()
# download from www.globalcmt.org)
Example #30
def plot(settings, show=False):

    #align_phase = 'P(cmb)P<(icb)(cmb)p'
    with_onset_line = False
    fill = True
    align_phase = 'P'
    zoom_window = settings.zoom
    ampl_scaler = '4*standard deviation'

    quantity = settings.quantity
    zstart, zstop, inkr = settings.depths.split(':')
    test_depths = num.arange(float(zstart)*km, float(zstop)*km, float(inkr)*km)

    try:
        traces = io.load(settings.trace_filename)
    except FileLoadError as e:
        logger.info(e)
        return 

    event = model.load_events(settings.event_filename)
    assert len(event)==1
    event = event[0]
    event.depth = float(settings.depth) * 1000.
    base_source = MTSource.from_pyrocko_event(event)

    test_sources = []
    for d in test_depths:
        s = base_source.clone()
        s.depth = float(d)
        test_sources.append(s)
    if settings.store_superdirs:
        engine = LocalEngine(store_superdirs=settings.store_superdirs)
    else:
        engine = LocalEngine(use_config=True)
    try:
        store = engine.get_store(settings.store_id)
    except seismosizer.NoSuchStore as e:
        logger.info('%s ... skipping.' % e)
        return

    stations = model.load_stations(settings.station_filename)
    station = [
        s for s in stations
        if match_nslc('%s.%s.%s.*' % s.nsl(), traces[0].nslc_id)]
    assert len(station) == 1
    station = station[0]
    targets = [station_to_target(station, quantity=quantity, store_id=settings.store_id)]
    try:
        request = engine.process(targets=targets, sources=test_sources)
    except seismosizer.NoSuchStore as e:
        logger.info('%s ... skipping.' % e)
        return
    except meta.OutOfBounds as error:
        if settings.force_nearest_neighbor:
            logger.warning('%s  Using nearest neighbor instead.' % error)
            mod_targets = []
            for t in targets:
                closest_source = min(test_sources, key=lambda s: s.distance_to(t))
                farthest_source = max(test_sources, key=lambda s: s.distance_to(t))
                min_dist_delta = store.config.distance_min - closest_source.distance_to(t)
                max_dist_delta = store.config.distance_max - farthest_source.distance_to(t)
                if min_dist_delta < 0:
                    azi, bazi = closest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(t.lat, t.lon, azi, min_dist_delta*cake.m2d)
                elif max_dist_delta < 0:
                    azi, bazi = farthest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(t.lat, t.lon, azi, max_dist_delta*cake.m2d)
                t.lat, t.lon = newlat, newlon
                mod_targets.append(t)
            request = engine.process(targets=mod_targets, sources=test_sources)
        else:
            logger.error("%s: %s" % (error, ".".join(station.nsl())))
            return

    alldepths = list(test_depths)
    depth_count = dict(zip(sorted(alldepths), range(len(alldepths))))

    target_count = dict(zip([t.codes[:3] for t in targets], range(len(targets))))

    fig = plt.figure()
    ax = fig.add_subplot(111)
    maxz = max(test_depths)
    minz = min(test_depths)
    relative_scale = (maxz-minz)*0.02
    for s, t, tr in request.iter_results():
        if quantity=='velocity':
            tr = integrate_differentiate(tr, 'differentiate')

        onset = engine.get_store(t.store_id).t(
            'begin', (s.depth, s.distance_to(t)))

        tr = settings.do_filter(tr)
        if settings.normalize:
            tr.set_ydata(tr.get_ydata()/num.max(abs(tr.get_ydata())))
            ax.tick_params(axis='y', which='both', left='off', right='off',
                           labelleft='off')

        y_pos = s.depth
        xdata = tr.get_xdata()-onset-s.time
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(tmin=event.time+onset+zoom_window[0],
                          tmax=event.time+onset+zoom_window[1])
        if ampl_scaler == 'trace min/max':
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == '4*standard deviation':
            ampl_scale = 4*float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.
        ampl_scale /= settings.gain
        ydata = (tr_ydata/ampl_scale)*relative_scale + y_pos
        ax.plot(xdata, ydata, c='black', linewidth=1., alpha=1.)
        if False:
            ax.fill_between(xdata, y_pos, ydata, where=ydata<y_pos, color='black', alpha=0.5)
        ax.text(zoom_window[0]*1.09, y_pos, '%1.1f' % (s.depth/1000.), horizontalalignment='right') #, fontsize=12.)
        if False:
            mod = store.config.earthmodel_1d
            label = 'pP'
            arrivals = mod.arrivals(phases=[cake.PhaseDef(label)],
                                      distances=[s.distance_to(t)*cake.m2d],
                                      zstart=s.depth)

            try:
                t = arrivals[0].t
                ydata_absmax = num.max(num.abs(tr.get_ydata()))
                marker_length = 0.5
                x_marker = [t-onset]*2
                y = [y_pos-(maxz-minz)*0.025, y_pos+(maxz-minz)*0.025]
                ax.plot(x_marker, y, linewidth=1, c='blue')

                ax.text(x_marker[1]-x_marker[1]*0.005, y[1], label,
                        #fontsize=12,
                        color='black',
                        verticalalignment='top',
                        horizontalalignment='right')

            except IndexError:
                logger.warning('no pP phase at d=%s z=%s stat=%s' % (s.distance_to(t)*cake.m2d,
                                                                     s.depth, station.station))
                pass

    if len(traces) == 0:
        raise Exception('No trace found!')
    if len(traces) > 1:
        raise Exception('More than one trace provided!')
    else:
        onset = 0
        tr = traces[0]
        correction = float(settings.correction)
        if quantity=='displacement':
            tr = integrate_differentiate(tr, 'integrate')
        tr = settings.do_filter(tr)
        onset = engine.get_store(targets[0].store_id).t(
            'begin', (event.depth, s.distance_to(targets[0]))) + event.time
        if settings.normalize:
            tr.set_ydata(tr.get_ydata()/max(abs(tr.get_ydata())))
            ax.tick_params(axis='y', which='both', left='off', right='off',
                           labelleft='off')

        y_pos = event.depth
        xdata = tr.get_xdata()-onset+correction
        tr_ydata = tr.get_ydata() *-1
        visible = tr.chop(tmin=onset+zoom_window[0]+correction,
                          tmax=onset+zoom_window[1]+correction)
        if ampl_scaler == 'trace min/max':
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == '4*standard deviation':
            ampl_scale = 4*float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.
        ydata = (tr_ydata/ampl_scale * settings.gain*settings.gain_record)*relative_scale + y_pos
        ax.plot(xdata, ydata, c=settings.color, linewidth=1.)
        ax.set_xlim(zoom_window)
        zmax = max(test_depths)
        zmin = min(test_depths)
        zrange = zmax - zmin
        ax.set_ylim((zmin-zrange*0.2, zmax+zrange*0.2))
        ax.set_xlabel('Time [s]')
        ax.text(0.0, 0.6, 'Source depth [km]',
                rotation=90,
                horizontalalignment='left',
                transform=fig.transFigure) #, fontsize=12.)

    if fill:
        ax.fill_between(xdata, y_pos, ydata, where=ydata<y_pos, color=settings.color, alpha=0.5)
    if with_onset_line:
        ax.text(0.08, zmax+zrange*0.1, align_phase, fontsize=14)
        vline = ax.axvline(0., c='black')
        vline.set_linestyle('--')
    if settings.title:
        params = {'array-id': ''.join(station.nsl()),
                  'event_name': event.name,
                  'event_time': time_to_str(event.time)}
        ax.text(0.5, 1.05, settings.title % params,
                horizontalalignment='center', 
                transform=ax.transAxes)
    if settings.auto_caption:
        cax = fig.add_axes([0., 0., 1, 0.05], label='caption')
        cax.axis('off')
        cax.xaxis.set_visible(False)
        cax.yaxis.set_visible(False)
        if settings.quantity == 'displacement':
            quantity_info = 'integrated velocity trace. '
        if settings.quantity == 'velocity':
            quantity_info = 'differentiated synthetic traces. '
        if settings.quantity == 'restituted':
            quantity_info = 'restituted traces. '

        captions = {'filters':''}
        for f in settings.filters:
            captions['filters'] += '%s-pass, order %s, f$_c$=%s Hz. '%(f.type, f.order, f.corner)
        captions['quantity_info'] = quantity_info
        captions['store_sampling'] = 1./store.config.deltat
        cax.text(0, 0, 'Filters: %(filters)s f$_{GF}$=%(store_sampling)s Hz.\n%(quantity_info)s' % captions,
                 fontsize=12, transform=cax.transAxes)
        plt.subplots_adjust(hspace=.4, bottom=0.15)
    else:
        plt.subplots_adjust(bottom=0.1)

    ax.invert_yaxis()
    if settings.save_as:
        logger.info('save as: %s ' % settings.save_as)
        options = settings.__dict__
        options.update({'array-id': ''.join(station.nsl())})
        fig.savefig(settings.save_as % options, dpi=160, bbox_inches='tight')
    if show:
        plt.show()
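
The amplitude-scaling branch appears twice inside plot(); a sketch of the same logic factored into a helper (a refactoring suggestion, not part of the original):

import numpy as num

def ampl_scale_for(ydata, scaler, gain=1.):
    # scaler is 'trace min/max' or '4*standard deviation';
    # anything else falls back to unity scaling, as in plot() above
    if scaler == 'trace min/max':
        scale = float(num.max(num.abs(ydata)))
    elif scaler == '4*standard deviation':
        scale = 4. * float(num.std(ydata))
    else:
        scale = 1.
    return scale / gain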
Example #31
                        S='%s%g' % ('m', size_cm * 2.0),
                        #G=gmtpy.color(colors[e.cluster]),
                        #G=colors[i_e],
                        G='red',
                        C='3p,0/0/0',
                        #W='thinnest,%i/%i/%i' % (255, 255, 255),
                        #L='thinnest,%i/%i/%i' % (255, 255, 255),
                        in_rows=[data],
                        *_map.jxyr)
    _map.save(outpath=outfn)


if __name__ == '__main__':
    e = list(Event.load_catalog(filename='event.pf'))[0]
    #stations = model.load_stations('arrays.pf')
    stations = model.load_stations('array_center.pf')
    color_wet = [200, 200, 200]
    color_dry = [253, 253, 253]
    params = MapParameters(lat=e.lat,
                           lon=e.lon,
                           radius=8000000,
                           outfn='array-map-new.pdf',
                           stations=stations,
                           events=[e],
                           show_topo=False,
                           show_grid=False,
                           color_wet=color_wet,
                           color_dry=color_dry)
    make_map(map_parameters=params)
    print('.' * 40)
    fdomain_station_locs = []
Example #32
km = 1000

derec_home = os.environ['DEREC_HOME']

store_id = 'doctar_mainland_20Hz'

e = LocalEngine(store_superdirs=[pjoin(derec_home, 'fomostos')])

store = e.get_store(store_id)

m = store.config.earthmodel_1d

event = model.Event(load=pjoin(derec_home, 'mseeds/doctar/doctar_2011-11-01/doctar_2011-11-01_quakefile.dat'))
statfn = pjoin(derec_home, 'mseeds/doctar/doctar_2011-11-01/stations.txt')
stations = model.load_stations(statfn)
stations_distance = collections.defaultdict()
for s in stations:
    d = orthodrome.distance_accurate50m_numpy(s.lat,s.lon,event.lat, event.lon)
    stations_distance[d] = s.station

#phases = ['p', 'P', 'Pv11.p']
phases = ['p']


colormap = collections.defaultdict()
num_colors = 12
colors = list(num.linspace(0,254,num_colors))
cmap = plt.get_cmap('hsv')
distances = num.linspace(1, 58, 120)*km*cake.m2d
#distances = num.linspace(1, 58, 40)*km*cake.m2d
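
collections.defaultdict() with no default factory behaves like a plain dict here; a sketch of the same distance-to-station mapping with a plain dict, assuming stations and event as above:

stations_distance = {}
for s in stations:
    d = orthodrome.distance_accurate50m_numpy(s.lat, s.lon, event.lat, event.lon)
    stations_distance[float(d)] = s.station

for d in sorted(stations_distance)[:5]:   # five closest stations
    print('%10.0f m  %s' % (d, stations_distance[d]))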
Example #33
    def invert(self, args):
        align_phase = 'P'
        ampl_scaler = '4*standard deviation'

        for array_id in self.provider.use:
            try:
                if args.array_id and array_id != args.array_id:
                    continue
            except AttributeError:
                pass
            subdir = pjoin('array_data', array_id)
            settings_fn = pjoin(subdir, 'plot_settings.yaml')
            if os.path.isfile(settings_fn):
                settings = PlotSettings.load(filename=pjoin(settings_fn))
                settings.update_from_args(self.args)
            else:
                logger.warning('no settings found: %s' % array_id)
                continue
            if settings.store_superdirs:
                engine = LocalEngine(store_superdirs=settings.store_superdirs)
            else:
                engine = LocalEngine(use_config=True)
            try:
                store = engine.get_store(settings.store_id)
            except seismosizer.NoSuchStore as e:
                logger.info('%s ... skipping.' % e)
                return

            if not settings.trace_filename:
                settings.trace_filename = pjoin(subdir, 'beam.mseed')
            if not settings.station_filename:
                settings.station_filename = pjoin(subdir, 'array_center.pf')
            zoom_window = settings.zoom
            mod = store.config.earthmodel_1d

            zstart, zstop, inkr = settings.depths.split(':')
            test_depths = num.arange(float(zstart)*km, float(zstop)*km, float(inkr)*km)
            traces = io.load(settings.trace_filename)
            event = model.load_events(settings.event_filename)
            assert len(event)==1
            event = event[0]
            event.depth = float(settings.depth) * 1000.
            base_source = MTSource.from_pyrocko_event(event)

            test_sources = []
            for d in test_depths:
                s = base_source.clone()
                s.depth = float(d)
                test_sources.append(s)

            stations = model.load_stations(settings.station_filename)
            station = [
                s for s in stations
                if match_nslc('%s.%s.%s.*' % s.nsl(), traces[0].nslc_id)]
            if len(station) != 1:
                logger.error(
                    'no matching station found for %s' %
                    '.'.join(traces[0].nslc_id))
            else:
                station = station[0]
            targets = [station_to_target(station, quantity=settings.quantity, store_id=settings.store_id)]
            try:
                request = engine.process(targets=targets, sources=test_sources)
            except seismosizer.NoSuchStore as e:
                logger.info('%s ... skipping.' % e)
                return
            except meta.OutOfBounds as error:
                if settings.force_nearest_neighbor:
                    logger.warning('%s  Using nearest neighbor instead.' % error)
                    mod_targets = []
                    for t in targets:
                        closest_source = min(test_sources, key=lambda s: s.distance_to(t))
                        farthest_source = max(test_sources, key=lambda s: s.distance_to(t))
                        min_dist_delta = store.config.distance_min - closest_source.distance_to(t)
                        max_dist_delta = store.config.distance_max - farthest_source.distance_to(t)
                        if min_dist_delta < 0:
                            azi, bazi = closest_source.azibazi_to(t)
                            newlat, newlon = ortho.azidist_to_latlon(t.lat, t.lon, azi, min_dist_delta*cake.m2d)
                        elif max_dist_delta < 0:
                            azi, bazi = farthest_source.azibazi_to(t)
                            newlat, newlon = ortho.azidist_to_latlon(t.lat, t.lon, azi, max_dist_delta*cake.m2d)
                        t.lat, t.lon = newlat, newlon
                        mod_targets.append(t)
                    request = engine.process(targets=mod_targets, sources=test_sources)
                else:
                    raise error

            candidates = []
            for s, t, tr in request.iter_results():
                tr.deltat = regularize_float(tr.deltat)
                if True:
                    tr = integrate_differentiate(tr, 'differentiate')
                tr = settings.do_filter(tr)
                candidates.append((s, tr))
            assert len(traces)==1
            ref = traces[0]
            ref = settings.do_filter(ref)
            dist = ortho.distance_accurate50m(event, station)
            tstart = self.provider.timings[array_id].timings[0].t(mod, (event.depth, dist)) + event.time
            tend = self.provider.timings[array_id].timings[1].t(mod, (event.depth, dist)) + event.time
            ref = ref.chop(tstart, tend)
            misfits = []

            center_freqs = num.arange(1., 9., 4.)
            num_f_widths = len(center_freqs)

            mesh_fc = num.zeros(len(center_freqs)*num_f_widths*len(candidates))
            mesh_fwidth = num.zeros(len(center_freqs)*num_f_widths*len(candidates))
            misfits_array = num.zeros((len(center_freqs), num_f_widths, len(candidates)))
            depths_array = num.zeros((len(center_freqs), num_f_widths, len(candidates)))
            debug = False
            pb = ProgressBar(maxval=max(center_freqs)).start()
            i = 0
            for i_fc, fc in enumerate(center_freqs):
                if debug:
                    fig = plt.figure()

                fl_min = fc-fc*2./5.
                fr_max = fc+fc*2./5.
                widths = num.linspace(fl_min, fr_max, num_f_widths)

                for i_width, width in enumerate(widths):
                    i_candidate = 0
                    mesh_fc[i] = fc
                    mesh_fwidth[i] = width
                    i += 1
                    for source, candidate in candidates:
                        candidate = candidate.copy()
                        tstart = self.provider.timings[array_id].timings[0].t(mod, (source.depth, dist)) + event.time
                        tend = self.provider.timings[array_id].timings[1].t(mod, (source.depth, dist)) + event.time
                        filters = [
                            ButterworthResponse(corner=float(fc+width*0.5), order=4, type='low'),
                            ButterworthResponse(corner=float(fc-width*0.5), order=4, type='high')]
                        settings.filters = filters
                        candidate = settings.do_filter(candidate)
                        candidate.chop(tmin=tstart, tmax=tend)
                        candidate.shift(float(settings.correction))
                        m, n, aproc, bproc = ref.misfit(candidate=candidate, setup=settings.misfit_setup, debug=True)
                        aproc.set_codes(station='aproc')
                        bproc.set_codes(station='bproc')
                        if debug:
                            ax = fig.add_subplot(len(test_depths)+1, 1, i+1)
                            ax.plot(aproc.get_xdata(), aproc.get_ydata())
                            ax.plot(bproc.get_xdata(), bproc.get_ydata())
                        mf = m/n
                        #misfits.append((source.depth, mf))
                        misfits_array[i_fc][i_width][i_candidate] = mf
                        i_candidate += 1
                pb.update(fc)

            pb.finish()
            fig = plt.figure()
            ax = fig.add_subplot(111)
            i_best_fits = num.argmin(misfits_array, 2)
            print('best fits:\n', i_best_fits)
            best_fits = num.min(misfits_array, 2)
            #cmap = matplotlib.cm.get_cmap()
            xmesh, ymesh = num.meshgrid(mesh_fc, mesh_fwidth)
            #c = (best_fits-num.min(best_fits))/(num.max(best_fits)-num.min(best_fits))
            ax.scatter(xmesh, ymesh, best_fits*100)
            #ax.scatter(mesh_fc, mesh_fwidth, c)
            #ax.scatter(mesh_fc, mesh_fwidth, s=best_fits)
            ax.set_xlabel('fc')
            ax.set_ylabel('f_width')
        plt.legend()
        plt.show()
Example #34
                response=cr.response)

            stations[net, sta].channel_list.append(channel)
        else:
            logger.warning('no station information for %s.%s.%s' %
                           (net, sta, loc))

    for station in stations.values():
        station.channel_list.sort(key=lambda c: (c.location_code, c.code))

    return fs.FDSNStationXML(
        source='Converted from Pyrocko stations file and RESP information',
        created=time.time(),
        network_list=[networks[net_] for net_ in sorted(networks.keys())])


if __name__ == '__main__':
    import sys
    from pyrocko import model

    util.setup_logging(__name__)

    if len(sys.argv) < 2:
        sys.exit('usage: python -m pyrocko.station.resp <stations> <resp> ...')

    stations = model.load_stations(sys.argv[1])

    sxml = make_stationxml(stations, iload(sys.argv[2:]))

    print(sxml.dump_xml())
from pyrocko import pile, io, util, model
from pyrocko.example import get_example_data

# Download example data
get_example_data('data_conversion', recursive=True)

input_path = 'data_conversion/mseed'
output_path = 'data_conversion/sac/' \
        '%(dirhz)s/%(station)s_%(channel)s_%(tmin)s.sac'

fn_stations = 'data_conversion/stations.txt'

stations_list = model.load_stations(fn_stations)

stations = {}
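# index the stations by (network, station, location) for quick lookup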
for s in stations_list:
    stations[s.network, s.station, s.location] = s
    s.set_channels_by_name(*'BHN BHE BHZ BLN BLE BLZ'.split())

p = pile.make_pile(input_path)
h = 3600.
tinc = 1*h
tmin = util.day_start(p.tmin)
for traces in p.chopper_grouped(tmin=tmin, tinc=tinc,
                                gather=lambda tr: tr.nslc_id):
    for tr in traces:
        dirhz = '%ihz' % int(round(1./tr.deltat))
        io.save(
            [tr], output_path,
            format='sac',
            additional={'dirhz': dirhz})
Example #36
        plt.show()

    def ydata_of_target(self, sources, target):
        if sources == []:
            sources = self.sources
        for source in sources:
            ssmgrm = self.seismograms[source][target]
            yield source, ssmgrm.get_xdata(), ssmgrm.get_ydata()

    @property
    def store(self):
        return self.engine.get_store(self.store_id)


if __name__ == "__main__":

    selfdir = pjoin(os.getcwd(), __file__.rsplit('/', 1)[0])
    selfdir = selfdir.rsplit('/')[0]
    
    # load stations from file:
    stations = model.load_stations(pjoin(selfdir,
                            '../reference_stations_castor_selection.txt'))

    #traces = io.load(pjoin(selfdir, '../traces/2013-10-01T03-32-45/2013-10-01*'))

    markers = gui_util.Marker.load_markers(pjoin(selfdir,
                                        '../reference_marker_castor.txt'))

    C = Core(markers=markers, stations=stations)
    #fbands = []
    #fbands.append([1.0, 2.0])
    #fbands.append([2.0, 6.0])
    #fbands.append([4.0, 10.])

    phases = LocalEngine(store_superdirs=['/data/stores'],
                         default_store_id='globalttt').get_store()

    #filenames = glob.glob('data/*.mseed')
    #filenames = glob.glob('/data/webnet/waveform_R/2008/*.mseed')
    #datapath = '/data/webnet/mseed/2008'
    #datapath = '/data/webnet/waveform_R/2008'
    #datapath = '/data/share/Res_all_NKC'
    datapath = '/media/usb0/Res_all_NKC_taper'
    #datapath = '/media/usb0/restituted_pyrocko'
    stations = model.load_stations('../data/stations.pf')
    reference_id = 'NKC'
    references = {}
    data_pile = pile.make_pile(datapath, selector='rest_*')


    fband = {'order':4, 'corner_hp':1.0, 'corner_lp':4.}
    window = StaticLengthWindow(static_length=30., 
                                phase_position=0.5)

    taper = trace.CosFader(xfrac=0.25)

    #event_selector = EventSelector(distmin=1000*km,
    #                               distmax=20000*km,
    #                               depthmin=2*km,
    #                               depthmax=600*km,
                                             phasename=phasename, 
                                             event=event)
                    picks.append(m)
            progbar.update(i)

        progbar.finish()
        with open(fn, 'w') as f:
            f.write(datastr)

        gui_util.save_markers(picks, 'picks.pf')

    def reduce_stf(self, event, t):
        t -= self.interp_stf(event.magnitude)/2.
        return t
if __name__ == '__main__':
    stations = model.load_stations('/media/usb/webnet/meta/stations.pf')
    events = []
    for efile in glob.glob('/home/marius/src/swarming/unperturbed/*/event.pf'):
        events.append(model.Event(load=efile))
    #events = model.load_events('/home/marius/src/swarming/events_swarm.pf')
    want_phases = {'p':('p', 'P'), 's':('s', 'S')}
    e = LocalEngine(use_config=True)
    s = e.get_store('vogtland_fischer_horalek_2000_vpvs169_minus4p')
    m = s.config.earthmodel_1d
    b = Bakery(stations=stations, 
               events=events,
               wanted_phases=want_phases, 
               model=m)
    model.dump_events(events, 'dumped_catalog.pf')
    b.run_n_write('picks.dat')
                        help='name of file containing station information',
                        required=True)
    parser.add_argument('--events',
                        help='name of file containing event catalog',
                        default=False,
                        required=False)
    parser.add_argument('--printall',
                        help='Print all results to terminal',
                        default=True,
                        required=False,
                        action='store_true')
    parser.add_argument('--show',
                        help='show figure at the end',
                        default=False,
                        required=False,
                        action='store_true')
    args = parser.parse_args()

    stations = model.load_stations(args.stations)

    if args.usestations:
        stations = [s for s in stations if util.match_nslc(args.usestations, s.nsl())]

    events = []
    if args.events:
        events.extend(model.load_events(args.events))
    if args.markers:
        markers = gui_util.load_markers(args.markers)
        events.extend([m.get_event() for m in markers])
    get_bounds(stations, events=events, usestations=args.usestations, printall=args.printall, show_fig=args.show)
def doit(sdr=None):
    azimutti = num.linspace(0, 360, 31)
    tmax = 2.
    f = plt.figure(figsize=(2.,2.1))
    ax = f.add_subplot(111, polar=True)
    AZIMS = []
    DS = []
    VALS = []
    STATIONAZIMS = []
    STATIONDISTS = []

    ref_source = DCSource.from_pyrocko_event(ref_event)
    channels = 'Z'
    targets = [Target(lat=ref_source.lat,
                      lon=ref_source.lon,
                      north_shift=north_shift,
                      store_id=store_id,
                      codes=('', '', '', channel))
               for channel in channels
               for north_shift in distances]

    stations = model.load_stations(pjoin(derec_home, 'mseeds', 'doctar',
                                'doctar_2011-11-01', 'stations.txt'))
    doctar_targets = du.stations2targets(stations, store_id)

    for t in doctar_targets:
        az = t.azibazi_to(ref_source)
        d = t.distance_to(ref_source)
        STATIONAZIMS.append(az[1]*(num.pi/180))
        STATIONDISTS.append(d/1000.)

    if sdr is not None:
        ref_source.strike = sdr[0]
        ref_source.dip = sdr[1]
        ref_source.rake = sdr[2]

    st = ref_source.strike
    di = ref_source.dip
    ra = ref_source.rake

    for azim in azimutti:
        print(azim)
        z_intmin = defaultdict()
        ymin_int = defaultdict()
        ref_source.strike = azim
        sources = du.test_event_generator(ref_source, depths)

        resp = e.process(sources=sources, targets=targets)
        tracs = du.response_to_dict(resp)

        stf = [[0.,0.15],[0.,1.]]
        du.apply_stf(tracs, stf)

        tmin_arrivals = defaultdict()
        sample_rate = e.get_store(store_id).config.sample_rate
        tmax_samples = int(tmax * sample_rate)
        ymaxs = defaultdict(list)

        phase_ids = ['p', 'pP', 'P']
        all_ars = defaultdict()
        for d in distances:
            for z in depths:
                ts = []
                key = (d, z)
                phases = [cake.PhaseDef(pid) for pid in phase_ids]

                for a in m.arrivals(phases=phases, distances=[d*cake.m2d], zstart=z):
                    ts.append(a)
                all_ars[(key)] = ts

                #tmin_arrivals[key] = min([tr.t for tr in ts])
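                # first-arrival time per (distance, depth) pair, used below
                # to align the traces to reduced time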
                tmin_arrivals[key] = pc.cake_first_arrival(d, z, m, phase_ids)

        sources_dict = defaultdict()

        axes_dict = defaultdict(dict)
        #for t in targets:
        #    fig, axs = plt.subplots(len(sources), 1, sharey=True, figsize=(4,3), dpi=160)
        #    fig.myadjustment = False
        #    ad = dict(zip(sources, axs))
        #    axes_dict[t] = ad

        for s, tt in tracs.items():
            # this has to go before the loop, for stacking
            for t, tr in tt.items():
                #ax = axes_dict[t][s]
                d = s.distance_to(t)
                z = s.depth

                tr.lowpass(2, 4)
                tr.highpass(4, 0.5)
                x = tr.get_xdata()-ref_source.time-tmin_arrivals[(d,z)]
                y = tr.get_ydata()
                x0_i = num.where(abs(x) == min(abs(x)))
                # has to be a single value
                if len(x0_i[0]) != 1:
                    print('WARNING: expected a single onset index')

                x0_i = x0_i[0][0]
                y_plot = y[x0_i:x0_i+tmax_samples]

                y_int = num.sum((y_plot**2))/tmax_samples
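                # track, per distance, the source depth whose trace carries
                # the least energy in the analysis window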

                try:
                    if y_int<ymin_int[d]:
                        ymin_int[d] = y_int
                        z_intmin[d] = s.depth
                except KeyError: 
                    ymin_int[d] = y_int
                    z_intmin[d] = s.depth

        #for d, minz in z_intmin.items():
        azim = azim / 180. * num.pi
        azims = [azim] * len(z_intmin)
        d = list(z_intmin.keys())
        vals = list(z_intmin.values())
        DS.extend(d)
        AZIMS.extend(azims)
        VALS.extend(vals)
    

    #VALS = 5.-num.array(VALS)/1000.
    #VALS = correct_depth-num.array(VALS)/1000.
    VALS = num.array(VALS)/1000.
    #DS = [d-ref_source.depth for d in DS]
    AZIMS = num.array(AZIMS)
    DS = num.array(DS)/1000.
    fn = 'polar_%s_%s_%s_%s.txt'%(test_type, st, di, ra) 
    num.savetxt(fn, num.array((AZIMS, DS, VALS)))
def main(args=None):
    if args is None:
        args = sys.argv[1:]

    parser = OptionParser(
        usage=usage,
        description=description)

    parser.add_option(
        '--width',
        dest='width',
        type='float',
        default=20.0,
        metavar='FLOAT',
        help='set width of output image [cm] (%default)')

    parser.add_option(
        '--height',
        dest='height',
        type='float',
        default=15.0,
        metavar='FLOAT',
        help='set height of output image [cm] (%default)')

    parser.add_option(
        '--topo-resolution-min',
        dest='topo_resolution_min',
        type='float',
        default=40.0,
        metavar='FLOAT',
        help='minimum resolution of topography [dpi] (%default)')

    parser.add_option(
        '--topo-resolution-max',
        dest='topo_resolution_max',
        type='float',
        default=200.0,
        metavar='FLOAT',
        help='maximum resolution of topography [dpi] (%default)')

    parser.add_option(
        '--no-grid',
        dest='show_grid',
        default=True,
        action='store_false',
        help='don\'t show grid lines')

    parser.add_option(
        '--no-topo',
        dest='show_topo',
        default=True,
        action='store_false',
        help='don\'t show topography')

    parser.add_option(
        '--no-cities',
        dest='show_cities',
        default=True,
        action='store_false',
        help='don\'t show cities')

    parser.add_option(
        '--no-illuminate',
        dest='illuminate',
        default=True,
        action='store_false',
        help='deactivate artificial illumination of topography')

    parser.add_option(
        '--illuminate-factor-land',
        dest='illuminate_factor_land',
        type='float',
        metavar='FLOAT',
        help='set factor for artificial illumination of land (0.5)')

    parser.add_option(
        '--illuminate-factor-ocean',
        dest='illuminate_factor_ocean',
        type='float',
        metavar='FLOAT',
        help='set factor for artificial illumination of ocean (0.25)')

    parser.add_option(
        '--theme',
        choices=['topo', 'soft'],
        default='topo',
        help='select color theme, available: topo, soft (%default)')

    parser.add_option(
        '--download-etopo1',
        dest='download_etopo1',
        action='store_true',
        help='download full ETOPO1 topography dataset')

    parser.add_option(
        '--download-srtmgl3',
        dest='download_srtmgl3',
        action='store_true',
        help='download full SRTMGL3 topography dataset')

    parser.add_option(
        '--make-decimated-topo',
        dest='make_decimated',
        action='store_true',
        help='pre-make all decimated topography datasets')

    parser.add_option(
        '--stations',
        dest='stations_fn',
        metavar='FILENAME',
        help='load station coordinates from FILENAME')

    parser.add_option(
        '--events',
        dest='events_fn',
        metavar='FILENAME',
        help='load event coordinates from FILENAME')

    parser.add_option(
        '--debug',
        dest='debug',
        action='store_true',
        default=False,
        help='print debugging information to stderr')

    (options, args) = parser.parse_args(args)

    if options.debug:
        util.setup_logging(program_name, 'debug')
    else:
        util.setup_logging(program_name, 'info')

    if options.download_etopo1:
        import pyrocko.datasets.topo.etopo1
        pyrocko.datasets.topo.etopo1.download()

    if options.download_srtmgl3:
        import pyrocko.datasets.topo.srtmgl3
        pyrocko.datasets.topo.srtmgl3.download()

    if options.make_decimated:
        import pyrocko.datasets.topo
        pyrocko.datasets.topo.make_all_missing_decimated()

    if (options.download_etopo1 or options.download_srtmgl3 or
            options.make_decimated) and len(args) == 0:

        sys.exit(0)

    if options.theme == 'soft':
        color_kwargs = {
            'illuminate_factor_land': options.illuminate_factor_land or 0.2,
            'illuminate_factor_ocean': options.illuminate_factor_ocean or 0.15,
            'color_wet': (216, 242, 254),
            'color_dry': (238, 236, 230),
            'topo_cpt_wet': 'light_sea_uniform',
            'topo_cpt_dry': 'light_land_uniform'}
    elif options.theme == 'topo':
        color_kwargs = {
            'illuminate_factor_land': options.illuminate_factor_land or 0.5,
            'illuminate_factor_ocean': options.illuminate_factor_ocean or 0.25}

    events = []
    if options.events_fn:
        events = model.load_events(options.events_fn)

    stations = []

    if options.stations_fn:
        stations = model.load_stations(options.stations_fn)

    if not (len(args) == 4 or (
            len(args) == 1 and (events or stations))):

        parser.print_help()
        sys.exit(1)

    if len(args) == 4:
        try:
            lat = float(args[0])
            lon = float(args[1])
            radius = float(args[2])*km
        except Exception:
            parser.print_help()
            sys.exit(1)
    else:
        lats, lons = latlon_arrays(stations+events)
        lat, lon = map(float, od.geographic_midpoint(lats, lons))
        radius = float(
            num.max(od.distance_accurate50m_numpy(lat, lon, lats, lons)))
        radius *= 1.1
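        # 10 % margin so stations and events stay clear of the map frame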

    m = automap.Map(
        width=options.width,
        height=options.height,
        lat=lat,
        lon=lon,
        radius=radius,
        topo_resolution_max=options.topo_resolution_max,
        topo_resolution_min=options.topo_resolution_min,
        show_topo=options.show_topo,
        show_grid=options.show_grid,
        illuminate=options.illuminate,
        **color_kwargs)

    logger.debug('map configuration:\n%s' % str(m))

    if options.show_cities:
        m.draw_cities()

    if stations:
        lats = [s.lat for s in stations]
        lons = [s.lon for s in stations]

        m.gmt.psxy(
            in_columns=(lons, lats),
            S='t8p',
            G='black',
            *m.jxyr)

        for s in stations:
            m.add_label(s.lat, s.lon, '%s' % '.'.join(
                x for x in s.nsl() if x))

    if events:
        beachball_symbol = 'mt'
        beachball_size = 20.0
        for ev in events:
            if ev.moment_tensor is None:
                m.gmt.psxy(
                    in_rows=[[ev.lon, ev.lat]],
                    S='c12p',
                    G=gmtpy.color('scarletred2'),
                    W='1p,black',
                    *m.jxyr)

            else:
                devi = ev.moment_tensor.deviatoric()
                mt = devi.m_up_south_east()
                mt = mt / ev.moment_tensor.scalar_moment() \
                    * pmt.magnitude_to_moment(5.0)
                m6 = pmt.to6(mt)
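                # psmeca input row: lon, lat, depth, the six moment-tensor
                # components, exponent and (unused) alternative plot position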
                data = (ev.lon, ev.lat, 10) + tuple(m6) + (1, 0, 0)

                if m.gmt.is_gmt5():
                    kwargs = dict(
                        M=True,
                        S='%s%g' % (
                            beachball_symbol[0], beachball_size / gmtpy.cm))
                else:
                    kwargs = dict(
                        S='%s%g' % (
                            beachball_symbol[0], beachball_size*2 / gmtpy.cm))

                m.gmt.psmeca(
                    in_rows=[data],
                    G=gmtpy.color('chocolate1'),
                    E='white',
                    W='1p,%s' % gmtpy.color('chocolate3'),
                    *m.jxyr,
                    **kwargs)

    m.save(args[-1])
            default='google',
            help='map provider [google | osm] (default=google)')


def __snufflings__():
    return [MapMaker()]


if __name__ == '__main__':
    util.setup_logging('map.py', 'info')
    s = MapMaker()
    options, args, parser = s.setup_cli()
    s.markers = []

    if options.stations_filename:
        stations = model.load_stations(options.stations_filename)
        s.stations = stations
    else:
        s.stations = None

    if options.events_filename:
        events = model.load_events(filename=options.events_filename)
        markers = [gui_util.EventMarker(e) for e in events]
        s.markers.extend(markers)

    if options.markers_filename:
        markers = gui_util.load_markers(options.markers_filename)
        s.markers.extend(markers)
    s.open_external = True
    mapmap = {'google': 'Google Maps', 'osm': 'OpenStreetMap'}
    s.map_kind = mapmap[options.map_provider]
Example #43
def main():
    parser = OptionParser(usage=usage, description=description)

    parser.add_option('--force',
                      dest='force',
                      action='store_true',
                      default=False,
                      help='allow recreation of output <directory>')

    parser.add_option('--debug',
                      dest='debug',
                      action='store_true',
                      default=False,
                      help='print debugging information to stderr')

    parser.add_option('--dry-run',
                      dest='dry_run',
                      action='store_true',
                      default=False,
                      help='show available stations/channels and exit '
                      '(do not download waveforms)')

    parser.add_option('--continue',
                      dest='continue_',
                      action='store_true',
                      default=False,
                      help='continue download after an interruption')

    parser.add_option('--local-data',
                      dest='local_data',
                      action='append',
                      help='add file/directory with local data')

    parser.add_option('--local-stations',
                      dest='local_stations',
                      action='append',
                      help='add local stations file')

    parser.add_option('--selection',
                      dest='selection_file',
                      action='append',
                      help='add station selection file')

    parser.add_option(
        '--local-responses-resp',
        dest='local_responses_resp',
        action='append',
        help='add file/directory with local responses in RESP format')

    parser.add_option('--local-responses-pz',
                      dest='local_responses_pz',
                      action='append',
                      help='add file/directory with local pole-zero responses')

    parser.add_option(
        '--local-responses-stationxml',
        dest='local_responses_stationxml',
        help='add file with local response information in StationXML format')

    parser.add_option(
        '--window',
        dest='window',
        default='full',
        help='set time window to choose [full, p, "<time-start>,<time-end>"'
        '] (time format is YYYY-MM-DD HH:MM:SS)')

    parser.add_option(
        '--out-components',
        choices=['enu', 'rtu'],
        dest='out_components',
        default='rtu',
        help='set output component orientations to radial-transverse-up [rtu] '
        '(default) or east-north-up [enu]')

    parser.add_option('--out-units',
                      choices=['M', 'M/S', 'M/S**2'],
                      dest='output_units',
                      default='M',
                      help='set output units to displacement "M" (default),'
                      ' velocity "M/S" or acceleration "M/S**2"')

    parser.add_option(
        '--padding-factor',
        type=float,
        default=3.0,
        dest='padding_factor',
        help='extend time window on either side, in multiples of 1/<fmin_hz> '
        '(default: %default)')

    parser.add_option(
        '--zero-padding',
        dest='zero_pad',
        action='store_true',
        default=False,
        help='Extend traces by zero-padding if clean restitution requires '
        'longer windows')

    parser.add_option(
        '--credentials',
        dest='user_credentials',
        action='append',
        default=[],
        metavar='SITE,USER,PASSWD',
        help='user credentials for specific site to access restricted data '
        '(this option can be repeated)')

    parser.add_option(
        '--token',
        dest='auth_tokens',
        metavar='SITE,FILENAME',
        action='append',
        default=[],
        help='user authentication token for specific site to access '
        'restricted data (this option can be repeated)')

    parser.add_option(
        '--sites',
        dest='sites',
        metavar='SITE1,SITE2,...',
        default='geofon,iris,orfeus',
        help='sites to query (available: %s, default: "%%default")' %
        ', '.join(g_sites_available))

    parser.add_option(
        '--band-codes',
        dest='priority_band_code',
        metavar='V,L,M,B,H,S,E,...',
        default='B,H',
        help='select and prioritize band codes (default: %default)')

    parser.add_option(
        '--instrument-codes',
        dest='priority_instrument_code',
        metavar='H,L,G,...',
        default='H,L',
        help='select and prioritize instrument codes (default: %default)')

    parser.add_option('--radius-min',
                      dest='radius_min',
                      metavar='VALUE',
                      default=0.0,
                      type=float,
                      help='minimum radius [km]')

    parser.add_option('--nstations-wanted',
                      dest='nstations_wanted',
                      metavar='N',
                      type=int,
                      help='number of stations to select initially')

    (options, args) = parser.parse_args(sys.argv[1:])

    print('Parsed arguments:', args)
    if len(args) not in (10, 7, 6):
        parser.print_help()
        sys.exit(1)

    if options.debug:
        util.setup_logging(program_name, 'debug')
    else:
        util.setup_logging(program_name, 'info')

    if options.local_responses_pz and options.local_responses_resp:
        logger.critical('cannot use local responses in PZ and RESP '
                        'format at the same time')
        sys.exit(1)

    n_resp_opt = 0
    for resp_opt in (options.local_responses_pz, options.local_responses_resp,
                     options.local_responses_stationxml):

        if resp_opt:
            n_resp_opt += 1

    if n_resp_opt > 1:
        logger.critical('can only handle local responses from either PZ or '
                        'RESP or StationXML. Cannot yet merge different '
                        'response formats.')
        sys.exit(1)

    if options.local_responses_resp and not options.local_stations:
        logger.critical('--local-responses-resp can only be used '
                        'when --stations is also given.')
        sys.exit(1)

    try:
        ename = ''
        magnitude = None
        mt = None
        if len(args) == 10:
            time = util.str_to_time(args[1] + ' ' + args[2])
            lat = float(args[3])
            lon = float(args[4])
            depth = float(args[5]) * km
            iarg = 6

        elif len(args) == 7:
            if args[2].find(':') == -1:
                sname_or_date = None
                lat = float(args[1])
                lon = float(args[2])
                event = None
                time = None
            else:
                sname_or_date = args[1] + ' ' + args[2]

            iarg = 3

        elif len(args) == 6:
            sname_or_date = args[1]
            iarg = 2

        if len(args) in (7, 6) and sname_or_date is not None:
            events = get_events_by_name_or_date([sname_or_date],
                                                catalog=geofon)
            if len(events) == 0:
                logger.critical('no event found')
                sys.exit(1)
            elif len(events) > 1:
                logger.critical('more than one event found')
                sys.exit(1)

            event = events[0]
            time = event.time
            lat = event.lat
            lon = event.lon
            depth = event.depth
            ename = event.name
            magnitude = event.magnitude
            mt = event.moment_tensor

        radius = float(args[iarg]) * km
        fmin = float(args[iarg + 1])
        sample_rate = float(args[iarg + 2])

        eventname = args[iarg + 3]
        cwd = str(sys.argv[1])
        event_dir = op.join(cwd, 'data', 'events', eventname)
        output_dir = op.join(event_dir, 'waveforms')
    except Exception:
        parser.print_help()
        sys.exit(1)

    if options.force and op.isdir(event_dir):
        if not options.continue_:
            shutil.rmtree(event_dir)

    if op.exists(event_dir) and not options.continue_:
        logger.critical(
            'directory "%s" exists. Delete it first or use the --force option'
            % event_dir)
        sys.exit(1)

    util.ensuredir(output_dir)

    if time is not None:
        event = model.Event(time=time,
                            lat=lat,
                            lon=lon,
                            depth=depth,
                            name=ename,
                            magnitude=magnitude,
                            moment_tensor=mt)

    if options.window == 'full':
        if event is None:
            logger.critical('need event for --window=full')
            sys.exit(1)

        low_velocity = 1500.
        timewindow = VelocityWindow(low_velocity,
                                    tpad=options.padding_factor / fmin)

        tmin, tmax = timewindow(time, radius, depth)

    elif options.window == 'p':
        if event is None:
            logger.critical('need event for --window=p')
            sys.exit(1)

        phases = list(map(cake.PhaseDef, 'P p'.split()))
        emod = cake.load_model()

        tpad = options.padding_factor / fmin
        timewindow = PhaseWindow(emod, phases, -tpad, tpad)

        arrivaltimes = []
        for dist in num.linspace(0, radius, 20):
            try:
                arrivaltimes.extend(timewindow(time, dist, depth))
            except NoArrival:
                pass

        if not arrivaltimes:
            logger.error('required phase arrival not found')
            sys.exit(1)

        tmin = min(arrivaltimes)
        tmax = max(arrivaltimes)

    else:
        try:
            stmin, stmax = options.window.split(',')
            tmin = util.str_to_time(stmin.strip())
            tmax = util.str_to_time(stmax.strip())

            timewindow = FixedWindow(tmin, tmax)

        except ValueError:
            logger.critical('invalid argument to --window: "%s"' %
                            options.window)
            sys.exit(1)

    if event is not None:
        event.name = eventname

    tfade = tfade_factor / fmin

    tpad = tfade

    tmin -= tpad
    tmax += tpad

    tinc = None

    priority_band_code = options.priority_band_code.split(',')
    for s in priority_band_code:
        if len(s) != 1:
            logger.critical('invalid band code: %s' % s)

    priority_instrument_code = options.priority_instrument_code.split(',')
    for s in priority_instrument_code:
        if len(s) != 1:
            logger.critical('invalid instrument code: %s' % s)

    station_query_conf = dict(latitude=lat,
                              longitude=lon,
                              minradius=options.radius_min * km * cake.m2d,
                              maxradius=radius * cake.m2d,
                              channel=','.join('%s??' % s
                                               for s in priority_band_code))

    target_sample_rate = sample_rate

    fmax = target_sample_rate

    # target_sample_rate = None
    # priority_band_code = ['H', 'B', 'M', 'L', 'V', 'E', 'S']

    priority_units = ['M/S', 'M', 'M/S**2']

    # output_units = 'M'

    sites = [x.strip() for x in options.sites.split(',') if x.strip()]

    for site in sites:
        if site not in g_sites_available:
            logger.critical('unknown FDSN site: %s' % site)
            sys.exit(1)

    for s in options.user_credentials:
        try:
            site, user, passwd = s.split(',')
            g_user_credentials[site] = user, passwd
        except ValueError:
            logger.critical('invalid format for user credentials: "%s"' % s)
            sys.exit(1)

    for s in options.auth_tokens:
        try:
            site, token_filename = s.split(',')
            with open(token_filename, 'r') as f:
                g_auth_tokens[site] = f.read()
        except (ValueError, OSError, IOError):
            logger.critical('cannot get token from file: %s' % token_filename)
            sys.exit(1)

    fn_template0 = \
        'data_%(network)s.%(station)s.%(location)s.%(channel)s_%(tmin)s.mseed'

    fn_template_raw = op.join(output_dir, 'raw', fn_template0)
    fn_stations_raw = op.join(output_dir, 'stations.raw.txt')
    fn_template_rest = op.join(output_dir, 'rest', fn_template0)
    fn_commandline = op.join(output_dir, 'beatdown.command')

    ftap = (ffade_factors[0] * fmin, fmin, fmax, ffade_factors[1] * fmax)
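    # frequency taper for restitution: cosine flanks below fmin and above
    # fmax, flat response in between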

    # chapter 1: download

    sxs = []
    for site in sites:
        try:
            extra_args = {
                'iris': dict(matchtimeseries=True),
            }.get(site, {})

            extra_args.update(station_query_conf)

            if site == 'geonet':
                extra_args.update(starttime=tmin, endtime=tmax)
            else:
                extra_args.update(startbefore=tmax,
                                  endafter=tmin,
                                  includerestricted=(site in g_user_credentials
                                                     or site in g_auth_tokens))

            logger.info('downloading channel information (%s)' % site)
            sx = fdsn.station(site=site,
                              format='text',
                              level='channel',
                              **extra_args)

        except fdsn.EmptyResult:
            logger.error('No stations matching given criteria. (%s)' % site)
            sx = None

        if sx is not None:
            sxs.append(sx)

    if all(sx is None for sx in sxs) and not options.local_data:
        sys.exit(1)

    nsl_to_sites = defaultdict(list)
    nsl_to_station = {}

    if options.selection_file:
        logger.info('using stations from stations file!')
        stations = []
        for fn in options.selection_file:
            stations.extend(model.load_stations(fn))

        nsls_selected = set(s.nsl() for s in stations)
    else:
        nsls_selected = None

    for sx, site in zip(sxs, sites):
        site_stations = sx.get_pyrocko_stations()
        for s in site_stations:
            nsl = s.nsl()

            nsl_to_sites[nsl].append(site)
            if nsl not in nsl_to_station:
                if nsls_selected:
                    if nsl in nsls_selected:
                        nsl_to_station[nsl] = s
                else:
                    # use the first site that provides this station
                    nsl_to_station[nsl] = s

        logger.info('number of stations found: %i' % len(nsl_to_station))

    # station weeding
    if options.nstations_wanted:
        nsls_selected = None
        stations_all = [
            nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())
        ]

        for s in stations_all:
            s.set_event_relative_data(event)

        stations_selected = weeding.weed_stations(stations_all,
                                                  options.nstations_wanted)[0]

        nsls_selected = set(s.nsl() for s in stations_selected)
        logger.info('number of stations selected: %i' % len(nsls_selected))

    if tinc is None:
        tinc = 3600.

    have_data = set()

    if options.continue_:
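        # re-scan files from the interrupted run so finished windows are
        # skipped below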
        fns = glob.glob(fn_template_raw % starfill())
        p = pile.make_pile(fns)
    else:
        fns = []

    have_data_site = {}
    could_have_data_site = {}
    for site in sites:
        have_data_site[site] = set()
        could_have_data_site[site] = set()

    available_through = defaultdict(set)
    it = 0
    nt = int(math.ceil((tmax - tmin) / tinc))
    for it in range(nt):
        tmin_win = tmin + it * tinc
        tmax_win = min(tmin + (it + 1) * tinc, tmax)
        logger.info('time window %i/%i (%s - %s)' %
                    (it + 1, nt, util.tts(tmin_win), util.tts(tmax_win)))

        have_data_this_window = set()
        if options.continue_:
            trs_avail = p.all(tmin=tmin_win, tmax=tmax_win, load_data=False)
            for tr in trs_avail:
                have_data_this_window.add(tr.nslc_id)
        for site, sx in zip(sites, sxs):
            if sx is None:
                continue

            selection = []
            channels = sx.choose_channels(
                target_sample_rate=target_sample_rate,
                priority_band_code=priority_band_code,
                priority_units=priority_units,
                priority_instrument_code=priority_instrument_code,
                timespan=(tmin_win, tmax_win))

            for nslc in sorted(channels.keys()):
                if nsls_selected is not None and nslc[:3] not in nsls_selected:
                    continue

                could_have_data_site[site].add(nslc)

                if nslc not in have_data_this_window:
                    channel = channels[nslc]
                    if event:
                        lat_, lon_ = event.lat, event.lon
                    else:
                        lat_, lon_ = lat, lon
                    try:
                        dist = orthodrome.distance_accurate50m_numpy(
                            lat_, lon_, channel.latitude.value,
                            channel.longitude.value)
                    except AttributeError:
                        dist = orthodrome.distance_accurate50m_numpy(
                            lat_, lon_, channel.latitude, channel.longitude)

                    if event:
                        depth_ = event.depth
                        time_ = event.time
                    else:
                        depth_ = None
                        time_ = None

                    tmin_, tmax_ = timewindow(time_, dist, depth_)

                    tmin_this = tmin_ - tpad
                    tmax_this = float(tmax_ + tpad)

                    tmin_req = max(tmin_win, tmin_this)
                    tmax_req = min(tmax_win, tmax_this)
                    if channel.sample_rate:
                        try:
                            deltat = 1.0 / int(channel.sample_rate.value)
                        except AttributeError:
                            deltat = 1.0 / int(channel.sample_rate)
                    else:
                        deltat = 1.0

                    if tmin_req < tmax_req:
                        logger.debug('deltat %f' % deltat)
                        # extend time window by some samples because otherwise
                        # gaps are sometimes produced; apparently the web
                        # services are only sensitive to full seconds, so
                        # round to avoid gaps and increase the safety window
                        selection.append(nslc +
                                         (math.floor(tmin_req - deltat * 20.0),
                                          math.ceil(tmax_req + deltat * 20.0)))
            if options.dry_run:
                # underscore names avoid clobbering the outer tmin/tmax
                for (net, sta, loc, cha, tmin_sel, tmax_sel) in selection:
                    available_through[net, sta, loc, cha].add(site)

            else:
                neach = 100
                i = 0
                nbatches = ((len(selection) - 1) // neach) + 1
                while i < len(selection):
                    selection_now = selection[i:i + neach]
                    f = tempfile.NamedTemporaryFile()
                    try:
                        sbatch = ''
                        if nbatches > 1:
                            sbatch = ' (batch %i/%i)' % (
                                (i // neach) + 1, nbatches)

                        logger.info('downloading data (%s)%s' % (site, sbatch))
                        data = fdsn.dataselect(site=site,
                                               selection=selection_now,
                                               **get_user_credentials(site))

                        while True:
                            buf = data.read(1024)
                            if not buf:
                                break
                            f.write(buf)

                        f.flush()

                        trs = io.load(f.name)
                        for tr in trs:
                            tr.fix_deltat_rounding_errors()
                            logger.debug('cutting window: %f - %f' %
                                         (tmin_win, tmax_win))
                            logger.debug(
                                'available window: %f - %f, nsamples: %g' %
                                (tr.tmin, tr.tmax, tr.ydata.size))
                            try:
                                logger.debug('tmin before snap %f' % tr.tmin)
                                tr.snap(interpolate=True)
                                logger.debug('tmin after snap %f' % tr.tmin)
                                tr.chop(tmin_win,
                                        tmax_win,
                                        snap=(math.floor, math.ceil),
                                        include_last=True)
                                logger.debug(
                                    'cut window: %f - %f, nsamples: %g' %
                                    (tr.tmin, tr.tmax, tr.ydata.size))
                                have_data.add(tr.nslc_id)
                                have_data_site[site].add(tr.nslc_id)
                            except trace.NoData:
                                pass

                        fns2 = io.save(trs, fn_template_raw)
                        for fn in fns2:
                            if fn in fns:
                                logger.warn('overwriting file %s', fn)
                        fns.extend(fns2)

                    except fdsn.EmptyResult:
                        pass

                    except HTTPError:
                        logger.warn('an error occurred while downloading data '
                                    'for channels \n  %s' %
                                    '\n  '.join('.'.join(x[:4])
                                                for x in selection_now))

                    f.close()
                    i += neach

    if options.dry_run:
        nslcs = sorted(available_through.keys())

        all_channels = defaultdict(set)
        all_stations = defaultdict(set)

        def plural_s(x):
            return '' if x == 1 else 's'

        for nslc in nslcs:
            sites = tuple(sorted(available_through[nslc]))
            logger.info('selected: %s.%s.%s.%s from site%s %s' %
                        (nslc + (plural_s(len(sites)), '+'.join(sites))))

            all_channels[sites].add(nslc)
            all_stations[sites].add(nslc[:3])

        nchannels_all = 0
        nstations_all = 0
        for sites in sorted(all_channels.keys(),
                            key=lambda sites: (-len(sites), sites)):

            nchannels = len(all_channels[sites])
            nstations = len(all_stations[sites])
            nchannels_all += nchannels
            nstations_all += nstations
            logger.info('selected (%s): %i channel%s (%i station%s)' %
                        ('+'.join(sites), nchannels, plural_s(nchannels),
                         nstations, plural_s(nstations)))

        logger.info('selected total: %i channel%s (%i station%s)' %
                    (nchannels_all, plural_s(nchannels_all), nstations_all,
                     plural_s(nstations_all)))

        logger.info('dry run done.')
        sys.exit(0)

    for nslc in have_data:
        # if we are in continue mode, we have to guess where the data came from
        if not any(nslc in have_data_site[site] for site in sites):
            for site in sites:
                if nslc in could_have_data_site[site]:
                    have_data_site[site].add(nslc)

    sxs = {}
    for site in sites:
        selection = []
        for nslc in sorted(have_data_site[site]):
            selection.append(nslc + (tmin - tpad, tmax + tpad))

        if selection:
            logger.info('downloading response information (%s)' % site)
            sxs[site] = fdsn.station(site=site,
                                     level='response',
                                     selection=selection)

            sxs[site].dump_xml(filename=op.join(output_dir, 'stations.%s.xml' %
                                                site))

    # chapter 1.5: inject local data

    if options.local_data:
        have_data_site['local'] = set()
        plocal = pile.make_pile(options.local_data, fileformat='detect')
        logger.info(
            'Importing local data from %s between %s (%f) and %s (%f)' %
            (options.local_data, util.time_to_str(tmin), tmin,
             util.time_to_str(tmax), tmax))
        for traces in plocal.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                             tmin=tmin,
                                             tmax=tmax,
                                             tinc=tinc):

            for tr in traces:
                if tr.nslc_id not in have_data:
                    fns.extend(io.save(traces, fn_template_raw))
                    have_data_site['local'].add(tr.nslc_id)
                    have_data.add(tr.nslc_id)

        sites.append('local')

    if options.local_responses_pz:
        sxs['local'] = epz.make_stationxml(
            epz.iload(options.local_responses_pz))

    if options.local_responses_resp:
        local_stations = []
        for fn in options.local_stations:
            local_stations.extend(model.load_stations(fn))

        sxs['local'] = resp.make_stationxml(
            local_stations, resp.iload(options.local_responses_resp))

    if options.local_responses_stationxml:
        sxs['local'] = stationxml.load_xml(
            filename=options.local_responses_stationxml)

    # chapter 1.6: dump raw data stations file

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    stations = [nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

    util.ensuredirs(fn_stations_raw)
    model.dump_stations(stations, fn_stations_raw)

    dump_commandline(sys.argv, fn_commandline)

    # chapter 2: restitution

    if not fns:
        logger.error('no data available')
        sys.exit(1)

    p = pile.make_pile(fns, show_progress=False)
    # processing window length; a data-adaptive choice would be
    # nice_seconds_floor(p.get_deltatmin() * 500000.)
    otinc = 3600.
    otmin = math.floor(p.tmin / otinc) * otinc
    otmax = math.ceil(p.tmax / otinc) * otinc
    otpad = tpad * 2

    fns = []
    rest_traces_b = []
    win_b = None
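    # process in overlapping windows, buffering the previous window's
    # restituted traces so neighbouring windows can be degapped with a
    # crossfade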
    for traces_a in p.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                      tmin=otmin,
                                      tmax=otmax,
                                      tinc=otinc,
                                      tpad=otpad):

        rest_traces_a = []
        win_a = None
        for tr in traces_a:
            win_a = tr.wmin, tr.wmax

            if win_b and win_b[0] >= win_a[0]:
                fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))
                rest_traces_b = []
                win_b = None

            response = None
            failure = []
            for site in sites:
                try:
                    if site not in sxs:
                        continue
                    logger.debug('Getting response for %s' % str(tr))
                    response = sxs[site].get_pyrocko_response(
                        tr.nslc_id,
                        timespan=(tr.tmin, tr.tmax),
                        fake_input_units=options.output_units)

                    break

                except stationxml.NoResponseInformation:
                    failure.append('%s: no response information' % site)

                except stationxml.MultipleResponseInformation:
                    failure.append('%s: multiple response information' % site)

            if response is None:
                failure = ', '.join(failure)

            else:
                failure = ''
                try:
                    if tr.tmin > tmin and options.zero_pad:
                        logger.warning(
                            'Trace too short for clean restitution in '
                            'desired frequency band -> zero-padding!')
                        tr.extend(tr.tmin - tfade, tr.tmax + tfade, 'repeat')

                    rest_tr = tr.transfer(tfade, ftap, response, invert=True)
                    rest_traces_a.append(rest_tr)

                except (trace.TraceTooShort, trace.NoData):
                    failure = 'trace too short'

            if failure:
                logger.warn('failed to restitute trace %s.%s.%s.%s (%s)' %
                            (tr.nslc_id + (failure, )))

        if rest_traces_b:
            rest_traces = trace.degapper(rest_traces_b + rest_traces_a,
                                         deoverlap='crossfade_cos')

            fns.extend(cut_n_dump(rest_traces, win_b, fn_template_rest))
            rest_traces_a = []
            if win_a:
                for tr in rest_traces:
                    try:
                        rest_traces_a.append(
                            tr.chop(win_a[0], win_a[1] + otpad, inplace=False))
                    except trace.NoData:
                        pass

        rest_traces_b = rest_traces_a
        win_b = win_a

    fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))

    # chapter 3: rotated restituted traces for inspection

    if not event:
        sys.exit(0)

    fn_template1 = \
        'DISPL.%(network)s.%(station)s.%(location)s.%(channel)s'

    fn_waveforms = op.join(output_dir, 'prepared', fn_template1)
    fn_stations = op.join(output_dir, 'stations.prepared.txt')
    fn_event = op.join(event_dir, 'event.txt')
    fn_event_yaml = op.join(event_dir, 'event.yaml')

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    p = pile.make_pile(fns, show_progress=False)

    deltat = None
    if sample_rate is not None:
        deltat = 1.0 / sample_rate

    traces_beat = []
    used_stations = []
    for nsl, s in nsl_to_station.items():
        s.set_event_relative_data(event)
        traces = p.all(trace_selector=lambda tr: tr.nslc_id[:3] == nsl)

        if options.out_components == 'rtu':
            pios = s.guess_projections_to_rtu(out_channels=('R', 'T', 'Z'))
        elif options.out_components == 'enu':
            pios = s.guess_projections_to_enu(out_channels=('E', 'N', 'Z'))
        else:
            assert False
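        # each projection maps the station's native channels onto the
        # requested orthogonal output components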

        for (proj, in_channels, out_channels) in pios:

            proc = trace.project(traces, proj, in_channels, out_channels)
            for tr in proc:
                tr_beat = heart.SeismicDataset.from_pyrocko_trace(tr)
                traces_beat.append(tr_beat)
                for ch in out_channels:
                    if ch.name == tr.channel:
                        s.add_channel(ch)

            if proc:
                io.save(proc, fn_waveforms)
                used_stations.append(s)

    stations = list(used_stations)
    util.ensuredirs(fn_stations)
    model.dump_stations(stations, fn_stations)
    model.dump_events([event], fn_event)

    from pyrocko.guts import dump
    dump([event], filename=fn_event_yaml)

    utility.dump_objects(op.join(cwd, 'seismic_data.pkl'),
                         outlist=[stations, traces_beat])
    logger.info('prepared waveforms from %i stations' % len(stations))
Example #44
    width=30., height=30.,
    show_grid=False,
    show_topo=True,
    color_dry=(238, 236, 230),
    topo_cpt_wet='light_sea_uniform',
    topo_cpt_dry='light_land_uniform',
    illuminate=True,
    illuminate_factor_ocean=0.15,
    show_rivers=False,
    show_plates=True)

# Draw some larger cities covered by the map area
m.draw_cities()

# Gather latitude, longitude and labels of the stations
stations = model.load_stations('stations_deadsea.pf')
lats = [s.lat for s in stations]
lons = [s.lon for s in stations]
labels = ['.'.join(s.nsl()) for s in stations]

# Stations as black triangles. Genuine GMT commands can be passed through the
# map's gmt attribute. The last argument of the psxy call passes the map's
# projection system.
m.gmt.psxy(in_columns=(lons, lats), S='t20p', G='black', *m.jxyr)

# Station labels
for i in range(len(stations)):
    m.add_label(lats[i], lons[i], labels[i])


# Load events from catalog file (generated using catalog.GlobalCMT()
# to download from www.globalcmt.org)
from pyrocko import pile, io, model, util
import sys

"""
Extrahiere miniseed-Daten um Events, die aus Katalog ausgelesen werden
"""

# sys.argv gives access to everything that follows the program call.
dirs = sys.argv[1:]

# load the stations file:
stats = model.load_stations('/scratch/local1/doctar/meta/stations.txt')

# read the events:
events = model.load_events('/home/zmaw/u254061/master/event_marker_IPMA.txt')

# Build a pile from all miniseed files in the directories given on the command line:
outpile = pile.make_pile(dirs)

f = open('/home/zmaw/u254061/master/event_marker_IPMA.txt')
for line in f:
    if line.lstrip().startswith('#'):
        continue
    toks = line.split()
    timedate, timetime = toks[1], toks[2]
    gtime = util.str_to_time(timedate + ' ' + timetime)

    trange = [gtime-100, gtime+1000]
    new_pile = []
    for traces in outpile.chopper(trange[0], trange[1], load_data=True, degap=False):
        if traces:
matplotlib.rc('font', **font)

fig = plt.figure(figsize=(4,3), dpi=160) #, frameon=False, tight_layout=True)
ax = fig.add_subplot(111)

km = 1000

ttt = collections.defaultdict(dict)

m = cake.load_model('earthmodel_castor.txt')
phases_def = ['Pv12.5p', 'P', 'p', 'pP', 's', 'S']
phases = [cake.PhaseDef(p) for p in phases_def]

event = model.Event(load=pjoin(derec_home, 'mseeds', 'castor',\
                               'castor_event_2013-10-01.dat'))
stations = model.load_stations(pjoin(derec_home, 'mseeds', 'castor', \
                                     'stations.txt'))

stations_distance = collections.defaultdict()
for s in stations:
    d = orthodrome.distance_accurate50m_numpy(s.lat, s.lon, event.lat, event.lon)
    stations_distance[float(d[0])] = s.station


colormap = collections.defaultdict()
num_colors = 9
#num_colors = 12
colors = list(num.linspace(0,254,num_colors))
cmap = plt.get_cmap('hsv')
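# epicentral distances (10-200 km), converted to degrees for cake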
distances = num.linspace(10, 200, 120)*km*cake.m2d
toplot = []
zstart = 2000
    store = engine.get_store()
    config = store.config 
    model = config.earthmodel_1d
    stf = STF(magnitude2risetimearea, model=model)

    # Gather this information to create the swarm:
    swarm = Swarm(geometry=geometry,
                  timing=timing,
                  mechanisms=mechanisms,
                  magnitudes=magnitudes,
                  stf=stf)


    # setup stations/targets:
    #stats = load_stations(webnet+'/meta/stations.pf')
    stats = load_stations('stations.pf')
    #stats = []
    # Scrutinize the swarm using matplotlib

    noise = Noise(files='/media/usb/webnet/mseed/2008')

    # convert loaded stations to targets (see function at the top).
    #targets = guess_targets_from_stations(stats)
    targets = get_targets(stats, noise.data_pile, store_id=store_id)
    Visualizer(swarm, stats)

    # Processing that data will return a pyrocko.gf.seismosizer.Response object.
    response = engine.process(sources=swarm.get_sources(),
                              targets=targets)

    # Save the events
                    _map.gmt.psmeca(
                        S='%s%g' % ('m', size_cm*2.0),
                        #G=gmtpy.color(colors[e.cluster]),
                        #G=colors[i_e],
                        G='red',
                        C='3p,0/0/0',
                        #W='thinnest,%i/%i/%i' % (255, 255, 255),
                        #L='thinnest,%i/%i/%i' % (255, 255, 255),
                        in_rows=[data],
                        *_map.jxyr)
    _map.save(outpath=outfn)

if __name__ == '__main__':
    e = list(Event.load_catalog(filename='event.pf'))[0]
    #stations = model.load_stations('arrays.pf')
    stations = model.load_stations('array_center.pf')
    color_wet = [200, 200, 200]
    color_dry = [253, 253, 253]
    params = MapParameters(lat=e.lat, lon=e.lon, radius=8000000,
                           outfn='array-map-new.pdf', stations=stations,
                           events=[e],
                           show_topo=False,
                           show_grid=False,
                           color_wet=color_wet,
                           color_dry=color_dry)
    make_map(map_parameters=params)
    print('.' * 40)
    fdomain_station_locs = []
    #with open('northkoreaplot/stations.table.mec', 'r') as f:
    #    for line in f.readlines():
    #        lat, lon, c = line.split()
    #        fdomain_station_locs.append((float(lat), float(lon)))

if __name__=="__main__":
    import argparse
    from pyrocko.model import Event, load_stations

    parser = argparse.ArgumentParser('suggest a store for P phases only')
    parser.add_argument('--stations',
                        help='stations file')
    parser.add_argument('--events',
                        help='event file')
    parser.add_argument('--force',
                        action='store_true',
                        help='force_overwrite')
    parser.add_argument('--superdir',
                        default='.',
                        help='directory where to put the store')
    parser.add_argument('--number_of_distances',
                        help='number of distances between outer grid nodes '
                             'in GFDB',
                        default=2)

    args = parser.parse_args()

    stations = load_stations(args.stations)
    assert len(stations) == 1, 'expected exactly one station'
    s = stations[0]

    events = list(Event.load_catalog(args.events))

    propose_store(s, events, superdir=args.superdir, force=args.force)
Example #50
        data_base_dir = pjoin(derec_home, 'mseeds', 'doctar', 'doctar_2011-11-01')
        stations_file = 'stations.txt'
        event_file = 'doctar_2011-11-01_quakefile.dat'

    elif test_type=='castor':
        stf = [[0.,1.], [0.,1.]]
        store_id = 'castor'
        data_base_dir = pjoin(derec_home, 'mseeds', 'castor')
        stations_file = 'stations.txt'
        event_file = 'castor_event_2013-10-01.dat'

    phase_ids_start = ['p', 'P', 'pP']  # '|'.join(du.get_tabulated_phases(engine,
                      #                                 store_id, 
                      #                                 ['p','P']))

    stations = model.load_stations(pjoin(data_base_dir, stations_file))

    targets = du.stations2targets(stations, store_id)

    event = model.Event(load=pjoin(data_base_dir, event_file))
    _ref_source = DCSource.from_pyrocko_event(event)
    print('reference source magnitude:', _ref_source.magnitude)

    if test_type=='doctar':
        targets = [t for t in targets if t.distance_to(_ref_source) < 80000.]

    #depths = num.linspace(_ref_source.depth-dz, _ref_source.depth+dz, num_depths)
    offset = 4000

    depths = du.drange(600, 8000, 200)
    print('depths: ', depths)