Example #1
def start_seismosizer(gfdb_path=None,
                      gfdb=None,
                      event=None,
                      stations=None,
                      receivers=None,
                      local_interpolation='bilinear',
                      spacial_undersampling=[1, 1],
                      effective_dt=1,
                      crustal_thickness_limit=None,
                      constraining_planes=None,
                      hosts=['localhost'],
                      balance_method='123321',
                      verbose=False):

    assert (stations is None) != (receivers is None), \
        'either `receivers` or `stations` argument should be given.'
    assert (gfdb_path is None) != (gfdb is None), \
        'either `gfdb_path` or `gfdb` argument should be given.'

    if gfdb_path is not None:
        database = gfdb_mod.Gfdb(gfdb_path)
    else:
        database = gfdb

    seis = seismosizer.Seismosizer(hosts, balance_method)
    if verbose:
        seis.set_verbose('T')
    seis.set_database(database)
    seis.set_effective_dt(effective_dt)
    seis.set_local_interpolation(local_interpolation)
    seis.set_spacial_undersampling(*spacial_undersampling)
    seis.set_source_location(event.lat, event.lon, event.time)

    if crustal_thickness_limit is not None:
        seis.set_source_crustal_thickness_limit(crustal_thickness_limit)

    if constraining_planes is not None:
        values = []
        for plane in constraining_planes:
            for vect in plane:
                values.extend(vect)

        seis.set_source_constraints(*values)

    if stations is not None:
        receivers = [
            station_to_receiver(station, kiwi_component_map='default')
            for station in stations
        ]

    seis.set_receivers(receivers)

    return seis
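
A hypothetical call, assuming a pyrocko-style event and station list loaded elsewhere; the paths and parameter values below are placeholders, not from the original source:

    from pyrocko import model

    event = model.Event(lat=52.0, lon=9.0, time=0.0)
    stations = model.load_stations('stations.txt')

    seis = start_seismosizer(
        gfdb_path='gfdb/db',
        event=event,
        stations=stations,
        effective_dt=0.5,
        verbose=True)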
Example #2
    def __init__(self):
        # Set up receiver configuration.

        tab = '''
        HH1  58.500 12.5000  0
        HH2  48.500 12.5000  0
        HH3  48.500  3.5000  0
        HH4  58.500  3.5000  0
        '''.strip()

        receivers = []
        for line_tab in tab.split('\n'):
            station, lat, lon, depth = line_tab.split()
            r = receiver.Receiver(lat,
                                  lon,
                                  components='neu',
                                  name='.%s.' % station)
            receivers.append(r)

        stations = receivers_to_stations(receivers)
        model.dump_stations(stations, 'reference_stations.txt')

        # Composition of the source
        self.olat, self.olon = 52.0000, 9.00000
        self.otime = util.str_to_time('1986-08-22 07:00:00')

        # The gfdb can be chosen within snuffler;
        # see the 'add_parameter' method.
        db = gfdb.Gfdb('fomostos/local1/local1')

        seis = seismosizer.Seismosizer(hosts=['localhost'])
        seis.set_database(db)
        seis.set_effective_dt(db.dt)
        seis.set_local_interpolation('bilinear')
        seis.set_receivers(receivers)
        seis.set_source_location(self.olat, self.olon, self.otime)
        seis.set_source_constraints(0, 0, 0, 0, 0, -1)
        self.seis = seis
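
Note that station, lat, lon and depth come out of split() as strings; receiver.Receiver evidently accepts them as given here, but an explicit conversion states the intent (equivalent sketch):

    station, lat, lon, depth = line_tab.split()
    r = receiver.Receiver(float(lat), float(lon),
                          components='neu',
                          name='.%s.' % station)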
Example #3
    1.2 0.7
    1.3 0.8
    1.4 0.9
    1.5 1
    1.6 1
    1.7 1
    1.8 1
    1.9 1
    ''')

    feed([
        str(x) for x in [pbin('gfdb_build'), 'benchdb', 1, 200, 200, 10, 0.1,
                         50., 50., 50., 0.]
    ])

    db = gfdb.Gfdb('benchdb')

    p = Popen(
        [pbin('gfdb_build_ahfull'), 'benchdb', 'material.table', 'stf.table'],
        stdin=PIPE)

    for ix in xrange(db.nx):
        print 'distance', db.firstx + ix * db.dx
        for iz in xrange(db.nz):
            x = db.firstx + ix * db.dx
            z = db.firstz + iz * db.dz
            p.stdin.write("%g %g T T\n" % (x, z))
            p.stdin.flush()

    p.stdin.close()
    p.wait()
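
For reference, a commented reading of the gfdb_build call above; the argument labels are inferred from the matching call in Example #5, so treat them as an assumption:

    # gfdb_build  benchdb  1        200  200  10  0.1  50.  50.  50.     0.
    #             path     nchunks  nx   nz   ng  dt   dx   dz   firstx  firstz

With this grid, the node loop feeds 200 x 200 (distance, depth) pairs to gfdb_build_ahfull, one per line of its stdin.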
Example #4
def prepare(config, kiwi_config, rapid_config, event_names):

    if config.has('gfdb_path'):
        db = gfdb.Gfdb(config.path('gfdb_path'))
        deltat = db.dt
        min_dist = db.firstx + config.gfdb_margin
        max_dist = db.firstx + (db.nx-1)*db.dx - config.gfdb_margin
    else:
        if config.has('deltat'):
            deltat = config.deltat
        else:
            deltat = None
        db = None
        min_dist = None
        max_dist = None
    
    
    for event_name in event_names:
        
        logger.info('Preparing event %s' % event_name)
        
        config.event_name = event_name
        
        override_event = None

        sw = util.Stopwatch()
        if config.has('seed_volume'):
            try:
                acc = rdseed.SeedVolumeAccess(config.path('seed_volume'))
            except rdseed.SeedVolumeNotFound:
                logger.error('SEED volume not found for event %s' % event_name)
                continue
            
            fne = config.path('seed_volume') + '.event'
            
            if os.path.exists(fne):
                override_event = model.Event(load=fne)
               
        elif config.has('edump_data_dir'):
            acc = edump_access.EventDumpAccess(config.path('edump_data_dir'))
            
            
        elif config.has('custom_accessor'):
            gargs = []
            for arg in config.custom_accessor_args:
                gargs.append( config.mkpath(arg) )
            
            if config.has('plugins_dir'):
                pd = config.path('plugins_dir')
                if pd not in sys.path: sys.path[0:0] = [ pd ]
                
            module_name, class_name = config.custom_accessor
            module = __import__(module_name)
            acc_class = getattr(module, class_name)
            acc = acc_class(*gargs)
            
        else:
            sys.exit('config has neither entry "seed_volume" nor "edump_data_dir" nor "custom_accessor"')
            
       
        if override_event:
            event = override_event
        else:
            events = acc.get_events()
            if not events:
                logger.error('No event metainformation found for %s\n' % event_name)
                continue
       
            event = events[0]
        
        event.name = event_name
        
        stations = acc.get_stations(relative_event=event)
        
        chan_count = {}
        
        processed_traces = []
        
        displacement_limit = None
        if config.has('displacement_limit'):
            displacement_limit = config.displacement_limit
                
        extend = None
        if config.has('restitution_pre_extend'):
            extend = config.restitution_pre_extend
        
        crop = True
        if config.has('restitution_crop'):
            crop = config.restitution_crop
        
        projections = None
        if config.has('projection_functions'):
            projections = config.projection_functions
            
        rotations = None
        if config.has('rotation_functions'):
            rotations = config.rotation_functions
        
        whitelist = lambda tr: True
        if config.has('streams_badness_dir') and config.has('streams_badness_limit'):
            badness_dir = config.path('streams_badness_dir')
            badness_limit = config.streams_badness_limit
            badness = get_badness(badness_dir, event.time)
            whitelist = lambda tr: tr.nslc_id in badness and badness[tr.nslc_id] <= badness_limit
            
        station_filter = lambda tr: True
        if config.has('station_filter'):
            station_filter = lambda tr: config.station_filter(stations[get_nsl(tr)])
            
        restitution_off_hack = False
        if config.has('restitution_off_hack'):
            restitution_off_hack = config.restitution_off_hack
            
        trace_selector = lambda tr: station_filter(tr) and whitelist(tr)
        out_stations = {}
        for traces in acc.iter_displacement_traces(
                config.restitution_fade_time, 
                config.restitution_frequencyband,
                deltat=deltat,
                rotations=rotations,
                projections=projections,
                relative_event=event,
                maxdisplacement=displacement_limit,
                allowed_methods=config.restitution_methods,
                trace_selector=trace_selector,
                extend=extend,
                crop=crop,
                out_stations=out_stations,
                restitution_off_hack=restitution_off_hack,
                redundant_channel_priorities=config.get_or_none('redundant_channel_priorities')):
                        
            for tr in traces:
                
                station = stations[get_nsl(tr)]
                
                if min_dist is not None and station.dist_m < min_dist:
                    logger.warn('Station %s is too close to the source (distance = %g m, limit = %g m)' % (station.nsl_string(), station.dist_m, min_dist))
                    continue
                if max_dist is not None and station.dist_m > max_dist:
                    logger.warn('Station %s is too far from the source (distance = %g m, limit = %g m)' % (station.nsl_string(), station.dist_m, max_dist))
                    continue
                span_complete = True
                
                timings = []
                if config.has('check_span'):
                    timings = config.check_span
                    
                if config.has('cut_span'):
                    timings.extend(config.cut_span)
                
                for timing in timings:
                    tt = timing(station.dist_m, event.depth)
                    if tt is None:
                        logger.warn('Trace does not contain all required arrivals: %s.%s.%s.%s (timing not present)' % tr.nslc_id)
                        span_complete = False
                        break
                    
                    arrival_time = event.time + tt
                    if not (tr.tmin <= arrival_time <= tr.tmax):
                        logger.warn('Trace does not contain all required arrivals: %s.%s.%s.%s (timing not in trace)' % tr.nslc_id)
                        acc.problems().add('gappy', tr.full_id)
                        span_complete = False
                        break
                
                if not span_complete:
                    continue
                
                if config.has('cut_span'):
                    cs = config.cut_span
                    tmin = event.time + cs[0](station.dist_m, event.depth)
                    tmax = event.time + cs[1](station.dist_m, event.depth)

                    tr.chop(tmin, tmax, inplace=True)
                processed_traces.append(tr)
                
                if tr.channel not in chan_count:
                    chan_count[tr.channel] = 0
                    
                chan_count[tr.channel] += 1
                
        # use only one sensor at each station; use lexically first
        sstations = sorted(out_stations.values(), key=get_nsl)
        xstations = {}
        have = {}
        for station in sstations:
            if get_ns(station) not in have:
                have[get_ns(station)] = 1
                xstations[get_nsl(station)] = station
        
        if kiwi_config is not None:
            save_kiwi_dataset(acc, xstations, processed_traces, event, kiwi_config)
        
        if rapid_config is not None:
            save_rapid_dataset(acc, xstations, processed_traces, event, rapid_config)
        
        for k,v in chan_count.iteritems():
            logger.info( 'Number of displacement traces for channel %s: %i\n' % (k,v) )
            
        if config.has('raw_trace_path'):
            io.save(acc.get_pile().all(trace_selector=trace_selector), config.path('raw_trace_path'))
            
        if config.has('problems_file'):
            acc.problems().dump(config.path('problems_file'))
            
        logger.info( 'Stopwatch: %5.1f s' % sw() )
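
The per-timing span check inside the trace loop reduces to a small predicate; restated in isolation (function name invented for illustration):

    def contains_arrival(tr, event_time, tt):
        # A trace covers an arrival if event time plus traveltime
        # falls inside the trace's time span.
        if tt is None:
            return False
        t = event_time + tt
        return tr.tmin <= t <= tr.tmax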
Example #5
def command_export(args):
    from subprocess import Popen, PIPE

    try:
        from tunguska import gfdb
    except ImportError as err:
        die('the kiwi tools must be installed to use this feature', err)

    def setup(parser):
        parser.add_option('--nchunks',
                          dest='nchunks',
                          type='int',
                          default=1,
                          metavar='N',
                          help='split output gfdb into N chunks')

    parser, options, args = cl_parse('export', args, setup=setup)

    show_progress = True

    if len(args) not in (1, 2):
        parser.print_help()
        sys.exit(1)

    target_path = args.pop()
    if os.path.isdir(target_path):
        target_path = os.path.join(target_path, 'kiwi_gfdb')
        logger.warn('exported gfdb will be named as "%s.*"' % target_path)

    source_store_dir = get_store_dir(args)

    source = gf.Store(source_store_dir, 'r')
    config = source.config

    if not isinstance(config, gf.meta.ConfigTypeA):
        die('only stores of type A can be exported to Kiwi format')

    if os.path.isfile(target_path + '.index'):
        die('destination already exists')

    cmd = [
        str(x) for x in [
            'gfdb_build', target_path, options.nchunks, config.ndistances,
            config.nsource_depths, config.ncomponents, config.deltat,
            config.distance_delta, config.source_depth_delta,
            config.distance_min, config.source_depth_min
        ]
    ]

    p = Popen(cmd, stdin=PIPE)
    p.communicate()

    out_db = gfdb.Gfdb(target_path)

    if show_progress:
        pbar = util.progressbar('exporting',
                                config.nrecords / config.ncomponents)

    for i, (z, x) in enumerate(config.iter_nodes(level=-1)):

        data_out = []
        for ig in range(config.ncomponents):
            try:
                tr = source.get((z, x, ig), interpolation='off')
                data_out.append((tr.t, tr.data * config.factor))

            except gf.store.StoreError as e:
                logger.warn('cannot get %s, (%s)' % (sindex((z, x, ig)), e))
                data_out.append(None)

        # Give no-data (empty) traces a single zero-valued sample at a compatible time.
        tmins = [
            entry[0][0] for entry in data_out
            if entry is not None and entry[0].size != 0
        ]

        if tmins:
            tmin = min(tmins)
            for entry in data_out:
                if entry is not None and entry[0].size == 0:
                    entry[0].resize(1)
                    entry[1].resize(1)
                    entry[0][0] = tmin
                    entry[1][0] = 0.0

        out_db.put_traces_slow(x, z, data_out)

        if show_progress:
            pbar.update(i + 1)

    if show_progress:
        pbar.finish()

    source.close()
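
Assuming this is registered as a fomosto-style subcommand (only cl_parse is visible here, so the command name is an assumption), an invocation would look like:

    fomosto export --nchunks=2 path/to/store kiwi_out

The exported database is then addressed as kiwi_out.index plus kiwi_out.*.chunk files, the same naming that Example #6 strips off on import and that the .index existence check above guards against.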
Example #6
def command_import(args):
    try:
        from tunguska import gfdb
    except ImportError:
        die('the kiwi tools must be installed to use this feature')

    parser, options, args = cl_parse('import', args)

    show_progress = True

    if not len(args) == 2:
        parser.print_help()
        sys.exit(1)

    source_path, dest_store_dir = args

    if os.path.isdir(source_path):
        source_path = pjoin(source_path, 'db')

    source_path = re.sub(r'(\.\d+\.chunk|\.index)$', '', source_path)

    db = gfdb.Gfdb(source_path)

    config = gf.meta.ConfigTypeA(id='imported_gfs',
                                 distance_min=db.firstx,
                                 distance_max=db.firstx + (db.nx - 1) * db.dx,
                                 distance_delta=db.dx,
                                 source_depth_min=db.firstz,
                                 source_depth_max=db.firstz +
                                 (db.nz - 1) * db.dz,
                                 source_depth_delta=db.dz,
                                 sample_rate=1.0 / db.dt,
                                 ncomponents=db.ng)

    try:
        gf.store.Store.create(dest_store_dir, config=config)
        dest = gf.Store(dest_store_dir, 'w')
        if show_progress:
            pbar = util.progressbar(
                'importing', dest.config.nrecords / dest.config.ncomponents)

        for i, args in enumerate(dest.config.iter_nodes(level=-1)):
            source_depth, distance = [float(x) for x in args]
            traces = db.get_traces_pyrocko(distance, source_depth)
            ig_to_trace = dict((tr.meta['ig'] - 1, tr) for tr in traces)
            for ig in range(db.ng):
                if ig in ig_to_trace:
                    tr = ig_to_trace[ig]
                    gf_tr = gf.store.GFTrace(tr.get_ydata(),
                                             int(round(tr.tmin / tr.deltat)),
                                             tr.deltat)

                else:
                    gf_tr = gf.store.Zero

                dest.put((source_depth, distance, ig), gf_tr)

            if show_progress:
                pbar.update(i + 1)

        if show_progress:
            pbar.finish()

        dest.close()

    except gf.StoreError as e:
        die(e)
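
The grid-to-config mapping in the ConfigTypeA constructor is plain linear arithmetic; as a standalone sketch (helper name invented):

    def gfdb_extents(db):
        # Last grid node = first node + (count - 1) * spacing.
        return dict(
            distance_min=db.firstx,
            distance_max=db.firstx + (db.nx - 1) * db.dx,
            source_depth_min=db.firstz,
            source_depth_max=db.firstz + (db.nz - 1) * db.dz)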
Example #7
    def make_seismosizer(
        self,
        gfdb_path=None,
        gfdb=None,
        local_interpolation='bilinear',
        spacial_undersampling=[1, 1],
        effective_dt=1,
        crustal_thickness_limit=None,
        constraining_planes=None,
        shifts=None,
        blacklist=None,
        xblacklist=None,
        hosts=['localhost'],
        balance_method='123321',
        verbose=False,
    ):

        assert (gfdb_path is None) != (gfdb is None), \
            'either `gfdb_path` or `gfdb` argument should be given'

        if gfdb_path is not None:
            database = gfdb_mod.Gfdb(gfdb_path)
        else:
            database = gfdb

        seis = seismosizer.Seismosizer(hosts, balance_method)
        if verbose:
            seis.set_verbose('T')
        seis.set_database(database)
        seis.set_effective_dt(effective_dt)
        seis.set_local_interpolation(local_interpolation)
        seis.set_spacial_undersampling(*spacial_undersampling)
        seis.set_source_location(*self.get_source_location())

        if crustal_thickness_limit is not None:
            seis.set_source_crustal_thickness_limit(crustal_thickness_limit)

        if constraining_planes is not None:
            values = []
            for plane in constraining_planes:
                for vect in plane:
                    values.extend(vect)

            seis.set_source_constraints(*values)

        seis.set_receivers(self.get_receivers())

        if self._have_observations:
            self._ref_seismogram_stem = seis.tempdir + '/reference-from-eventdata'
            self.put_ref_seismograms()
            seis.set_ref_seismograms(self._ref_seismogram_stem, 'mseed')

        if blacklist:
            seis.blacklist_receivers(blacklist)
        if xblacklist:
            seis.xblacklist_receivers(xblacklist)

        # apply reference seismograms shifts
        if self._have_observations:
            if shifts is not None:
                seis.shift_ref_seismograms(shifts)

        return seis
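
Both this method and Example #1 use mutable default arguments (spacial_undersampling=[1, 1], hosts=['localhost']). That is harmless here because the lists are never mutated, but the conventional idiom avoids sharing one list object across calls; a sketch:

    def make_seismosizer(self, spacial_undersampling=None, hosts=None, **kwargs):
        if spacial_undersampling is None:
            spacial_undersampling = [1, 1]
        if hosts is None:
            hosts = ['localhost']
        # ... continue as above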
Example #8
def kiwi_setup(options, config, event_names):
    from tunguska import prepare, gfdb

    conf = config['iris_pull_config']
    kiwi_conf = config['kiwi_config']

    if kiwi_conf.has('gfdb_path'):
        db = gfdb.Gfdb(kiwi_conf.path('gfdb_path'))
        deltat = db.dt
    else:
        if kiwi_conf.has('deltat'):
            deltat = kiwi_conf.deltat
        else:
            deltat = None
        db = None

    if not event_names:
        sys.exit('need event name')

    for event_name in event_names:
        conf.event_name = event_name
        kiwi_conf.event_name = event_name

        event = _get_event_infos(conf)
        stations = _get_stations(conf)
        for station in stations:
            station.set_event_relative_data(event)

        traces = _get_prepared_traces(conf, stations)
        raw_traces = _get_raw_traces(conf, stations)

        p = pile.Pile()
        buf = pile.MemTracesFile(None, traces)
        p.add_file(buf)

        processed = []
        for station in stations:

            tt1 = kiwi_conf.cut_span[0](station.dist_m, event.depth)
            tt2 = kiwi_conf.cut_span[1](station.dist_m, event.depth)
            if None in (tt1, tt2):
                continue

            tmin = tt1 + event.time
            tmax = tt2 + event.time

            traces = p.all(
                tmin=tmin,
                tmax=tmax,
                want_incomplete=False,
                trace_selector=lambda tr: get_nsl(tr) == get_nsl(station))

            for proj, in_channels, out_channels in station.guess_projections_to_rtu(
                    out_channels=('R', 'T', 'Z')):
                proc = trace.project(traces, proj, in_channels, out_channels)
                processed.extend(proc)
                for tr in proc:
                    for ch in out_channels:
                        if ch.name == tr.channel:
                            station.add_channel(ch)

        for tr in processed:
            if deltat is not None:
                try:
                    tr.downsample_to(deltat, snap=True, allow_upsample_max=5)
                except util.UnavailableDecimation as e:
                    logger.warn('Cannot downsample %s.%s.%s.%s: %s' %
                                (tr.nslc_id + (e, )))
                    continue

        stations_by_nsl = dict((get_nsl(s), s) for s in stations)

        acc = DummyAcc(raw_traces)
        prepare.save_kiwi_dataset(acc, stations_by_nsl, processed, event,
                                  kiwi_conf)
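
For illustration, with hypothetical cut_span functions describing a window around a 5 km/s arrival, the offsets computed above would be:

    # Hypothetical window: 60 s before a 5 km/s arrival to 400 s after it.
    cut_span = (lambda dist_m, depth: dist_m / 5000. - 60.,
                lambda dist_m, depth: dist_m / 5000. + 400.)

    tt1 = cut_span[0](100e3, 10e3)   # -40.0 s relative to event time
    tt2 = cut_span[1](100e3, 10e3)   # 420.0 s

So for a station at 100 km the extracted window is [event.time - 40, event.time + 420].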