Example #1
    def testGlobalCMT(self):
        def is_the_haiti_event(ev):
            assert near(ev.magnitude, 7.0, 0.1)
            assert near(ev.lat, 18.61, 0.01)
            assert near(ev.lon, -72.62, 0.01)
            assert near(ev.depth, 12000., 1.)
            assert ev.region.lower() == 'haiti region'

        cat = catalog.GlobalCMT()

        tmin = util.str_to_time('2010-01-12 21:50:00')
        tmax = util.str_to_time('2010-01-13 03:17:00')

        names = cat.get_event_names(time_range=(tmin, tmax), magmin=5.)
        ident = None
        for name in names:
            ev = cat.get_event(name)
            if ev.magnitude > 7:
                is_the_haiti_event(ev)
                ident = ev.name

        assert ident is not None
        cat.flush()
        ev = cat.get_event(ident)
        is_the_haiti_event(ev)
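The test relies on a helper near(value, expected, tolerance) that is not shown here; a minimal sketch of what the asserts above imply:

def near(value, expected, tolerance):
    # absolute-difference comparison within the given tolerance
    return abs(value - expected) <= tolerance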
Example #2
def read_data(fn):
    """Read a bulletin from the IG CAS website.

    Creates objects of type ResidualMarker (subclass of
    pyrocko.gui_util.PhaseMarker) and pyrocko.model.Event.
    """
    picks = []
    events = []
    with open(fn, 'r') as f:
        for line in f.readlines():
            (odate, eventid, otime, lon, _, lat, _, _, depth, _, _, mag,
             stat, phase_id, polarity, date, t, a) = line.split()
            otime = util.str_to_time('%s %s'%(odate, otime))
            event = model.Event(lat=float(lat),
                                lon=float(lon),
                                depth=float(depth),
                                time=otime)
            t_pick = util.str_to_time('%s %s'%(date, t))
            pick = ResidualMarker(event=event,
                                  tmin=float(t_pick),
                                  tmax=float(t_pick), 
                                  nslc_ids=[('', stat, '', '*'),],
                                  phasename=phase_id,
                                  polarity=int(polarity),
                                  event_time=otime,
                                  event_hash=event.get_hash())
            
            if abs(t_pick - otime) > 30:
                continue
            picks.append(pick)
            events.append(event)

    return picks, events
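For orientation, a hypothetical bulletin line carrying the 18 whitespace-separated fields the parser above unpacks (values and column meanings are illustrative, not taken from a real IG CAS bulletin):

# odate eventid otime lon _ lat _ _ depth _ _ mag stat phase polarity date t a
line = ('2010-04-11 1001 22:08:15.5 -3.69 E 37.10 N F 625.5 km X 6.3 '
        'STA1 P 1 2010-04-11 22:08:20.2 0.5')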
Example #3
    def testUSGS(self):

        def is_the_haiti_event(ev):
            assert near(ev.magnitude, 7.0, 0.1)
            assert near(ev.lat, 18.443, 0.01)
            assert near(ev.lon, -72.571, 0.01)
            assert near(ev.depth, 13000., 1.)

        cat = catalog.USGS()

        tmin = util.str_to_time('2010-01-12 21:50:00')
        tmax = util.str_to_time('2010-01-13 03:17:00')

        names = cat.get_event_names(time_range=(tmin, tmax), magmin=5.)
        assert len(names) == 13
        ident = None
        for name in names:
            ev = cat.get_event(name)
            if ev.magnitude >= 7.:
                is_the_haiti_event(ev)
                ident = ev.name

        assert ident is not None
        cat.flush()
        ev = cat.get_event(ident)
        is_the_haiti_event(ev)
Example #5
def read_enhanced_sac_pz(filename):
    zeros, poles, constant, comments = pz.read_sac_zpk(filename=filename, get_comments=True)
    d = {}
    for line in comments:
        toks = line.split(':', 1)
        if len(toks) == 2:
            temp = toks[0].strip('* \t')
            for k in ('network', 'station', 'location', 'channel', 'start', 'end', 
                      'latitude', 'longitude', 'depth', 'elevation', 'dip', 'azimuth',
                      'input unit', 'output unit'):
                if temp.lower().startswith(k):
                    d[k] = toks[1].strip()

    response = trace.PoleZeroResponse(zeros, poles, constant)

    try:
        channel = Channel(
            nslc=(d['network'], d['station'], d['location'], d['channel']),
            tmin=util.str_to_time(d['start'], format='%Y-%m-%dT%H:%M:%S'),
            tmax=util.str_to_time(d['end'], format='%Y-%m-%dT%H:%M:%S'),
            lat=float(d['latitude']),
            lon=float(d['longitude']),
            elevation=float(d['elevation']),
            depth=float(d['depth']),
            dip=float(d['dip']),
            azimuth=float(d['azimuth']),
            input_unit=d['input unit'],
            output_unit=d['output unit'],
            response=response)
    except KeyError:
        raise EnhancedSacPzError(
            'cannot get all required information from file %s' % filename)

    return channel
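The comment block this parser expects resembles the header of IRIS-style SAC pole-zero files; a sketch with illustrative values (the parser matches keys case-insensitively by prefix, so the parenthesized SAC field names are tolerated):

# * NETWORK   (KNETWK): GR
# * STATION    (KSTNM): BSEG
# * LOCATION   (KHOLE):
# * CHANNEL   (KCMPNM): BHZ
# * START             : 2008-09-10T00:00:00
# * END               : 2599-12-31T23:59:59
# * LATITUDE          : 53.9
# * LONGITUDE         : 10.3
# * ELEVATION         : 50.0
# * DEPTH             : 0.0
# * DIP               : 0.0
# * AZIMUTH           : 0.0
# * INPUT UNIT        : M
# * OUTPUT UNIT       : COUNT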
Example #6
def gen_random_tectonic_event(
    scenario_id,
    magmin=-0.5,
    magmax=3.,
    depmin=5,
    depmax=10,
    latmin=49.09586,
    latmax=49.25,
    lonmin=8.0578,
    lonmax=8.20578,
    timemin=util.str_to_time('2007-01-01 16:10:00.000'),
    timemax=util.str_to_time('2020-01-01 16:10:00.000')):

    name = "scenario" + str(scenario_id)
    depth = rand(depmin, depmax) * km
    magnitude = rand(magmin, magmax)
    lat = randlat(latmin, latmax)
    lon = rand(lonmin, lonmax)
    time = rand(timemin, timemax)
    event = model.Event(name=name,
                        lat=lat,
                        lon=lon,
                        magnitude=magnitude,
                        depth=depth,
                        time=time)

    return event
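rand, randlat and km come from the surrounding module; a plausible sketch of these helpers (the area-uniform latitude sampling is an assumption):

import math
import random

km = 1000.

def rand(mi, ma):
    return random.uniform(mi, ma)

def randlat(mi, ma):
    # draw latitude uniformly by area on the sphere, not by degree (assumption)
    return math.degrees(math.asin(random.uniform(
        math.sin(math.radians(mi)), math.sin(math.radians(ma)))))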
Example #8
def readandsplit(infile, box, stations):
    '''
    Load phase files and store data in a myStation.
    Phasename is retrieved from file name: *_*_Phasename,
    where * can be anything.

    :param infile:      name of the file which is to be read as string.
    :param box:         instance of myStationBox, containing myStation instances.
    :param stations:    file as used with snuffler to import station information.
    '''
    refevent = str_to_time("2010-04-11 22:08:15.500")
    phasename = infile.split('_')[2]
    phasename = phasename.split('.')[0]

    phases = open(infile, 'r').readlines()[1:]
    for phase in phases:
        date, time, length, NSLC = phase.split()
        arrival = str_to_time(date + ' ' + time) - refevent
        netw, stat, loc, comp = NSLC.split('.')

        # check if card exists, and if it does, add phase to that card:
        if box.stationInBox(netw, stat):
            tmpCard = box.getStationCardForNetworkStation(netw, stat)
            tmpCard.setPhase(phasename, arrival)
        # if not, create a new card:
        else:
            newCard = myStation(network=netw, station=stat)
            newCard.setPhase(phasename, arrival)
            newCard.setLatLonEleFromStation(stations)
            box.addStationCard(newCard)
Example #9
    def testTimeError(self):
        ok = False
        try:
            util.str_to_time('abc')
        except util.TimeStrError:
            ok = True

        assert ok
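With pytest available, the same check reads more compactly (a sketch, assuming a pytest-based runner):

    def testTimeError(self):
        import pytest
        with pytest.raises(util.TimeStrError):
            util.str_to_time('abc')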
Example #11
def dummy_aware_str_to_time(s, time_format='%Y-%m-%dT%H:%M:%S'):
    try:
        return util.str_to_time(s, format=time_format)
    except util.TimeStrError:
        year = int(s[:4])
        if year > this_year + 100:
            return None  # StationXML contained a dummy end date

        raise
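Usage sketch (this_year is assumed to be a module-level constant holding the current year; values are illustrative):

dummy_aware_str_to_time('2014-01-01T00:00:00')   # -> seconds since epoch
dummy_aware_str_to_time('2599-12-31T23:59:59')   # -> None where the platform's
                                                 #    time range ends before 2599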
Example #12
    def testGeofonMT(self):
        cat = catalog.Geofon()
        tmin = util.str_to_time('2014-01-01 00:00:00')
        tmax = util.str_to_time('2017-01-01 00:00:00')
        events = cat.get_events((tmin, tmax), magmin=8)
        self.assertEqual(len(events), 2)
        mt1, mt2 = [ev.moment_tensor for ev in events]
        angle = moment_tensor.kagan_angle(mt1, mt2)
        self.assertEqual(round(angle - 7.7, 1), 0.0)
Example #14
def dummy_aware_str_to_time(s):
    try:
        return util.str_to_time(s, format='%Y-%m-%dT%H:%M:%S')
    except util.TimeStrError:
        year = int(s[:4])
        if year > this_year + 100:
            return None  # StationXML contained a dummy end date

        raise
Example #15
    def test_scenario_combinations(self):

        generator = scenario.ScenarioGenerator(
            seed=20,
            center_lat=42.6,
            center_lon=13.3,
            radius=60*km,
            target_generators=[
                targets.WaveformGenerator(
                    store_id=ScenarioTestCase.store_id,
                    station_generator=targets.RandomStationGenerator(
                        avoid_water=False),
                    noise_generator=targets.waveform.WhiteNoiseGenerator(),
                    seismogram_quantity='velocity'),
                targets.InSARGenerator(
                    resolution=(20, 20),
                    noise_generator=targets.insar.AtmosphericNoiseGenerator(
                        amplitude=1e-5)),
                targets.GNSSCampaignGenerator(
                    station_generator=targets.RandomStationGenerator(
                        avoid_water=False,
                        channels=None))
                ],
            source_generator=scenario.DCSourceGenerator(
                time_min=util.str_to_time('2017-01-01 00:00:00'),
                time_max=util.str_to_time('2017-01-01 02:00:00'),
                radius=10*km,
                depth_min=1*km,
                depth_max=10*km,
                magnitude_min=3.0,
                magnitude_max=4.5,
                strike=120.,
                dip=45.,
                rake=90.,
                perturbation_angle_std=15.,
                nevents=3)
        )

        engine = gf.get_engine()
        generator.init_modelling(engine)

        for src in scenario.sources.AVAILABLE_SOURCES:
            generator.source_generator = src(
                time_min=util.str_to_time('2017-01-01 00:00:00'),
                time_max=util.str_to_time('2017-01-01 02:00:00'),
                radius=1*km,
                depth_min=1.5*km,
                depth_max=5*km,
                magnitude_min=3.0,
                magnitude_max=4.5)
            generator.source_generator.update_hierarchy(generator)

            generator.get_stations()
            generator.get_waveforms()
            generator.get_insar_scenes()
            generator.get_gnss_campaigns()
Example #16
def command_init(args):
    '''
    Execution of command init
    '''
    def setup(parser):

        parser.add_option('--force',
                          dest='force',
                          action='store_true',
                          help='overwrite existing project directory')

    parser, options, args = cl_parse('init', args, setup)

    if len(args) != 1:
        help_and_die(parser, 'missing argument')
    else:
        fn_config = args[0]

    if not os.path.isfile(fn_config):
        die('config file missing: %s' % fn_config)

    conf = load(filename=fn_config)
    config.check(conf)

    if ((not options.force) and (os.path.isdir(conf.project_dir))):
        die('project dir exists: %s; use force option' % conf.project_dir)
    else:
        if os.path.isdir(conf.project_dir):
            shutil.rmtree(conf.project_dir)
        os.mkdir(conf.project_dir)
        conf.dump(filename=os.path.join(conf.project_dir, 'seiscloud.config'))

        dst = os.path.join(conf.project_dir, 'catalog.pf')

        if conf.catalog_origin == 'file':
            src = conf.catalog_fn
            shutil.copyfile(src, dst)
        else:
            if conf.catalog_origin == 'globalcmt':
                orig_catalog = catalog.GlobalCMT()
            else:  # geofon
                orig_catalog = catalog.Geofon()
            events = orig_catalog.get_events(time_range=(util.str_to_time(
                conf.tmin), util.str_to_time(conf.tmax)),
                                             magmin=conf.magmin,
                                             latmin=conf.latmin,
                                             latmax=conf.latmax,
                                             lonmin=conf.lonmin,
                                             lonmax=conf.lonmax)

            selevents = [ev for ev in events if ev.magnitude <= conf.magmax]
            model.dump_events(selevents, dst)

        print('Project directory prepared "%s"' % conf.project_dir)
Example #17
class SourceGenerator(LocationGenerator):

    nevents = Int.T(default=2)
    avoid_water = Bool.T(
        default=False, help='Avoid sources offshore under the ocean / lakes.')

    radius = Float.T(default=10 * km)

    time_min = Timestamp.T(default=util.str_to_time('2017-01-01 00:00:00'))
    time_max = Timestamp.T(default=util.str_to_time('2017-01-03 00:00:00'))

    magnitude_min = Float.T(default=4.0)
    magnitude_max = Float.T(default=0.0)
    b_value = Float.T(optional=True,
                      help='Gutenberg Richter magnitude distribution.')

    def __init__(self, *args, **kwargs):
        super(SourceGenerator, self).__init__(*args, **kwargs)
        if self.b_value and self.magnitude_max:
            raise Exception('b_value and magnitude_max are mutually exclusive')

    def draw_magnitude(self, rstate):
        if self.b_value is None:
            return rstate.uniform(self.magnitude_min, self.magnitude_max)
        else:
            return moment_tensor.rand_to_gutenberg_richter(
                rstate.rand(), self.b_value, magnitude_min=self.magnitude_min)

    def get_sources(self):
        sources = []
        for ievent in range(self.nevents):
            src = self.get_source(ievent)
            src.name = 'scenario_ev%03d' % (ievent + 1)
            sources.append(src)

        return sources

    def dump_data(self, path):
        fn_sources = op.join(path, 'sources.yml')
        with open(fn_sources, 'w') as f:
            for src in self.get_sources():
                f.write(src.dump())

        fn_events = op.join(path, 'events.txt')
        with open(fn_events, 'w') as f:
            for isrc, src in enumerate(self.get_sources()):
                f.write(src.pyrocko_event().dump())

        return [fn_events, fn_sources]

    def add_map_artists(self, automap):
        pass
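When b_value is set, draw_magnitude samples from a Gutenberg-Richter distribution. A sketch of the inverse-CDF sampling that moment_tensor.rand_to_gutenberg_richter presumably performs for the unbounded law N(>=M) ~ 10**(-b*M):

import math

def rand_to_gutenberg_richter_sketch(rand, b_value, magnitude_min):
    # invert P(M > m) = 10**(-b_value*(m - magnitude_min)) for uniform rand in [0, 1)
    return magnitude_min - math.log10(1.0 - rand) / b_value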
Example #18
    def testTime(self):

        for fmt, accu in zip(
                ['%Y-%m-%d %H:%M:%S.3FRAC', '%Y-%m-%d %H:%M:%S.2FRAC',
                 '%Y-%m-%d %H:%M:%S.1FRAC', '%Y-%m-%d %H:%M:%S'],
                [0.001, 0.01, 0.1, 1.]):

            ta = util.str_to_time('1960-01-01 10:10:10')
            tb = util.str_to_time('2020-01-01 10:10:10')

            for i in range(10000):
                t1 = ta + random() * (tb - ta)
                s = util.time_to_str(t1, format=fmt)
                t2 = util.str_to_time(s, format=fmt)
                assert abs(t1 - t2) < accu
Example #19
    def testIterTimes(self):

        tmin = util.str_to_time('1999-03-20 20:10:10')
        tmax = util.str_to_time('2001-05-20 10:00:05')

        ii = 0
        for ymin, ymax in util.iter_years(tmin, tmax):
            for mmin, mmax in util.iter_months(ymin, ymax):
                ii += 1
                s1 = util.time_to_str(mmin)
                s2 = util.time_to_str(mmax)

        assert ii == 12 * 3
        assert s1 == '2001-12-01 00:00:00.000'
        assert s2 == '2002-01-01 00:00:00.000'
Example #20
    def testTime(self):

        for fmt, accu in zip([
                '%Y-%m-%d %H:%M:%S.3FRAC', '%Y-%m-%d %H:%M:%S.2FRAC',
                '%Y-%m-%d %H:%M:%S.1FRAC', '%Y-%m-%d %H:%M:%S'
        ], [0.001, 0.01, 0.1, 1.]):

            ta = util.str_to_time('1960-01-01 10:10:10')
            tb = util.str_to_time('2020-01-01 10:10:10')

            for i in range(10000):
                t1 = ta + random() * (tb - ta)
                s = util.time_to_str(t1, format=fmt)
                t2 = util.str_to_time(s, format=fmt)
                assert abs(t1 - t2) < accu
Example #21
    def testIterTimes(self):

        tmin = util.str_to_time('1999-03-20 20:10:10')
        tmax = util.str_to_time('2001-05-20 10:00:05')

        ii = 0
        for ymin, ymax in util.iter_years(tmin, tmax):
            for mmin, mmax in util.iter_months(ymin, ymax):
                ii += 1
                s1 = util.time_to_str(mmin)
                s2 = util.time_to_str(mmax)

        assert ii == 12*3
        assert s1 == '2001-12-01 00:00:00.000'
        assert s2 == '2002-01-01 00:00:00.000'
Example #22
    def __init__(self):
        # Set up receiver configuration.

        tab = '''
        HH1  58.500 12.5000  0
        HH2  48.500 12.5000  0
        HH3  48.500  3.5000  0
        HH4  58.500  3.5000  0
        '''.strip()

        receivers = []
        for line_tab in tab.split('\n'):
            station, lat, lon, depth = line_tab.split()
            r = receiver.Receiver(lat, lon, components='neu', name='.%s.' % station)
            receivers.append(r)

        stations = receivers_to_stations(receivers)
        model.dump_stations(stations, 'reference_stations.txt')

        # Composition of the source
        self.olat, self.olon = 52.0000, 9.00000
        self.otime = util.str_to_time('1986-08-22 07:00:00')

        # The gfdb can be chosen within snuffler.
        # This refers to the 'add_parameter' method.
        db = gfdb.Gfdb('fomostos/local1/local1')

        seis = seismosizer.Seismosizer(hosts=['localhost'])
        seis.set_database(db)
        seis.set_effective_dt(db.dt)
        seis.set_local_interpolation('bilinear')
        seis.set_receivers(receivers)
        seis.set_source_location(self.olat, self.olon, self.otime)
        seis.set_source_constraints(0, 0, 0, 0, 0, -1)
        self.seis = seis
Example #23
    def runParallel(inmodel):

        print('does not work properly. EXIT')
        sys.exit(0)

        ProBar = progressbar.ProgressBar(maxval=iterations).start()
        misfits = {'pMF': [], 'sMF': [], 'ScsMF': [], 'ScssMF': []}

        loadmod = cake.load_model(inmodel)

        for latindx, lat in enumerate(_lats):
            for lonindx, lon in enumerate(_lons):
                for zindex, z in enumerate(_depths):
                    # iteration += 1
                    # start process with one event (depth) and one model:
                    eve = model.Event(
                        lat, lon, str_to_time("2010-04-11 22:08:15.500"),
                        "Spain_Durcal", z, 6.3)
                    ttpdiff, ttsdiff, ttScsdiff, ttScssdiff = \
                        depthfinder.startup(loadmod, eve, maxdist)

                    pMF, sMF, ScsMF, ScssMF = map(
                        lambda x: calculateMisfit(x, maxdist),
                        [ttpdiff, ttsdiff, ttScsdiff, ttScssdiff])

                    resultArray[latindx][lonindx][zindex] = \
                        [pMF, sMF, ScsMF, ScssMF]
                    # update progressbar
                    ProBar.update(iteration)
                    identifierstring = inmodel + '.%s.%s.%s' % (lat, lon, z)

                    results[identifierstring] = misfits

        try:
            output = open('results.p', 'wb')
            pickle.dump(results, output)
        finally:
            output.close()
Example #24
    def test_evalresp(self, plot=False):

        resp_fpath = common.test_data_file('test2.resp')

        freqs = num.logspace(num.log10(0.001), num.log10(10.), num=1000)

        transfer = evalresp.evalresp(
            sta_list='BSEG',
            cha_list='BHZ',
            net_code='GR',
            locid='',
            instant=util.str_to_time('2012-01-01 00:00:00'),
            freqs=freqs,
            units='DIS',
            file=resp_fpath,
            rtype='CS')[0][4]

        pz_fpath = common.test_data_file('test2.sacpz')

        zeros, poles, constant = pz.read_sac_zpk(pz_fpath)

        resp = trace.PoleZeroResponse(zeros, poles, constant)

        transfer2 = resp.evaluate(freqs)

        if plot:
            plot_tfs(freqs, [transfer, transfer2])

        assert numeq(transfer, transfer2, 1e-4)
Example #25
    def __init__(self,
                 structures=[],
                 name='',
                 date=0.,
                 inversion_type='space',
                 m=1.,
                 sigmam=0.,
                 prior_dist='Unif'):
        pattern.__init__(self, name, date, inversion_type, m, sigmam,
                         prior_dist)

        self.t0 = time2dec(date)[0]
        self.seismo = True

        # segments associated to kernel
        self.structures = structures
        if len(self.structures) > 0:
            inversion_type = 'space'
        self.Mstr = len(self.structures)
        # each structure can have several segments
        self.Mseg = sum(x.Mseg for x in self.structures)
        self.segments = flatten([x.segments for x in self.structures])
        # set the event time on every patch
        for x in self.segments:
            x.time = util.str_to_time(self.date)
Example #26
def pdate(s):
    if s.startswith('2599') or s.startswith('2999'):
        return None
    elif s.lower().startswith('no'):
        return None
    else:
        return util.str_to_time(s, format='%Y,%j,%H:%M:%S.OPTFRAC')
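Usage sketch; '2599'/'2999' are placeholder years marking open-ended validity, as in the SAC_PZs filename in example #30 (argument values are illustrative):

pdate('2008,254,00:00:00.0000')   # -> seconds since epoch (%j is day of year)
pdate('2599,365,23:59:59.99999')  # -> None, placeholder for "no end date"
pdate('NO ENDING TIME')           # -> None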
Example #27
def iload_fh(f, time_format='%Y-%m-%dT%H:%M:%S'):
    zeros, poles, constant, comments = pz.read_sac_zpk(file=f,
                                                       get_comments=True)
    d = {}
    for line in comments:
        toks = line.split(':', 1)
        if len(toks) == 2:
            temp = toks[0].strip('* \t')
            for k in ('network', 'station', 'location', 'channel', 'start',
                      'end', 'latitude', 'longitude', 'depth', 'elevation',
                      'dip', 'azimuth', 'input unit', 'output unit'):

                if temp.lower().startswith(k):
                    d[k] = toks[1].strip()

    response = trace.PoleZeroResponse(zeros, poles, constant)

    try:
        yield EnhancedSacPzResponse(
            codes=(d['network'], d['station'], d['location'], d['channel']),
            tmin=util.str_to_time(d['start'], format=time_format),
            tmax=dummy_aware_str_to_time(d['end']),
            lat=float(d['latitude']),
            lon=float(d['longitude']),
            elevation=float(d['elevation']),
            depth=float(d['depth']),
            dip=float(d['dip']),
            azimuth=float(d['azimuth']),
            input_unit=d['input unit'],
            output_unit=d['output unit'],
            response=response)
    except KeyError as e:
        raise EnhancedSacPzError(
            'cannot get all required information "%s"' % e.args[0])
Example #28
def window(lat, lon, lat_source, lon_source, depth_source, nsta, store):

    center = Array_center(lat, lon)
    source_receiver_dist = orthodrome.distance_accurate50m_numpy(
        lat_source, lon_source, lat, lon)

    t_p = np.array([
        store.t("first(p|P)", (depth_source, int(source_receiver_dist[i])))
        for i in range(0, nsta)])
    t_s = np.array([
        store.t("first(s|S)", (depth_source, int(source_receiver_dist[i])))
        for i in range(0, nsta)])

    t_origin = util.str_to_time('2008-02-17 11:06:01.10')

    def win_(t_, t_l, t_r, center):
        wind_i = t_origin + t_ - t_l
        wind_e = t_origin + t_ + t_r
        t_o = -t_ + t_[center]
        return wind_i, wind_e, t_o

    P_wind_i, P_wind_e, t_op = win_(t_p, 5.0, 20.0, center)
    S_wind_i, S_wind_e, t_os = win_(t_s, 2.0, 18.0, center)

    return P_wind_i, P_wind_e, t_op, center  # , S_wind_i, S_wind_e, t_os
Example #29
File: event.py Project: wuxyair/pyrocko
    def oldloadf(file):
        d = {}
        try:
            for line in file:
                if line.lstrip().startswith('#'):
                    continue

                toks = line.split(' = ', 1)
                if len(toks) == 2:
                    k, v = toks[0].strip(), toks[1].strip()
                    if k in ('name', 'region', 'catalog', 'magnitude_type'):
                        d[k] = v
                    if k in (('latitude longitude magnitude depth duration '
                              'north_shift east_shift '
                              'mnn mee mdd mne mnd med strike1 dip1 rake1 '
                              'strike2 dip2 rake2 duration').split()):
                        d[k] = float(v)
                    if k == 'time':
                        d[k] = util.str_to_time(v)
                    if k == 'tags':
                        d[k] = [x.strip() for x in v.split(',')]

                if line.startswith('---'):
                    d['have_separator'] = True
                    break

        except Exception as e:
            raise FileParseError(e)

        if not d:
            raise EOF()

        if 'have_separator' in d and len(d) == 1:
            raise EmptyEvent()

        mt = None
        m6 = [d[x] for x in 'mnn mee mdd mne mnd med'.split() if x in d]
        if len(m6) == 6:
            mt = moment_tensor.MomentTensor(m=moment_tensor.symmat6(*m6))
        else:
            sdr = [d[x] for x in 'strike1 dip1 rake1'.split() if x in d]
            if len(sdr) == 3:
                moment = 1.0
                if 'moment' in d:
                    moment = d['moment']
                elif 'magnitude' in d:
                    moment = moment_tensor.magnitude_to_moment(d['magnitude'])

                mt = moment_tensor.MomentTensor(strike=sdr[0],
                                                dip=sdr[1],
                                                rake=sdr[2],
                                                scalar_moment=moment)

        return (d.get('latitude', 0.0), d.get('longitude', 0.0),
                d.get('north_shift', 0.0), d.get('east_shift', 0.0),
                d.get('time', 0.0), d.get('name', ''), d.get('depth', None),
                d.get('magnitude', None), d.get('magnitude_type',
                                                None), d.get('region', None),
                d.get('catalog', None), mt, d.get('duration',
                                                  None), d.get('tags', []))
Example #30
    def test_evalresp(self, plot=False):

        testdir = os.path.dirname(__file__)

        freqs = num.logspace(num.log10(0.001), num.log10(10.), num=1000)

        transfer = evalresp.evalresp(sta_list='BSEG',
                          cha_list='BHZ',
                          net_code='GR',
                          locid='',
                          instant=util.str_to_time('2012-01-01 00:00:00'),
                          freqs=freqs,
                          units='DIS',
                          file=os.path.join(testdir, 'response', 'RESP.GR.BSEG..BHZ'),
                          rtype='CS')[0][4]

        pzfn = 'SAC_PZs_GR_BSEG_BHZ__2008.254.00.00.00.0000_2599.365.23.59.59.99999'

        zeros, poles, constant = pz.read_sac_zpk(filename=os.path.join(
            testdir, 'response', pzfn))
        
        resp = trace.PoleZeroResponse(zeros, poles, constant)

        transfer2 = resp.evaluate(freqs)

        if plot:
            import pylab as lab
            lab.plot(freqs, num.imag(transfer))
            lab.plot(freqs, num.imag(transfer2))
            lab.gca().loglog() 
            lab.show()

        assert numeq(transfer, transfer2, 1e-4)
Example #32
def get_all_scn_mechs():
    mechs = np.loadtxt("ridgecrest/scn_plot.mech", dtype="str")
    dates = []
    strikes = []
    rakes = []
    dips = []
    depths = []
    lats = []
    lons = []
    events = []
    for i in mechs:
        dates.append(i[1][0:4] + "-" + i[1][5:7] + "-" + i[1][8:] + " " + i[2])
        strikes.append(float(i[16]))
        dips.append(float(i[17]))
        rakes.append(float(i[18]))
        lats.append(float(i[7]))
        lons.append(float(i[8]))
        depths.append(float(i[9]))
        mt = pmt.MomentTensor(strike=float(i[16]),
                              dip=float(i[17]),
                              rake=float(i[18]),
                              magnitude=float(i[5]))
        event = model.event.Event(
            lat=float(i[7]),
            lon=float(i[8]),
            depth=float(i[9]),
            moment_tensor=mt,
            magnitude=float(i[5]),
            time=util.str_to_time(i[1][0:4] + "-" + i[1][5:7] + "-" +
                                  i[1][8:] + " " + i[2]))
        events.append(event)
    return events
Example #33
    def from_attributes(vals):
        if len(vals) == 14:
            nbasicvals = 7
        else:
            nbasicvals = 4
        nslc_ids, tmin, tmax, kind = Marker.parse_attributes(
            vals[1:1+nbasicvals])

        i = 8
        if len(vals) == 14:
            i = 11

        event_hash = str_to_str_or_none(vals[i-3])
        event_sdate = str_to_str_or_none(vals[i-2])
        event_stime = str_to_str_or_none(vals[i-1])

        if event_sdate is not None and event_stime is not None:
            event_time = util.str_to_time(event_sdate + ' ' + event_stime)
        else:
            event_time = None

        phasename = str_to_str_or_none(vals[i])
        polarity = str_to_int_or_none(vals[i+1])
        automatic = str_to_bool(vals[i+2])
        marker = PhaseMarker(nslc_ids, tmin, tmax, kind, event=None,
                             event_hash=event_hash, event_time=event_time,
                             phasename=phasename, polarity=polarity,
                             automatic=automatic)
        return marker
Example #34
def get_events_by_name_or_date(event_names_or_dates, catalog=geofon, magmin=0.):
    stimes = []
    for sev in event_names_or_dates:
        if sev in aliases:
            if isinstance(aliases[sev], str):
                stimes.append(aliases[sev])
            else:
                stimes.extend(aliases[sev])
        else:
            stimes.append(sev)

    events_out = []
    for stime in stimes:
        if op.isfile(stime):
            events_out.extend(model.Event.load_catalog(stime))
        elif stime.startswith('gfz'):
            event = geofon.get_event(stime)
            events_out.append(event)
        else:
            t = util.str_to_time(stime)
            try:
                events = get_events(
                    time_range=(t - 60., t + 60.), magmin=magmin, catalog=catalog)
                events.sort(key=lambda ev: abs(ev.time - t))
                event = events[0]
            except IndexError:
                logger.info('Nothing found in geofon! Trying gCMT!')
                events = get_events(
                    time_range=(t - 60., t + 60.), magmin=magmin, catalog=gcmt)
                events.sort(key=lambda ev: abs(ev.time - t))
                event = events[0]
            events_out.append(event)

    return events_out
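Usage sketch (the GEOFON id and date string below are illustrative; aliases is assumed to be a module-level dict mapping shorthand names to time strings or lists of them):

events = get_events_by_name_or_date(
    ['gfz2010bdrk', '2010-01-12 21:53:10'], magmin=5.)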
Example #35
        def regularize_extra(self, val):
            if isinstance(val, datetime.datetime):
                tt = val.utctimetuple()
                val = calendar.timegm(tt) + val.microsecond * 1e-6

            elif isinstance(val, datetime.date):
                tt = val.timetuple()
                val = float(calendar.timegm(tt))

            elif isinstance(val, str):
                val = val.strip()

                val = re.sub(r'(Z|\+00(:?00)?)$', '', val)
                if val[10] == 'T':
                    val = val.replace('T', ' ', 1)

                try:
                    val = util.str_to_time(val)

                except util.TimeStrError:
                    year = int(val[:4])
                    if year > this_year + 100:
                        return None  # StationXML contained a dummy end date

                    raise

            elif isinstance(val, int):
                val = float(val)

            else:
                raise ValidationError(
                    '%s: cannot convert "%s" to float' % (self.xname(), val))

            return val
Example #36
def iload_fh(f):
    zeros, poles, constant, comments = pz.read_sac_zpk(file=f,
                                                       get_comments=True)
    d = {}
    for line in comments:
        toks = line.split(':', 1)
        if len(toks) == 2:
            temp = toks[0].strip('* \t')
            for k in ('network', 'station', 'location', 'channel', 'start',
                      'end', 'latitude', 'longitude', 'depth', 'elevation',
                      'dip', 'azimuth', 'input unit', 'output unit'):

                if temp.lower().startswith(k):
                    d[k] = toks[1].strip()

    response = trace.PoleZeroResponse(zeros, poles, constant)

    try:
        yield EnhancedSacPzResponse(
            codes=(d['network'], d['station'], d['location'], d['channel']),
            tmin=util.str_to_time(d['start'], format='%Y-%m-%dT%H:%M:%S'),
            tmax=dummy_aware_str_to_time(d['end']),
            lat=float(d['latitude']),
            lon=float(d['longitude']),
            elevation=float(d['elevation']),
            depth=float(d['depth']),
            dip=float(d['dip']),
            azimuth=float(d['azimuth']),
            input_unit=d['input unit'],
            output_unit=d['output unit'],
            response=response)
    except KeyError as e:
        raise EnhancedSacPzError(
            'cannot get all required information "%s"' % e.args[0])
Example #38
def get_events_by_name_or_date(event_names_or_dates, catalog=geofon):
    stimes = []
    for sev in event_names_or_dates:
        if sev in aliases:
            if isinstance(aliases[sev], str):
                stimes.append(aliases[sev])
            else:
                stimes.extend(aliases[sev])
        else:
            stimes.append(sev)

    events_out = []
    for stime in stimes:
        if op.isfile(stime):
            events_out.extend(model.Event.load_catalog(stime))
        elif stime.startswith('gfz'):
            event = geofon.get_event(stime)
            events_out.append(event)
        else:
            t = util.str_to_time(stime)
            events = get_events(time_range=(t - 60., t + 60.), catalog=catalog)
            events.sort(key=lambda ev: abs(ev.time - t))
            event = events[0]
            events_out.append(event)

    return events_out
Example #39
File: station.py Project: hsudha/pyrocko
        def regularize_extra(self, val):
            if isinstance(val, datetime.datetime):
                tt = val.utctimetuple()
                val = calendar.timegm(tt) + val.microsecond * 1e-6

            elif isinstance(val, datetime.date):
                tt = val.timetuple()
                val = float(calendar.timegm(tt))

            elif isinstance(val, str):
                val = val.strip()

                val = re.sub(r'(Z|\+00(:?00)?)$', '', val)
                if val[10] == 'T':
                    val = val.replace('T', ' ', 1)

                try:
                    val = util.str_to_time(val)

                except util.TimeStrError:
                    year = int(val[:4])
                    if year > this_year + 100:
                        return None  # StationXML contained a dummy end date

                    raise

            elif isinstance(val, int):
                val = float(val)

            else:
                raise ValidationError('%s: cannot convert "%s" to float' %
                                      (self.xname(), val))

            return val
Example #41
File: model.py Project: shineusn/pyrocko
    def oldloadf(file):
        d = {}
        try:
            for line in file:
                if line.lstrip().startswith('#'):
                    continue

                toks = line.split(' = ', 1)
                if len(toks) == 2:
                    k, v = toks[0].strip(), toks[1].strip()
                    if k in ('name', 'region', 'catalog', 'magnitude_type'):
                        d[k] = v
                    if k in (('latitude longitude magnitude depth duration '
                              'mnn mee mdd mne mnd med strike1 dip1 rake1 '
                              'strike2 dip2 rake2 duration').split()):
                        d[k] = float(v)
                    if k == 'time':
                        d[k] = util.str_to_time(v)

                if line.startswith('---'):
                    d['have_separator'] = True
                    break

        except Exception as e:
            raise FileParseError(e)
Example #42
    def test_scenario_map(self):
        tempdir = mkdtemp(prefix='pyrocko-scenario')
        self.tempdirs.append(tempdir)

        generator = scenario.ScenarioGenerator(
            seed=20,
            center_lat=42.6,
            center_lon=13.3,
            radius=60*km,
            target_generators=[
                targets.WaveformGenerator(
                    store_id=ScenarioTestCase.store_id,
                    station_generator=targets.RandomStationGenerator(
                        avoid_water=False),
                    noise_generator=targets.waveform.WhiteNoiseGenerator(),
                    seismogram_quantity='velocity'),
                targets.InSARGenerator(
                    resolution=(20, 20),
                    noise_generator=targets.insar.AtmosphericNoiseGenerator(
                        amplitude=1e-5)),
                targets.GNSSCampaignGenerator(
                    station_generator=targets.RandomStationGenerator(
                        avoid_water=False,
                        channels=None))
                ],
            source_generator=scenario.DCSourceGenerator(
                time_min=util.str_to_time('2017-01-01 00:00:00'),
                time_max=util.str_to_time('2017-01-01 02:00:00'),
                radius=10*km,
                depth_min=1*km,
                depth_max=10*km,
                magnitude_min=3.0,
                magnitude_max=4.5,
                strike=120.,
                dip=45.,
                rake=90.,
                perturbation_angle_std=15.,
                nevents=3)
        )

        engine = gf.get_engine()

        collection = scenario.ScenarioCollection(tempdir, engine)
        collection.add_scenario('plot', generator)

        s = collection.get_scenario('plot')
        s.get_map()
Example #43
    def plot():
        '''
        PLOT PARAMETERS CAN BE ADJUSTED IN THIS METHOD
        '''
        # initialize plotter instance with the pickled station box:
        stationBoxFile = open('stationBox.p', 'rb')
        pplotter = plotter(pickle.load(stationBoxFile))
        cmt_lat = 37.10
        cmt_lon = -3.69

        # plot picks (Event coordinates needed to get relative distance):
        CMTevent=model.Event(cmt_lat, cmt_lon, str_to_time("2010-04-11 22:08:15.500"), 
           "Spain_Durcal" , 625500 ,6.3)
        pplotter.plotpicks(CMTevent)

        # plot misfits:
        try:
            pickledResults = open('numpy_results.p', 'rb')
            resultArray = pickle.load(pickledResults)
        finally:
            pickledResults.close()
        
        #pplotter.plotMisfits(data, _depths)
        #.............................................................
        # plot contour misfits for each model:
        # 1st: load misfit array:
        for phaseKey in ['p', 's', 'Scs', 'Scss']:    
            pplotter.plotContourMisfit(resultArray, _lats, _lons, _depths, phaseKey, _models[3])
        

        #.............................................................
        # set time shift file:
        tShiftFiles = glob.glob('map/t_shift*.txt')
        
        if not tShiftFiles or os.stat(tShiftFiles[0])[8] < os.stat('numpy_results.p')[8]:
            for phase in ['p', 's']:
                for testmodel in _models:
                    print('(re)setting t-shift files for model: {0} and phase: {1}'.format(testmodel, phase))
                    depthfinder.setTshiftFile(
                        CMTevent, np.searchsorted(_lats, CMTevent.lat),
                        np.searchsorted(_lons, CMTevent.lon),
                        resultArray, testmodel, phase, _depths, maxdist)

        for shiftmodel in _models:
            pplotter.plotMisfitShiftMap('s',shiftmodel)

        #.............................................................
        #write horizontal layers to image files;
        #pplotter.saveMisfitLayersAsImages(resultArray, _lats, _lons, _depths, 'models/premlocal1_Stich.nd','p', 630000, 600000 )

        CMTeventIndexLongitude = getIndexOfValueInArray(_lons, cmt_lon)
        CMTeventIndexLatitude = getIndexOfValueInArray(_lats, cmt_lat)
        pplotter.plotMisfitsForLocation(_models[0], CMTeventIndexLatitude, CMTeventIndexLongitude, resultArray, _depths)


        #.............................................................
        # create map
        subprocess.call("map/durcal_stations.sh&", shell=True)
Example #44
    def call(self):

        self.cleanup()
        
        # Set up receiver configuration.
        tab = '''
        HH  3. 3. 0
        '''.strip()

        receivers = []
        station, lat, lon, depth = tab.split()

        d_north=self.d_north
        d_east=self.d_east
        origin_lat, origin_lon = orthodrome.ne_to_latlon_alternative_method(float(lat), float(lon), d_north, d_east)
        r = receiver.Receiver(lat,lon, components='neu', name='.%s.' % station)
        receivers.append(r)

        # Composition of the source
        otime = util.str_to_time('2000-01-1 00:00:00')
        db = self.db

        seis = seismosizer.Seismosizer(hosts=['localhost'])
        seis.set_database(db)
        seis.set_effective_dt(db.dt)
        seis.set_local_interpolation('bilinear')
        seis.set_receivers(receivers)
        seis.set_source_location( origin_lat, origin_lon, otime)
        seis.set_source_constraints(0, 0, 0, 0, 0, -1)
        self.seis = seis        
        seis = None

        risetime = 3.
        moment = 1.
        s = source.Source(
            'bilateral',
            sourceparams_str='0 0 0 %g %g %g %g %g 0 0 0 0 1 %g' % (
                self.source_depth, moment, self.strike, self.dip,
                self.rake, risetime))
        self.seis.set_source(s)
        recs = self.seis.get_receivers_snapshot( which_seismograms = ('syn',), which_spectra=(), which_processing='tapered')
        
        trs = []
        for rec in recs:
            if self.save_mseed is True:
                rec.save_traces_mseed(filename_tmpl='%(whichset)s_%(network)s_%(station)s_%(location)s_%(channel)s.mseed' )
            trs.extend(rec.get_traces())
        self.add_traces(trs)
        # Define fade in and out, band pass filter and cut off fader for the TF.
        tfade = self.tfade
        freqlimit = (0.005,.006,1,1.2)
        cut_off_fading = 50
        ntraces = []
        
        for tr in trs:
            TF = STS2()
            
            # Save synthetic trace after transfer function was applied.
            trace_filtered = tr.transfer(tfade, freqlimit, TF, cut_off_fading)            
            # Set new codes to the filtered trace to make it identifiable.
            rename={'e':'BHE','n':'BHN','u':'BHZ'}
            trace_filtered.set_codes(channel=rename[trace_filtered.channel], network='STS2', station='HH', location='syn')
            ntraces.append(trace_filtered)            
Example #45
    def test_conversions(self):

        from pyrocko import model
        from pyrocko.fdsn import station, resp, enhanced_sacpz

        t = util.str_to_time('2014-01-01 00:00:00')
        codes = 'GE', 'EIL', '', 'BHZ'

        resp_fpath = common.test_data_file('test1.resp')
        stations = [
            model.Station(*codes[:3],
                          lat=29.669901,
                          lon=34.951199,
                          elevation=210.0,
                          depth=0.0)
        ]

        sx_resp = resp.make_stationxml(stations,
                                       resp.iload_filename(resp_fpath))
        pr_sx_resp = sx_resp.get_pyrocko_response(codes,
                                                  time=t,
                                                  fake_input_units='M/S')
        pr_evresp = trace.Evalresp(resp_fpath,
                                   nslc_id=codes,
                                   target='vel',
                                   time=t)

        sacpz_fpath = common.test_data_file('test1.sacpz')
        sx_sacpz = enhanced_sacpz.make_stationxml(
            enhanced_sacpz.iload_filename(sacpz_fpath))
        pr_sx_sacpz = sx_sacpz.get_pyrocko_response(codes,
                                                    time=t,
                                                    fake_input_units='M/S')
        pr_sacpz = trace.PoleZeroResponse(*pz.read_sac_zpk(sacpz_fpath))
        try:
            pr_sacpz.zeros.remove(0.0j)
        except ValueError:
            pr_sacpz.poles.append(0.0j)

        sxml_geofon_fpath = common.test_data_file('test1.stationxml')
        sx_geofon = station.load_xml(filename=sxml_geofon_fpath)
        pr_sx_geofon = sx_geofon.get_pyrocko_response(codes,
                                                      time=t,
                                                      fake_input_units='M/S')

        sxml_iris_fpath = common.test_data_file('test2.stationxml')
        sx_iris = station.load_xml(filename=sxml_iris_fpath)
        pr_sx_iris = sx_iris.get_pyrocko_response(codes,
                                                  time=t,
                                                  fake_input_units='M/S')

        freqs = num.logspace(num.log10(0.001), num.log10(1.0), num=1000)
        tf_ref = pr_evresp.evaluate(freqs)
        for pr in [
                pr_sx_resp, pr_sx_sacpz, pr_sacpz, pr_sx_geofon, pr_sx_iris
        ]:
            tf = pr.evaluate(freqs)
            # plot_tfs(freqs, [tf_ref, tf])
            assert cnumeqrel(tf_ref, tf, 0.01)
Example #46
    def testGeofonMT(self):
        cat = catalog.Geofon()
        tmin = util.str_to_time('2014-01-01 00:00:00')
        tmax = util.str_to_time('2017-01-01 00:00:00')
        events_a = cat.get_events((tmin, tmax), magmin=8)
        events_b = [
            cat.get_event('gfz2015sfdd'),
            cat.get_event('gfz2014gkgf')]

        for events in [events_a, events_b]:
            self.assertEqual(len(events), 2)
            mt1, mt2 = [ev.moment_tensor for ev in events]
            angle = moment_tensor.kagan_angle(mt1, mt2)
            self.assertEqual(round(angle - 7.7, 1), 0.0)

        ev = cat.get_event('gfz2020vimx')
        assert isinstance(ev.moment_tensor, moment_tensor.MomentTensor)
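kagan_angle returns the minimum rotation angle, in degrees, between two double-couple mechanisms; identical mechanisms give zero:

mt = moment_tensor.MomentTensor(strike=120., dip=45., rake=90.)
assert abs(moment_tensor.kagan_angle(mt, mt)) < 0.01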
Example #47
    def parse_attributes(vals):
        tmin = util.str_to_time(vals[0] + ' ' + vals[1])
        i = 2
        tmax = tmin
        if len(vals) == 7:
            tmax = util.str_to_time(vals[2] + ' ' + vals[3])
            i = 5

        kind = int(vals[i])
        traces = vals[i+1]
        if traces == 'None':
            nslc_ids = []
        else:
            nslc_ids = tuple(
                [tuple(nslc_id.split('.')) for nslc_id in traces.split(',')])

        return nslc_ids, tmin, tmax, kind
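Reading off the index arithmetic, the two attribute layouts this accepts are sketched below (hypothetical values; the unused fifth column of the long form is presumably a length or duration field):

vals_short = ['2010-01-12', '21:53:10.500', '0', 'GE.STA01..BHZ']
vals_long = ['2010-01-12', '21:53:10.500', '2010-01-12', '21:53:12.000',
             '1.5', '0', 'GE.STA01..BHZ,GE.STA02..BHZ']
nslc_ids, tmin, tmax, kind = Marker.parse_attributes(vals_long)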
Example #49
    def runSerial(models=None):
        '''
        Execute serial processing (1 CPU).

        :param models: list of models to investigate;
            if no models are given, all models will be investigated.
        '''

        if models is None:
            models2use = _models
        else:
            models2use = [models]
        
        iteration = 0
        iterations = len(models2use) * len(_depths) * len(_lats) * len(_lons)
        sys.stdout.write('calculating misfits... ')
        ProBar = progressbar.ProgressBar(maxval=iterations).start()

        # instantiate result array as numpy nd array:
        resultArray = np.ndarray(shape=(len(_lats), len(_lons), len(_depths), 4)) 

        
        for mod in models2use:
            misfits = {'pMF': [], 'sMF': [], 'ScsMF': [], 'ScssMF': []}
 
            loadmod=cake.load_model(mod)
            
            for latindx, lat in enumerate(_lats):
                for lonindx, lon in enumerate(_lons):
                    for zindex, z in enumerate(_depths):
                        iteration+=1
                        eve=model.Event(lat,lon,str_to_time("2010-04-11 22:08:15.500"), 
                           "Spain_Durcal" , z,6.3)
                        [ttpdiff, ttsdiff, ttScsdiff, ttScssdiff] = depthfinder.startup(loadmod, eve, maxdist) 
                        
                        pMF, sMF, ScsMF, ScssMF = map(
                            lambda x: calculateMisfit(x, maxdist),
                            [ttpdiff, ttsdiff, ttScsdiff, ttScssdiff])
                        # update progressbar
                        ProBar.update(iteration)

                        # write data to numpy array:
                        resultArray[latindx][lonindx][zindex] = [pMF, sMF, ScsMF, ScssMF]
                        
            results[mod]=resultArray
            depthfinder.storeStationBox()
        
        # finish progressbar:
        ProBar.finish()
        # write dict to pickled data:
        try:
            output = open('numpy_results.p', 'wb')
            pickle.dump(results, output)
        finally:
            output.close()
       
        # write used stations file:
        depthfinder._myStationBox.writeUsedStationsFile()
Example #50
def load_ev_dict_list(path=None, nevent=0):

    events = num.loadtxt("data/geres_epi.csv", delimiter="\t", dtype='str')
    event_marker_out = []
    ev_dict_list = []
    if nevent is not None:
        events = events[0:nevent]
    for ev in events:
        date = str(ev[1])
        time = str(ev[2])
        try:
            h, m = [int(s) for s in time.split('.')]
        except Exception:
            h = time
            m = 0.
        if len(str(h)) == 5:
            time = "0" + time[0] + ":" + time[1:3] + ":" + time[3:5] + time[5:]
        elif len(str(h)) == 4:
            time = "00" + ":" + time[0:2] + ":" + time[3:5] + time[5:]
        elif len(str(h)) == 3:
            time = "00" + ":" + "0" + time[0] + ":" + time[1:4] + time[4:]
        else:
            time = time[0:2] + ":" + time[2:4] + ":" + time[4:5] + time[5:]
        date = str(date[0:4]) + "-" + str(date[4:6] + "-" + date[6:8] + " ")
        ev_time = util.str_to_time(date + time)
        try:
            ev_dict_list.append(
                dict(id=ev[0],
                     time=ev_time,
                     lat=float(ev[3]),
                     lon=float(ev[4]),
                     mag=float(ev[5]),
                     mag_type=ev[6],
                     source=ev[7],
                     phases=[],
                     depth=[],
                     rms=[],
                     error_h=[],
                     error_z=[]))
        except (IndexError, ValueError):
            ev_dict_list.append(
                dict(id=ev[0],
                     time=ev_time,
                     lat=float(ev[3]),
                     lon=float(ev[4]),
                     mag=None,
                     mag_type=None,
                     source=ev[5],
                     phases=[],
                     depth=[],
                     rms=[],
                     error_h=[],
                     error_z=[]))

    picks = num.loadtxt("data/geres_phas.csv", delimiter="\t", dtype='str')
    return ev_dict_list, picks
Example #51
def context(fn):
    from pyrocko import datacube_ext

    dpath = os.path.dirname(os.path.abspath(fn))
    mtimes = [os.stat(dpath)[8]]

    dentries = sorted([
        os.path.join(dpath, f) for f in os.listdir(dpath)
        if os.path.isfile(os.path.join(dpath, f))
    ])
    for dentry in dentries:
        fn2 = os.path.join(dpath, dentry)
        mtimes.append(os.stat(fn2)[8])

    mtime = float(max(mtimes))

    if dpath in g_dir_contexts:
        dir_context = g_dir_contexts[dpath]
        if dir_context.mtime == mtime:
            return dir_context

        del g_dir_contexts[dpath]

    entries = []
    for dentry in dentries:
        fn2 = os.path.join(dpath, dentry)
        if not os.path.isfile(fn2):
            continue

        with open(fn2, 'rb') as f:
            first512 = f.read(512)
            if not detect(first512):
                continue

        with open(fn2, 'rb') as f:
            try:
                header, data_arrays, gps_tags, nsamples, _ = \
                        datacube_ext.load(f.fileno(), 3, 0, -1, None)

            except datacube_ext.DataCubeError as e:
                e = DataCubeError(str(e))
                e.set_context('filename', fn)
                raise e

        header = dict(header)
        entries.append(
            DirContextEntry(
                path=os.path.abspath(fn2),
                tstart=util.str_to_time('20' + header['S_DATE'] + ' ' +
                                        header['S_TIME'],
                                        format='%Y/%m/%d %H:%M:%S'),
                ifile=int(header['DAT_NO'])))

    dir_context = DirContext(mtime=mtime, path=dpath, entries=entries)

    return dir_context
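The caching idea above, rebuilding only when the newest mtime under the directory changes, also works in isolation. A minimal sketch, where scan_directory is a hypothetical stand-in for the DataCube header parsing:

import os

_cache = {}  # path -> (mtime, result), mirroring g_dir_contexts

def cached_scan(dpath, scan_directory):
    # Newest modification time among the directory and its files.
    paths = [dpath] + [os.path.join(dpath, f) for f in os.listdir(dpath)]
    mtime = max(os.stat(p).st_mtime for p in paths)

    hit = _cache.get(dpath)
    if hit is not None and hit[0] == mtime:
        return hit[1]  # cache is still fresh

    result = scan_directory(dpath)  # expensive rebuild
    _cache[dpath] = (mtime, result)
    return result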
Example #52
0
    def testTime(self):

        for fmt, accu in zip(
                ['%Y-%m-%d %H:%M:%S.3FRAC', '%Y-%m-%d %H:%M:%S.2FRAC',
                 '%Y-%m-%d %H:%M:%S.1FRAC', '%Y-%m-%d %H:%M:%S',
                 '%Y-%m-%d %H.%M.%S.3FRAC'],
                [0.001, 0.01, 0.1, 1., 0.001]):

            ta = util.str_to_time('1960-01-01 10:10:10')
            tb = util.str_to_time('2020-01-01 10:10:10')

            for i in range(10000):
                t1 = ta + random() * (tb-ta)
                s = util.time_to_str(t1, format=fmt)
                t2 = util.str_to_time(s, format=fmt)
                assert abs(t1 - t2) < accu
                fmt_opt = re.sub(r'\.[0-9]FRAC$', '', fmt) + '.OPTFRAC'
                t3 = util.str_to_time(s, format=fmt_opt)
                assert abs(t1 - t3) < accu
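For reference, a standalone round trip with the OPTFRAC suffix, which makes fractional seconds optional when parsing (times are illustrative):

from pyrocko import util

t = util.str_to_time('2015-06-30 12:00:00.250')
s = util.time_to_str(t, format='%Y-%m-%d %H:%M:%S.2FRAC')    # '2015-06-30 12:00:00.25'
t2 = util.str_to_time(s, format='%Y-%m-%d %H:%M:%S.OPTFRAC')
assert abs(t - t2) < 0.01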
Example #53
0
def subset_events_dist_cat(catalog, mag_min, mag_max,
                       tmin, tmax, st_lat, st_lon,
                       dist_min=None, dist_max=None):
    """
    Extract a subset of events from event catalog

    :param catalog: Event catalog in pyrocko format
    :param mag_min: Min. magnitude
    :param mag_max: Max. magnitude
    :param tmin: string representing UTC time
    :param tmax: string representing UTC time
    :param format tmin: time string format ('%Y-%m-%d %H:%M:%S.OPTFRAC')
    :param format tmax: time string format ('%Y-%m-%d %H:%M:%S.OPTFRAC')
    :param dist_min: min. distance (km)
    :param dist_max: max. distance (km)
    :param depth_min
    :param depth_max

    :returns: list of events
    """

    use_events = []
    events = model.load_events(catalog)
    for ev in events:
        if ev.magnitude < mag_max and\
          ev.magnitude > mag_min and\
          ev.time < util.str_to_time(tmax) and\
          ev.time > util.str_to_time(tmin):
            if dist_min or dist_max:
                dist = orthodrome.distance_accurate50m_numpy(
                       ev.lat, ev.lon, st_lat, st_lon)/1000.

                if dist_min and dist_max and\
                  dist > dist_min and dist < dist_max:
                    use_events.append(ev)

                if dist_min and not dist_max and dist > dist_min:
                    use_events.append(ev)

                if dist_max and not dist_min and dist < dist_max:
                    use_events.append(ev)

            else:
                # No distance constraint given; the magnitude and time
                # filters have already been passed.
                use_events.append(ev)

    return use_events
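A usage sketch, assuming 'events.pf' is an event file in pyrocko format and the station coordinates are illustrative:

subset = subset_events_dist_cat(
    'events.pf', mag_min=4.0, mag_max=7.0,
    tmin='2010-01-01 00:00:00', tmax='2011-01-01 00:00:00',
    st_lat=52.5, st_lon=13.4,
    dist_min=100., dist_max=1000.)
print('%i events selected' % len(subset))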
Example #54
0
def well_geometry_sparrow_export(file, folder, name):
    from pyproj import Proj
    from pyrocko.model import Geometry
    data = num.loadtxt(file, delimiter=",")
    utmx = data[:, 0]
    utmy = data[:, 1]
    z = data[:, 2]

    proj_gk4 = Proj("epsg:31467")  # Gauss-Krueger zone 3; the 'init=' form is deprecated in pyproj 2+
    lons, lats = proj_gk4(utmx, utmy, inverse=True)
    ev = event.Event(lat=num.mean(lats), lon=num.mean(lons), depth=num.mean(z),
                     time=util.str_to_time("2000-01-01 01:01:01"))

    ncorners = 4
    verts = []
    xs = []
    ys = []
    dist = 200.
    # Build one quadrilateral patch between consecutive survey points;
    # the last point has no successor, hence len(z) - 1.
    for i in range(len(z) - 1):
        x = lats[i]
        y = lons[i]
        x1 = lats[i+1]
        y1 = lons[i+1]
        depth = z[i]
        depth1 = z[i+1]
        xyz = ([dist/2.8, dist/2.8, depth], [dist/2.8, dist/2.8, depth1],
               [dist/2.8, -dist/2.8, depth1], [dist/2.8, -dist/2.8, depth])
        latlon = ([x, y], [x1, y1], [x1, y1], [x, y])
        patchverts = num.hstack((latlon, xyz))
        verts.append(patchverts)

    vertices = num.vstack(verts)

    npatches = len(vertices) // ncorners  # each patch contributes ncorners vertices
    faces1 = num.arange(ncorners * npatches, dtype='int64').reshape(
        npatches, ncorners)
    faces2 = num.fliplr(faces1)
    faces = num.hstack((faces2, faces1))
    srf_semblance_list = []

    srf_semblance = num.ones(num.shape(data[:, 2]))
    srf_semblance = duplicate_property(srf_semblance)
    srf_semblance_list.append(srf_semblance)

    srf_semblance = num.asarray(srf_semblance_list).T
    srf_times = num.linspace(0, 1, 1)
    geom = Geometry(times=srf_times, event=ev)
    geom.setup(vertices, faces)
    sub_headers = tuple([str(i) for i in srf_times])
    geom.add_property((('semblance', 'float64', sub_headers)), srf_semblance)
    dump(geom, filename=os.path.join(folder, '%s.yaml' % name))  # honour the folder/name arguments
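A usage sketch, assuming the CSV holds one easting,northing,depth row per survey point of the well path (file and names are illustrative):

well_geometry_sparrow_export('well_path.csv', folder='sparrow_out', name='well1')
# writes sparrow_out/well1.yaml for loading into the Sparrow viewer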
Example #55
0
    def test_source_times(self):
        store = self.dummy_store()
        for S in gf.source_classes:
            if not hasattr(S, 'discretize_basesource'):
                continue

            for t in [0.0, util.str_to_time('2014-01-01 10:00:00')]:
                source = S(time=t)
                dsource = source.discretize_basesource(store)
                cent = dsource.centroid()
                assert numeq(cent.time + source.get_timeshift(), t, 0.0001)
Example #57
0
    def testGeofon(self):

        cat = catalog.Geofon()

        tmin = util.str_to_time('2010-01-12 21:50:00')
        tmax = util.str_to_time('2010-01-13 03:17:00')

        names = cat.get_event_names(
            time_range=(tmin, tmax), nmax=10, magmin=5.)

        assert len(names) > 0
        ident = None
        for name in names:
            ev = cat.get_event(name)
            if ev.magnitude >= 7:
                is_the_haiti_event_geofon(ev)
                ident = ev.name

        assert ident is not None

        cat.flush()
Example #58
0
    def test_conversions(self):

        from pyrocko import model
        from pyrocko.io import resp, enhanced_sacpz
        from pyrocko.io import stationxml

        t = util.str_to_time('2014-01-01 00:00:00')
        codes = 'GE', 'EIL', '', 'BHZ'

        resp_fpath = common.test_data_file('test1.resp')
        stations = [model.Station(
            *codes[:3],
            lat=29.669901,
            lon=34.951199,
            elevation=210.0,
            depth=0.0)]

        sx_resp = resp.make_stationxml(
            stations, resp.iload_filename(resp_fpath))

        pr_sx_resp = sx_resp.get_pyrocko_response(
            codes, time=t, fake_input_units='M/S')
        pr_evresp = trace.Evalresp(
            resp_fpath, nslc_id=codes, target='vel', time=t)

        sacpz_fpath = common.test_data_file('test1.sacpz')
        sx_sacpz = enhanced_sacpz.make_stationxml(
            enhanced_sacpz.iload_filename(sacpz_fpath))
        pr_sx_sacpz = sx_sacpz.get_pyrocko_response(
            codes, time=t, fake_input_units='M/S')
        pr_sacpz = trace.PoleZeroResponse(*pz.read_sac_zpk(sacpz_fpath))
        try:
            pr_sacpz.zeros.remove(0.0j)
        except ValueError:
            pr_sacpz.poles.append(0.0j)

        sxml_geofon_fpath = common.test_data_file('test1.stationxml')
        sx_geofon = stationxml.load_xml(filename=sxml_geofon_fpath)
        pr_sx_geofon = sx_geofon.get_pyrocko_response(
            codes, time=t, fake_input_units='M/S')

        sxml_iris_fpath = common.test_data_file('test2.stationxml')
        sx_iris = stationxml.load_xml(filename=sxml_iris_fpath)
        pr_sx_iris = sx_iris.get_pyrocko_response(
            codes, time=t, fake_input_units='M/S')

        freqs = num.logspace(num.log10(0.001), num.log10(1.0), num=1000)
        tf_ref = pr_evresp.evaluate(freqs)
        for pr in [pr_sx_resp, pr_sx_sacpz, pr_sacpz, pr_sx_geofon,
                   pr_sx_iris]:
            tf = pr.evaluate(freqs)
            # plot_tfs(freqs, [tf_ref, tf])
            assert cnumeqrel(tf_ref, tf, 0.01)
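The response objects compared above share a small evaluate() interface; a minimal sketch with a made-up two-pole response:

import numpy as num
from pyrocko import trace

resp = trace.PoleZeroResponse(
    zeros=[0.0j, 0.0j],
    poles=[-0.037 + 0.037j, -0.037 - 0.037j],
    constant=1.0)

freqs = num.logspace(-3, 0, num=50)
tf = resp.evaluate(freqs)  # complex transfer function at the given frequencies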
Example #59
0
        def regularize_extra(self, val):
            if isinstance(val, datetime.datetime):
                tt = val.utctimetuple()
                val = calendar.timegm(tt) + val.microsecond * 1e-6  

            elif isinstance(val, str):
                val = str_to_time(val, format='%Y-%m-%d')
            
            if not isinstance(val, float):
                val = float(val)

            return val
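The datetime branch mirrors the standard-library idiom for UTC timestamps; shown standalone (a minimal sketch):

import calendar
import datetime

dt = datetime.datetime(2014, 1, 1, 12, 30, 15, 250000)
t = calendar.timegm(dt.utctimetuple()) + dt.microsecond * 1e-6
# t is seconds since the epoch, with dt interpreted as UTC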
Example #60
0
    def test_scenario_combinations(self):
        import copy
        generator = copy.deepcopy(self.generator)
        engine = gf.get_engine()
        generator.init_modelling(engine)

        for src in scenario.sources.AVAILABLE_SOURCES:
            generator.source_generator = src(
                time_min=util.str_to_time('2017-01-01 00:00:00'),
                time_max=util.str_to_time('2017-01-01 02:00:00'),
                radius=1*km,
                depth_min=1.5*km,
                depth_max=5*km,
                magnitude_min=3.0,
                magnitude_max=4.5)
            generator.source_generator.update_hierarchy(generator)

            generator.get_stations()
            generator.get_waveforms()
            generator.get_insar_scenes()
            generator.get_gnss_campaigns()