Example #1
def rfstats(stats=None, event=None, station=None, stream=None,
            phase='P', dist_range=None):
    """
    Calculate ray specific values like slowness for given event and station.

    :param stats: stats object with event and/or station attributes. Can be
        None if both event and station are given.
    :param event: ObsPy :class:`~obspy.core.event.Event` object
    :param station: station object with attributes latitude, longitude and
        elevation
    :param stream: If a stream is given, stats has to be None. In this case
        rfstats will be called for every stats object in the stream.
    :param phase: string with phase to look for in result of
        :func:`~obspy.taup.taup.getTravelTimes`. Usually this will be 'P' or
        'S' for P and S receiver functions, respectively.
    :type dist_range: tuple of length 2
    :param dist_range: if the epicentral distance of the event is not in this
        interval, None is returned by this function,\n
        if phase == 'P' defaults to (30, 90),\n
        if phase == 'S' defaults to (50, 85)

    :return: ``stats`` object with event and station attributes, distance,
        back_azimuth, inclination, onset and slowness, or None if the
        epicentral distance is not in the given interval
    """
    if stream is not None:
        assert stats is None
        for tr in stream:
            rfstats(tr.stats, event, station, None, phase, dist_range)
        return
    phase = phase.upper()
    if dist_range is None and phase in 'PS':
        dist_range = (30, 90) if phase == 'P' else (50, 85)
    if stats is None:
        stats = AttribDict({})
    stats.update(obj2stats(event=event, station=station))
    dist, baz, _ = gps2DistAzimuth(stats.station_latitude,
                                   stats.station_longitude,
                                   stats.event_latitude,
                                   stats.event_longitude)
    dist = kilometer2degrees(dist / 1000)
    if dist_range and not dist_range[0] <= dist <= dist_range[1]:
        return
    tts = getTravelTimes(dist, stats.event_depth)
    tts2 = getTravelTimes(dist, 0)
    tts = [tt for tt in tts if tt['phase_name'] == phase]
    tts2 = [tt for tt in tts2 if tt['phase_name'] == phase]
    if len(tts) == 0 or len(tts2) == 0:
        raise Exception('Taup does not return phase %s at event distance %s' %
                        (phase, dist))
    onset = stats.event_time + tts[0]['time']
    inc = tts2[0]['take-off angle']  # approximation
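    # horizontal slowness p = R * sin(inc) / v (s/rad), converted to s/deg below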
    v = 5.8 if 'P' in phase else 3.36  # iasp91
    slowness = 6371. * sin(pi / 180. * inc) / v / 180 * pi
    stats.update({'distance': dist, 'back_azimuth': baz, 'inclination': inc,
                  'onset': onset, 'slowness': slowness})
    return stats
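
A minimal usage sketch for rfstats() above, with hypothetical station coordinates; event is assumed to be an already loaded ObsPy Event object, and obj2stats() from the same module must be importable:

from obspy.core.util import AttribDict

station = AttribDict(latitude=48.2, longitude=11.3, elevation=565.0)  # assumed values
stats = rfstats(event=event, station=station, phase='P')
if stats is not None:  # None means the epicentral distance fell outside dist_range
    print(stats.distance, stats.back_azimuth, stats.slowness)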
Example #2
 def test_getTravelTimesIASP91(self):
     """
     Tests getTravelTimes method using model iasp91.
     """
     # read output results from original program
     filename = os.path.join(self.path, 'sample_ttimes_iasp91.lst')
     with open(filename, 'rt') as fp:
         data = fp.readlines()
     #1
     tt = getTravelTimes(delta=52.474, depth=611.0, model='iasp91')
     lines = data[5:29]
     self.assertEqual(len(tt), len(lines))
     # check calculated tt against original
     for i in range(len(lines)):
         parts = lines[i][13:].split()
         item = tt[i]
         self.assertEqual(item['phase_name'], parts[0].strip())
         self.assertAlmostEqual(item['time'], float(parts[1].strip()), 2)
         self.assertAlmostEqual(item['take-off angle'],
                                float(parts[2].strip()), 2)
         self.assertAlmostEqual(item['dT/dD'], float(parts[3].strip()), 2)
         self.assertAlmostEqual(item['dT/dh'], float(parts[4].strip()), 2)
         self.assertAlmostEqual(item['d2T/dD2'],
                                float(parts[5].strip()), 2)
     #2
     tt = getTravelTimes(delta=50.0, depth=300.0, model='iasp91')
     lines = data[34:59]
     self.assertEqual(len(tt), len(lines))
     # check calculated tt against original
     for i in range(len(lines)):
         parts = lines[i][13:].split()
         item = tt[i]
         self.assertEqual(item['phase_name'], parts[0].strip())
         self.assertAlmostEqual(item['time'], float(parts[1].strip()), 2)
         self.assertAlmostEqual(item['take-off angle'],
                                float(parts[2].strip()), 2)
         self.assertAlmostEqual(item['dT/dD'], float(parts[3].strip()), 2)
         self.assertAlmostEqual(item['dT/dh'], float(parts[4].strip()), 2)
         self.assertAlmostEqual(item['d2T/dD2'],
                                float(parts[5].strip()), 2)
     #3
     tt = getTravelTimes(delta=150.0, depth=300.0, model='iasp91')
     lines = data[61:89]
     self.assertEqual(len(tt), len(lines))
     # check calculated tt against original
     for i in range(len(lines)):
         parts = lines[i][13:].split()
         item = tt[i]
         self.assertEqual(item['phase_name'], parts[0].strip())
         self.assertAlmostEqual(item['time'], float(parts[1].strip()), 2)
         self.assertAlmostEqual(item['take-off angle'],
                                float(parts[2].strip()), 2)
         self.assertAlmostEqual(item['dT/dD'], float(parts[3].strip()), 2)
         self.assertAlmostEqual(item['dT/dh'], float(parts[4].strip()), 2)
         self.assertAlmostEqual(item['d2T/dD2'],
                                float(parts[5].strip()), 2)
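
For reference, every entry of the list returned by getTravelTimes is a dict carrying at least the keys compared above; a minimal inspection sketch reusing the inputs of the first test case:

from obspy.taup.taup import getTravelTimes

tt = getTravelTimes(delta=52.474, depth=611.0, model='iasp91')
for key in ('phase_name', 'time', 'take-off angle', 'dT/dD', 'dT/dh', 'd2T/dD2'):
    print(key, tt[0][key])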
Example #3
 def test_getTravelTimesIASP91(self):
     """
     Tests getTravelTimes method using model iasp91.
     """
     # read output results from original program
     filename = os.path.join(self.path, 'sample_ttimes_iasp91.lst')
     with open(filename, 'rt') as fp:
         data = fp.readlines()
     #1
     tt = getTravelTimes(delta=52.474, depth=611.0, model='iasp91')
     lines = data[5:29]
     self.assertEquals(len(tt), len(lines))
     # check calculated tt against original
     for i in range(len(lines)):
         parts = lines[i][13:].split()
         item = tt[i]
         self.assertEquals(item['phase_name'], parts[0].strip())
         self.assertAlmostEquals(item['time'], float(parts[1].strip()), 3)
         self.assertAlmostEquals(item['take-off angle'],
                                 float(parts[2].strip()), 2)
         self.assertAlmostEquals(item['dT/dD'], float(parts[3].strip()), 3)
         self.assertAlmostEquals(item['dT/dh'], float(parts[4].strip()), 3)
         self.assertAlmostEquals(item['d2T/dD2'], float(parts[5].strip()),
                                 3)
     #2
     tt = getTravelTimes(delta=50.0, depth=300.0, model='iasp91')
     lines = data[34:59]
     self.assertEquals(len(tt), len(lines))
     # check calculated tt against original
     for i in range(len(lines)):
         parts = lines[i][13:].split()
         item = tt[i]
         self.assertEquals(item['phase_name'], parts[0].strip())
         self.assertAlmostEquals(item['time'], float(parts[1].strip()), 3)
         self.assertAlmostEquals(item['take-off angle'],
                                 float(parts[2].strip()), 2)
         self.assertAlmostEquals(item['dT/dD'], float(parts[3].strip()), 3)
         self.assertAlmostEquals(item['dT/dh'], float(parts[4].strip()), 3)
         self.assertAlmostEquals(item['d2T/dD2'], float(parts[5].strip()),
                                 3)
     #3
     tt = getTravelTimes(delta=150.0, depth=300.0, model='iasp91')
     lines = data[61:89]
     self.assertEquals(len(tt), len(lines))
     # check calculated tt against original
     for i in range(len(lines)):
         parts = lines[i][13:].split()
         item = tt[i]
         self.assertEquals(item['phase_name'], parts[0].strip())
         self.assertAlmostEquals(item['time'], float(parts[1].strip()), 3)
         self.assertAlmostEquals(item['take-off angle'],
                                 float(parts[2].strip()), 3)
         self.assertAlmostEquals(item['dT/dD'], float(parts[3].strip()), 3)
         self.assertAlmostEquals(item['dT/dh'], float(parts[4].strip()), 3)
         self.assertAlmostEquals(item['d2T/dD2'], float(parts[5].strip()),
                                 2)
Example #4
    def test_unrealistic_origin_depth_kills_python(self):
        """
        See #757

        It should of course not kill python...
        """
        # This just barely works.
        getTravelTimes(10, 800, model="iasp91")
        # This raises an error.
        self.assertRaises(ValueError, getTravelTimes, 10, 801, model="iasp91")
        # This just barely works.
        getTravelTimes(10, 800, model="ak135")
        # This raises an error.
        self.assertRaises(ValueError, getTravelTimes, 10, 801, model="ak135")
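
Based on the behaviour exercised above (both bundled models raise ValueError for source depths greater than 800 km), a caller may want to guard the depth first; a small hedged sketch:

from obspy.taup.taup import getTravelTimes

def safe_travel_times(delta, depth_km, model='ak135', max_depth=800.0):
    # Skip unrealistic source depths instead of letting getTravelTimes raise.
    if depth_km > max_depth:
        return None
    return getTravelTimes(delta, depth_km, model=model)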
Example #5
def get_neries_info(starttime, endtime, streams):
    events = []
    arrivals = {}
    try:
        client = neries.Client()
        events = client.getEvents(min_datetime=starttime - 20 * 60,
                                  max_datetime=endtime,
                                  format="list")
        for ev in events[::-1]:
            has_arrivals = False
            origin_time = ev['datetime']
            lon1 = ev['longitude']
            lat1 = ev['latitude']
            depth = abs(ev['depth'])
            for st in streams:
                sta = st[0].stats.station
                lon2 = st[0].stats.coordinates['longitude']
                lat2 = st[0].stats.coordinates['latitude']
                dist = locations2degrees(lat1, lon1, lat2, lon2)
                tts = getTravelTimes(dist, depth)
                list_ = arrivals.setdefault(sta, [])
                for tt in tts:
                    tt['time'] = origin_time + tt['time']
                    if starttime < tt['time'] < endtime:
                        has_arrivals = True
                        list_.append(tt)
            if not has_arrivals:
                events.remove(ev)
    except Exception as e:
        msg = ("Problem while determining theoretical phases using "
               "neries/taup: %s: %s" % (e.__class__.__name__, str(e)))
        return None, None, msg
    return events, arrivals, None
Example #6
def traveltimes(MetaDict, Event):

    logger.info('\033[31m Enter AUTOMATIC FILTER \033[0m')
    T = []
    Wdict = {}
    SNR = {}
    for i in MetaDict:
        
        de = locations2degrees(float(Event.lat), float(Event.lon), float(i.lat), float(i.lon))
        tt = getTravelTimes(delta=de, depth=float(Event.depth), model='ak135')
        if tt[0]['phase_name'] == 'P':
            ptime = tt[0]['time']
            T.append(ptime)

        logger.info('\033[31m \n\n+++++++++++++++++++++++++++++++++++++++++++++++++++ \033[0m') 
        print(i.getName(), i.lat, i.lon, ptime)
        ttime = ptime
        tw = calculateTimeWindows(ptime, Event)
        w, snr = readWaveformsCross(i, tw, Event, ttime)
        Wdict[i.getName()] = w
        SNR[i.getName()] = snr
    
    logger.info('\033[31m Exit AUTOMATIC FILTER \033[0m')
    
    return Wdict, SNR
Example #7
def get_dist_p_s(dist, evdp, model):
    '''
    function to get the p and s travel time for 3 different models
    '''
    p_time = []
    s_time = []
    if model not in ['ak135', 'iasp91', 'common']:
        print('model must be (1) iasp91, (2) ak135, (3) common')
        print('use model common')

    for d in dist:
        epi_val = d

        if model == 'iasp91' or model == 'ak135':
            delta = d / 111.1
            tt = getTravelTimes(delta=delta, depth=evdp, model=model)
            p_t = next(item for item in tt if item["phase_name"] == "p")
            s_t = next(item for item in tt if item["phase_name"] == "s")
            p_time.append(p_t['time'])
            s_time.append(s_t['time'])

        else:
            model = 'common'
            p_t, s_t = P_S_arrival_T_common(epi_val, evdp)
            p_time.append(p_t)
            s_time.append(s_t)
    dist_p = dist
    dist_s = dist
    return dist_p, p_time, dist_s, s_time, model
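
The generator-plus-next() lookup used above raises StopIteration when the requested phase is missing from the taup result, so a variant with a default is often safer; a hedged sketch reusing inputs from the test cases earlier on this page:

from obspy.taup.taup import getTravelTimes

tt = getTravelTimes(delta=50.0, depth=300.0, model='iasp91')
p_arr = next((item for item in tt if item['phase_name'] == 'P'), None)
if p_arr is None:
    print('phase P not returned at this distance/depth')
else:
    print(p_arr['time'])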
Example #8
def get_event_info(starttime, endtime, streams):
    events = []
    arrivals = {}
    try:
        client = FDSNClient("NERIES")
        events = client.get_events(starttime=starttime - 20 * 60,
                                   endtime=endtime)
        for ev in events[::-1]:
            has_arrivals = False
            origin = ev.origins[0]
            origin_time = origin.time
            lon1 = origin.longitude
            lat1 = origin.latitude
            depth = abs(origin.depth / 1e3)
            for st in streams:
                sta = st[0].stats.station
                lon2 = st[0].stats.coordinates['longitude']
                lat2 = st[0].stats.coordinates['latitude']
                dist = locations2degrees(lat1, lon1, lat2, lon2)
                tts = getTravelTimes(dist, depth)
                list_ = arrivals.setdefault(sta, [])
                for tt in tts:
                    tt['time'] = origin_time + tt['time']
                    if starttime < tt['time'] < endtime:
                        has_arrivals = True
                        list_.append(tt)
            if not has_arrivals:
                events[:] = events[:-1]
    except Exception as e:
        msg = ("Problem while fetching events or determining theoretical "
               "phases: %s: %s" % (e.__class__.__name__, str(e)))
        return None, None, msg
    return events, arrivals, None
Example #9
def refTrigger(Waveform,Event,Meta):
    
    print(Event)
    name = ('%s.%s.%s.%s')%(Waveform[0].stats.network,Waveform[0].stats.station,Waveform[0].stats.location,Waveform[0].stats.channel)
    
    i = searchMeta(name,Meta)
    print(i)
    
    de = locations2degrees(float(Event.lat), float(Event.lon), float(i.lat), float(i.lon))
    tt = getTravelTimes(delta=de, depth=float(Event.depth), model='ak135')
    ptime = 0

    if tt[0]['phase_name'] == 'P':
        ptime = tt[0]['time']
    
    tw = calculateTimeWindows(ptime, Event)
    stP = readWaveformsPicker(i, tw, Event, ptime)
    trP = stP[0]
    
    cft = recSTALTA(trP.data, int(1 * trP.stats.sampling_rate),
                    int(10 * trP.stats.sampling_rate))
    t = triggerOnset(cft, 6, 1.5)
    print(len(trP), t, type(t))
    onset = t[0][0] / trP.stats.sampling_rate

    print('TRIGGER ', trP.stats.starttime + onset)
    print('THEORETICAL: ', UTCDateTime(Event.time) + ptime)
    tdiff = (trP.stats.starttime + onset) - (UTCDateTime(Event.time) + ptime)
    # plotTrigger(trP, cft, 6, 1.5)
    print(tdiff)
    
    return tdiff
Example #10
def calculate_time_phase(event, sta):
    """
    calculate arrival time of the requested phase to use in retrieving
    waveforms.
    :param event:
    :param sta:
    :return:
    """

    ev_lat = event['latitude']
    ev_lon = event['longitude']
    ev_dp = abs(float(event['depth']))
    sta_lat = float(sta[4])
    sta_lon = float(sta[5])
    delta = locations2degrees(ev_lat, ev_lon, sta_lat, sta_lon)
    tt = taup.getTravelTimes(delta, ev_dp)
    phase_list = ['P', 'Pdiff', 'PKIKP']

    time_ph = 0
    flag = False
    for ph in phase_list:
        for i in range(len(tt)):
            if tt[i]['phase_name'] == ph:
                flag = True
                time_ph = tt[i]['time']
                break
            else:
                continue
        if flag:
            print('Phase: %s' % ph)
            break
    t_start = event['t1'] + time_ph
    t_end = event['t2'] + time_ph
    return t_start, t_end
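
The nested loops above implement "use the first phase from the preference list that taup actually returns"; an equivalent, more compact hedged sketch of that lookup:

def first_available_phase(tt, phase_list=('P', 'Pdiff', 'PKIKP')):
    # Return (phase_name, time) for the first phase in phase_list that is
    # present in the getTravelTimes result, or (None, 0) if none of them is.
    for ph in phase_list:
        for item in tt:
            if item['phase_name'] == ph:
                return ph, item['time']
    return None, 0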
Example #11
 def arr_time(self, epi_dist, req_phase='Pdiff'):
     tt = getTravelTimes(epi_dist, self.stats.sac.evdp, model=self.model)
     t_phase = -12345.0
     for tt_item in tt:
         if tt_item['phase_name'] == req_phase:
             t_phase = tt_item['time']
             break
     return (t_phase)
Example #12
    def test_issue_with_global_state(self):
        """
        Minimal test case for an issue with global state that results in
        different results for the same call to getTravelTimes() in some
        circumstances.

        See #728 for more details.
        """
        tt_1 = getTravelTimes(delta=100, depth=0, model="ak135")

        # Some other calculation in between.
        getTravelTimes(delta=100, depth=200, model="ak135")

        tt_2 = getTravelTimes(delta=100, depth=0, model="ak135")

        # Both should be equal if everything is alright.
        self.assertEqual(tt_1, tt_2)
Example #13
    def on_stations_listWidget_currentItemChanged(self, current, previous):
        if current is None:
            return
        wave = self.comm.query.get_matching_waveforms(
            self.current_event, self.current_iteration, self.current_station)

        event = self.comm.events.get(self.current_event)

        great_circle_distance = locations2degrees(
            event["latitude"], event["longitude"],
            wave.coordinates["latitude"], wave.coordinates["longitude"])
        tts = getTravelTimes(great_circle_distance, event["depth_in_km"],
                             model="ak135")

        windows_for_station = \
            self.current_window_manager.get_windows_for_station(
                self.current_station)

        self._reset_all_plots()

        for component in ["Z", "N", "E"]:
            plot_widget = getattr(self.ui, "%s_graph" % component.lower())
            data_tr = [tr for tr in wave.data
                       if tr.stats.channel[-1].upper() == component]
            if data_tr:
                tr = data_tr[0]
                plot_widget.data_id = tr.id
                times = tr.times()
                plot_widget.plot(times, tr.data, pen="k")
            else:
                plot_widget.data_id = None
            synth_tr = [tr for tr in wave.synthetics
                        if tr.stats.channel[-1].upper() == component]
            if synth_tr:
                tr = synth_tr[0]
                times = tr.times()
                plot_widget.plot(times, tr.data, pen="r", )

            if data_tr or synth_tr:
                for tt in tts:
                    if tt["time"] >= times[-1]:
                        continue
                    if tt["phase_name"][0].lower() == "p":
                        pen = "#008c2866"
                    else:
                        pen = "#95000066"
                    plot_widget.addLine(x=tt["time"], pen=pen, z=-10)

            plot_widget.autoRange()

            window = [_i for _i in windows_for_station
                      if _i.channel_id[-1].upper() == component]
            if window:
                plot_widget.windows = window[0]
                for win in window[0].windows:
                    WindowLinearRegionItem(win, event, parent=plot_widget)
Example #14
File: main.py Project: preinh/RF
def event2stats(lat, lon, event, phase='P', dist_range=(30, 90)):
    phase = phase.upper()
    ori = event.origins[0]
    dist, baz, _ = gps2DistAzimuth(lat, lon,
                                   ori.latitude, ori.longitude)
    dist = kilometer2degrees(dist / 1000)
    if not dist_range[0] <= dist <= dist_range[1]:
        return
    tts = getTravelTimes(dist, ori.depth)
    tts2 = getTravelTimes(dist, 0)
    tts = [tt for tt in tts if tt['phase_name'] == phase]
    tts2 = [tt for tt in tts2 if tt['phase_name'] == phase]
    if len(tts) == 0 or len(tts2) == 0:
        raise Exception('Taup does not return phase %s at event distance %s' %
                        (phase, dist))
    onset = event.origins[0].time + tts[0]['time']
    inc = tts2[0]['take-off angle']  # approximation
    return AttribDict({'dist': dist, 'back_azimuth': baz, 'inclination': inc,
                       'onset': onset})
Example #15
 def getTravelTimes2(self, distance, depth):
     tt = taup.getTravelTimes(delta=1.5, depth=13.0, model='ak135')
     ptimes = []
     stimes = []
     for time in tt:
         if time['phase_name'].startswith('P'):
             ptimes.append(time['time'])
         if time['phase_name'].startswith('S'):
             stimes.append(time['time'])
     return (min(ptimes), min(stimes))
Example #16
    def delay_and_sum(self, pds_depth=660.0):
        '''
        Shift the time axis of a receiver function trace
        to correct for the moveout of a given phase.
        '''
        #use a reference delta = 45
        ref_deg = 45.0
        travel_times = getTravelTimes(ref_deg,
                                      self.ses3d_seismogram.sz / 1000.0,
                                      phase_list=['P', 'P660s'])
        P_minus_Pds_ref = travel_times[0]['time'] - travel_times[1]['time']

        #find delta slowness here
        travel_times = getTravelTimes(self.delta_deg,
                                      self.ses3d_seismogram.sz / 1000.0,
                                      phase_list=['P', 'P660s'])
        P_minus_Pds_here = travel_times[0]['time'] - travel_times[1]['time']
        time_shift = P_minus_Pds_ref - P_minus_Pds_here
        index_shift = int(time_shift / self.ses3d_seismogram.dt)
        shifted_trace = np.roll(self.prf, -1 * index_shift)
        self.prf = shifted_trace
        print "trace shifted by ", time_shift, " seconds"
Example #17
    def test_get_travel_times_ak135(self):
        """
        Tests getTravelTimes method using model ak135.
        """
        # read output results from original program
        filename = os.path.join(self.path, 'sample_ttimes_ak135.lst')
        with open(filename, 'rt') as fp:
            data = fp.readlines()

        # 1
        tt = getTravelTimes(delta=52.474, depth=611.0, model='ak135')[:16]
        lines = data[5:21]
        self.assertEqual(len(tt), len(lines))
        # check calculated tt against original
        for line, item in zip(lines, tt):
            parts = line[13:].split()
            self.assertEqual(item['phase_name'], parts[0].strip())
            self.assertAlmostEqual(item['time'], float(parts[1]), 1)
            # The takeoff angle is defined a bit differently in the new
            # version.
            if item["take-off angle"] < 0.0:
                item["take-off angle"] += 180.0
            self.assertAlmostEqual(item["take-off angle"], float(parts[2]), 0)
            self.assertAlmostEqual(item['dT/dD'], float(parts[3]), 1)

        # 2
        tt = getTravelTimes(delta=50.0, depth=300.0, model='ak135')[:17]
        lines = data[26:43]
        self.assertEqual(len(tt), len(lines))
        # check calculated tt against original
        for line, item in zip(lines, tt):
            parts = line[13:].split()
            self.assertEqual(item['phase_name'], parts[0].strip())
            self.assertAlmostEqual(item['time'], float(parts[1]), 1)
            if item["take-off angle"] < 0.0:
                item["take-off angle"] += 180.0
            self.assertAlmostEqual(item['take-off angle'], float(parts[2]), 0)
            self.assertAlmostEqual(item['dT/dD'], float(parts[3]), 1)
Example #18
    def test_getTravelTimesIASP91(self):
        """
        Tests getTravelTimes method using model iasp91.
        """
        # read output results from original program
        filename = os.path.join(self.path, 'sample_ttimes_iasp91.lst')
        with open(filename, 'rt') as fp:
            data = fp.readlines()

        # 1
        tt = getTravelTimes(delta=52.474, depth=611.0, model='iasp91')[:16]
        lines = data[5:21]
        self.assertEqual(len(tt), len(lines))
        # check calculated tt against original
        for line, item in zip(lines, tt):
            parts = line[13:].split()
            self.assertEqual(item['phase_name'], parts[0].strip())
            self.assertAlmostEqual(item['time'], float(parts[1].strip()), 1)
            if item["take-off angle"] < 0.0:
                item["take-off angle"] += 180.0
            self.assertAlmostEqual(item['take-off angle'], float(parts[2]), 0)
            self.assertAlmostEqual(item['dT/dD'], float(parts[3]), 1)

        # 2
        tt = getTravelTimes(delta=50.0, depth=300.0, model='iasp91')[:19]
        lines = data[26:45]
        self.assertEqual(len(tt), len(lines))
        # check calculated tt against original
        for line, item in zip(lines, tt):
            parts = line[13:].split()
            self.assertEqual(item['phase_name'], parts[0].strip())
            self.assertAlmostEqual(item['time'], float(parts[1]), 1)
            if item["take-off angle"] < 0.0:
                item["take-off angle"] += 180.0
            self.assertAlmostEqual(item['take-off angle'], float(parts[2]), 0)
            self.assertAlmostEqual(item['dT/dD'], float(parts[3]), 1)
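
Both tests above fold negative take-off angles back into the 0-180 degree convention before comparing against the reference listing; written as a small helper, that normalisation could look like this (a sketch, not part of the test suite):

def normalized_takeoff(item):
    # Map the signed take-off angle reported by getTravelTimes into 0-180 deg.
    angle = item['take-off angle']
    return angle + 180.0 if angle < 0.0 else angle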
Example #19
    def test_util_trace3(self):
        from obspy.taup.taup import getTravelTimes
        iasp91 = Iasp91(5., 5000)
        slat = np.array([-10, 0])
        slon = np.array([-160, 10])
        slowness = np.array([6.4, 3.2])
        azi = np.array([13., 170.])
        depth = np.array([200, 200])
        rpier, plat, plon = iasp91.pspier(depth, slat, slon, slowness, azi, phase='S')
        rpier2, plat2, plon2 = util.pspier(depth, slat, slon, slowness, azi)
        np.testing.assert_array_almost_equal(rpier, rpier2, 0)
        np.testing.assert_array_almost_equal(plat, plat2, 2)
        np.testing.assert_array_almost_equal(plon, plon2, 2)

        tp, rp, phip = iasp91.trace3(6.4, phase='P', till_turn=False)
        taup = getTravelTimes(phip[-1] * 180 / np.pi, 0)
        self.assertTrue(abs(tp[-1] - taup[0]['time']) < 2)
Example #20
 def plot_traveltimes(self):
     great_circle_distance = locations2degrees(
         self.event["latitude"], self.event["longitude"],
         self.data["coordinates"]["latitude"],
         self.data["coordinates"]["longitude"])
     tts = getTravelTimes(great_circle_distance, self.event["depth_in_km"],
                          model="ak135")
     for component in ["z", "n", "e"]:
         axis = getattr(self, "plot_axis_%s" % component)
         ymin, ymax = axis.get_ylim()
         for phase in tts:
             if phase["phase_name"].lower().startswith("p"):
                 color = "green"
             else:
                 color = "red"
             axis.axvline(x=phase["time"], ymin=-1, ymax=+1,
                          color=color, alpha=0.5)
         axis.set_ylim(ymin, ymax)
Example #21
    def traveltimes(self):

        logger.info('\033[31m Enter AUTOMATIC FILTER \033[0m')
        T = []
        Wdict = {}
        SNR = {}

        for i in self.StationMeta:

            #de = locations2degrees (float(self.Origin.lat), float(self.Origin.lon), float(i.lat), float(i.lon))
            de = loc2degrees(self.Origin, i)
            tt = getTravelTimes(delta=de,
                                depth=float(self.Origin.depth),
                                model='ak135')

            if tt[0]['phase_name'] == 'P':
                ptime = tt[0]['time']
                T.append(ptime)

            logger.info(
                '\033[31m \n\n+++++++++++++++++++++++++++++++++++++++++++++++++++ \033[0m'
            )
            print(i.getName(), i.lat, i.lon, ptime)
            ttime = ptime

            tw = self.calculateTimeWindows(ptime)
            try:
                w, snr = self.readWaveformsCross(i, tw, ttime)
                Wdict[i.getName()] = w
                SNR[i.getName()] = snr
            except Exception:
                continue

            logger.info('\033[31m Exit AUTOMATIC FILTER \033[0m')

        return Wdict, SNR
Example #22
def cc_core(ls_first, ls_second, identity_all, max_ts, print_sta):
    """
    Perform the main part of the cross correlation and create
    the cc.txt file.
    """

    global input

    try:

        cc_open = open('./cc.txt', 'a')

        tr1 = read(ls_first)[0]

        if input['phase'] != 'N':
            evsta_dist = util.locations2degrees(lat1 = tr1.stats.sac.evla, \
                                    long1 = tr1.stats.sac.evlo, lat2 = tr1.stats.sac.stla, \
                                    long2 = tr1.stats.sac.stlo)

            taup_tt = taup.getTravelTimes(delta=evsta_dist,
                                          depth=tr1.stats.sac.evdp)

            phase_exist = 'N'

            for tt_item in taup_tt:
                if tt_item['phase_name'] == input['phase']:
                    print('Requested phase:')
                    print(input['phase'])
                    print('------')
                    print(tt_item['phase_name'])
                    print('exists in the waveform!')
                    print('-----------------------')
                    t_phase = tt_item['time']

                    phase_exist = 'Y'
                    break

        if input['phase'] == 'N' or (input['phase'] != 'N'
                                     and phase_exist == 'Y'):

            # identity of the current waveform
            identity = tr1.stats.network + '.' + tr1.stats.station + '.' + \
                        tr1.stats.location + '.' + tr1.stats.channel

            # Keep the current identity in a new variable
            id_name = identity

            try:
                tr2 = read(os.path.join(input['second_path'], identity))[0]
            except Exception as error:
                # if it is not possible to read the identity in the second path
                # then change the network part of the identity based on
                # correction unit
                identity = input['corr_unit'] + '.' + tr1.stats.station + '.' + \
                        tr1.stats.location + '.' + tr1.stats.channel
                tr2 = read(os.path.join(input['second_path'], identity))[0]

            if input['resample'] != 'N':
                print('WARNING: you are using resample!!!')
                tr1.resample(input['resample'])
                tr2.resample(input['resample'])

            if input['tw'] == 'Y':
                t_cut_1 = tr1.stats.starttime + t_phase - input['preset']
                t_cut_2 = tr1.stats.starttime + t_phase + input['offset']
                tr1.trim(starttime=t_cut_1, endtime=t_cut_2)

                t_cut_1 = tr2.stats.starttime + t_phase - input['preset']
                t_cut_2 = tr2.stats.starttime + t_phase + input['offset']
                tr2.trim(starttime=t_cut_1, endtime=t_cut_2)

            if input['hlfilter'] == 'Y':
                tr1.filter('lowpass', freq=input['hfreq'], corners=2)
                tr2.filter('lowpass', freq=input['hfreq'], corners=2)
                tr1.filter('highpass', freq=input['lfreq'], corners=2)
                tr2.filter('highpass', freq=input['lfreq'], corners=2)

            # normalization of all three waveforms to the
            # max(max(tr1), max(tr2), max(tr3)) to keep the scales
            #maxi = max(abs(tr1.data).max(), abs(tr2.data).max(), abs(tr3.data).max())
            '''
            maxi = max(abs(tr1.data).max(), abs(tr2.data).max())
            tr1_data = tr1.data/abs(maxi)
            tr2_data = tr2.data/abs(maxi)
            tr3_data = tr3.data/abs(maxi)
            '''
            tr1.data = tr1.data / abs(max(tr1.data))
            tr2.data = tr2.data / abs(max(tr2.data))

            cc_np = tr1.stats.sampling_rate * max_ts
            np_shift, coeff = cross_correlation.xcorr(tr1, tr2, int(cc_np))
            t_shift = float(np_shift) / tr1.stats.sampling_rate

            # scale_str shows whether the scale of the waveforms are the same or not
            # if scale_str = 'Y' then the scale is correct.
            scale_str = 'Y'

            if abs(tr1.data).max() > 2.0 * abs(tr2.data).max():
                label_tr1 = ls_first.split('/')[-2]
                label_tr2 = ls_second[0].split('/')[-2]
                print('#####################################################')
                print("Scale is not correct! " + label_tr1 + '>' + label_tr2)
                print('#####################################################')
                scale_str = 'N'
            elif abs(tr2.data).max() >= 2.0 * abs(tr1.data).max():
                label_tr1 = ls_first.split('/')[-2]
                label_tr2 = ls_second[0].split('/')[-2]
                print('#####################################################')
                print("Scale is not correct! " + label_tr2 + '>' + label_tr1)
                print('#####################################################')
                scale_str = 'N'

            if not str(coeff) == 'nan':
                cc_open.writelines(id_name + ',' + str(round(coeff, 4)) + ',' + str(t_shift) + \
                                                ',' + scale_str + ',' + '\n')

            print "Cross Correlation:"
            print id_name
            print "Shift:       " + str(t_shift)
            print "Coefficient: " + str(coeff)
            print print_sta
            print '------------------'

            cc_open.close()

    except Exception as error:
        print('##################')
        print(error)
        print('##################')
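
The windowing step above, trimming each trace from t_phase - preset to t_phase + offset seconds relative to the trace start, is the piece most worth isolating; a hedged sketch with placeholder preset/offset defaults:

def trim_around_phase(tr, t_phase, preset=10.0, offset=60.0):
    # t_phase is the theoretical arrival in seconds after the trace start;
    # preset/offset are placeholder values, not the ones used by the script.
    t1 = tr.stats.starttime + t_phase - preset
    t2 = tr.stats.starttime + t_phase + offset
    return tr.trim(starttime=t1, endtime=t2)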
Example #23
def single_comparison():
    """
    one by one comparison of the waveforms in the first path with the second path.
    """

    client = Client()

    global input

    # identity of the waveforms (first and second paths) to be compared with each other
    identity_all = input['net'] + '.' + input['sta'] + '.' + \
                    input['loc'] + '.' + input['cha']
    ls_first = glob.glob(os.path.join(input['first_path'], identity_all))
    ls_second = glob.glob(os.path.join(input['second_path'], identity_all))

    for i in range(0, len(ls_first)):
        try:
            tr1 = read(ls_first[i])[0]

            if input['phase'] != 'N':
                evsta_dist = util.locations2degrees(lat1 = tr1.stats.sac.evla, \
                                        long1 = tr1.stats.sac.evlo, lat2 = tr1.stats.sac.stla, \
                                        long2 = tr1.stats.sac.stlo)

                taup_tt = taup.getTravelTimes(delta=evsta_dist,
                                              depth=tr1.stats.sac.evdp)

                phase_exist = 'N'

                for tt_item in taup_tt:
                    if tt_item['phase_name'] == input['phase']:
                        print('Requested phase:')
                        print(input['phase'])
                        print('------')
                        print(tt_item['phase_name'])
                        print('exists in the waveform!')
                        print('-----------------------')
                        t_phase = tt_item['time']

                        phase_exist = 'Y'
                        break

                if phase_exist != 'Y':
                    continue

            # identity of the current waveform
            identity = tr1.stats.network + '.' + tr1.stats.station + '.' + \
                        tr1.stats.location + '.' + tr1.stats.channel

            # tr1: first path, tr2: second path, tr3: Raw data
            #tr3 = read(os.path.join(input['first_path'], '..', 'BH_RAW', identity))[0]

            if input['resp_paz'] == 'Y':
                response_file = os.path.join(input['first_path'], '..',
                                             'Resp/RESP.' + identity)

                # Extract the PAZ info from response file
                paz = readRESP(response_file, unit=input['corr_unit'])

                poles = paz['poles']
                zeros = paz['zeros']
                scale_fac = paz['gain']
                sensitivity = paz['sensitivity']

                print(paz)

                # Convert Poles and Zeros (PAZ) to frequency response.
                h, f = pazToFreqResp(poles, zeros, scale_fac, \
                                1./tr1.stats.sampling_rate, tr1.stats.npts*2, freq=True)
                # Use the evalresp library to extract
                # instrument response information from a SEED RESP-file.
                resp = invsim.evalresp(t_samp = 1./tr1.stats.sampling_rate, \
                        nfft = tr1.stats.npts*2, filename = response_file, \
                        date = tr1.stats.starttime, units = input['corr_unit'].upper())

            # Keep the current identity in a new variable
            id_name = identity

            try:
                tr2 = read(os.path.join(input['second_path'], identity))[0]
            except Exception as error:
                # if it is not possible to read the identity in the second path
                # then change the network part of the identity based on
                # correction unit
                identity = input['corr_unit'] + '.' + tr1.stats.station + '.' + \
                        tr1.stats.location + '.' + tr1.stats.channel
                tr2 = read(os.path.join(input['second_path'], identity))[0]

            if input['resample'] != 'N':
                print('WARNING: you are using resample!!!')
                tr1.resample(input['resample'])
                tr2.resample(input['resample'])

            if input['tw'] == 'Y':
                t_cut_1 = tr1.stats.starttime + t_phase - input['preset']
                t_cut_2 = tr1.stats.starttime + t_phase + input['offset']
                tr1.trim(starttime=t_cut_1, endtime=t_cut_2)

                t_cut_1 = tr2.stats.starttime + t_phase - input['preset']
                t_cut_2 = tr2.stats.starttime + t_phase + input['offset']
                tr2.trim(starttime=t_cut_1, endtime=t_cut_2)

            if input['hlfilter'] == 'Y':
                tr1.filter('lowpass', freq=input['hfreq'], corners=2)
                tr2.filter('lowpass', freq=input['hfreq'], corners=2)
                tr1.filter('highpass', freq=input['lfreq'], corners=2)
                tr2.filter('highpass', freq=input['lfreq'], corners=2)

            # normalization of all three waveforms to the
            # max(max(tr1), max(tr2), max(tr3)) to keep the scales
            #maxi = max(abs(tr1.data).max(), abs(tr2.data).max(), abs(tr3.data).max())

            #maxi = max(abs(tr1.data).max(), abs(tr2.data).max())
            #tr1_data = tr1.data/abs(maxi)
            #tr2_data = tr2.data/abs(maxi)
            #tr3_data = tr3.data/abs(maxi)

            tr1_data = tr1.data / abs(max(tr1.data))
            tr2_data = tr2.data / abs(max(tr2.data))

            #tr1_data = tr1.data
            #tr2_data = tr2.data*1e9

            print(max(tr1.data))
            print(max(tr2.data))

            # create time arrays for tr1, tr2 and tr3
            time_tr1 = np.arange(0, tr1.stats.npts/tr1.stats.sampling_rate, \
                                                1./tr1.stats.sampling_rate)
            time_tr2 = np.arange(0, tr2.stats.npts/tr2.stats.sampling_rate, \
                                                1./tr2.stats.sampling_rate)
            #time_tr3 = np.arange(0, tr3.stats.npts/tr3.stats.sampling_rate, \
            #                                    1./tr3.stats.sampling_rate)

            # label for plotting
            label_tr1 = ls_first[i].split('/')[-2]
            label_tr2 = ls_second[i].split('/')[-2]
            label_tr3 = 'RAW'

            if input['resp_paz'] == 'Y':
                # start plotting
                plt.figure()
                plt.subplot2grid((3, 4), (0, 0), colspan=4, rowspan=2)
                #plt.subplot(211)

            plt.plot(time_tr1, tr1_data, color='blue', label=label_tr1, lw=3)
            plt.plot(time_tr2, tr2_data, color='red', label=label_tr2, lw=3)
            #plt.plot(time_tr3, tr3_data, color = 'black', ls = '--', label = label_tr3)

            plt.xlabel('Time (sec)', fontsize='xx-large', weight='bold')

            if input['corr_unit'] == 'dis':
                ylabel_str = 'Relative Displacement'
            elif input['corr_unit'] == 'vel':
                ylabel_str = 'Relative Vel'
            elif input['corr_unit'] == 'acc':
                ylabel_str = 'Relative Acc'

            plt.ylabel(ylabel_str, fontsize='xx-large', weight='bold')

            plt.xticks(fontsize='xx-large', weight='bold')
            plt.yticks(fontsize='xx-large', weight='bold')

            plt.legend(loc=1, prop={'size': 20})

            #-------------------Cross Correlation
            # 5 seconds as total length of samples to shift for cross correlation.

            cc_np = tr1.stats.sampling_rate * 3

            np_shift, coeff = cross_correlation.xcorr(tr1, tr2, int(cc_np))

            t_shift = float(np_shift) / tr1.stats.sampling_rate

            print "Cross Correlation:"
            print "Shift:       " + str(t_shift)
            print "Coefficient: " + str(coeff)

            plt.title('Single Comparison' + '\n' + str(t_shift) + \
                        ' sec , coeff: ' + str(round(coeff, 5)) + \
                        '\n' + id_name, \
                        fontsize = 'xx-large', weight = 'bold')

            if input['resp_paz'] == 'Y':
                # -----------------------
                #plt.subplot(223)
                plt.subplot2grid((3, 4), (2, 0), colspan=2)
                '''
                plt.plot(np.log10(f), np.log10(abs(resp)/(sensitivity*sensitivity)), \
                                            color = 'blue', label = 'RESP', lw=3)
                plt.plot(np.log10(f), np.log10(abs(h)/sensitivity), \
                                            color = 'red', label = 'PAZ', lw=3)
                '''
                plt.loglog(f, abs(resp)/(sensitivity*sensitivity), \
                                            color = 'blue', label = 'RESP', lw=3)
                plt.loglog(f, abs(h)/sensitivity, \
                                            color = 'red', label = 'PAZ', lw=3)

                #for j in [0.008, 0.012, 0.025, 0.5, 1, 2, 3, 4]:
                for j in [0]:
                    plt.axvline(np.log10(j), linestyle='--')

                #plt.xlabel('Frequency [Hz]\n(power of 10)', fontsize = 'xx-large', weight = 'bold')
                #plt.ylabel('Amplitude\n      (power of 10)', fontsize = 'xx-large', weight = 'bold')

                plt.xlabel('Frequency [Hz]',
                           fontsize='xx-large',
                           weight='bold')
                plt.ylabel('Amplitude', fontsize='xx-large', weight='bold')

                plt.xticks(fontsize='xx-large', weight='bold')

                #plt.yticks = MaxNLocator(nbins=4)
                plt.yticks(fontsize='xx-large', weight='bold')
                plt.legend(loc=2, prop={'size': 20})

                # -----------------------
                #plt.subplot(224)
                plt.subplot2grid((3, 4), (2, 2), colspan=2)

                #take negative of imaginary part
                phase_paz = np.unwrap(np.arctan2(h.imag, h.real))
                phase_resp = np.unwrap(np.arctan2(resp.imag, resp.real))
                #plt.plot(np.log10(f), phase_resp, color = 'blue', label = 'RESP', lw=3)
                #plt.plot(np.log10(f), phase_paz, color = 'red', label = 'PAZ', lw=3)

                plt.semilogx(f, phase_resp, color='blue', label='RESP', lw=3)
                plt.semilogx(f, phase_paz, color='red', label='PAZ', lw=3)

                #for j in [0.008, 0.012, 0.025, 0.5, 1, 2, 3, 4]:
                for j in [0.0]:
                    plt.axvline(np.log10(j), linestyle='--')

                #plt.xlabel('Frequency [Hz]\n(power of 10)', fontsize = 'xx-large', weight = 'bold')
                plt.xlabel('Frequency [Hz]',
                           fontsize='xx-large',
                           weight='bold')
                plt.ylabel('Phase [radian]',
                           fontsize='xx-large',
                           weight='bold')

                plt.xticks(fontsize='xx-large', weight='bold')
                plt.yticks(fontsize='xx-large', weight='bold')

                plt.legend(loc=3, prop={'size': 20})

                # title, centered above both subplots
                # make more room in between subplots for the ylabel of right plot
                plt.subplots_adjust(wspace=0.4, hspace=0.3)
                """
                # -----------------------
                plt.subplot(325)
                
                plt.plot(np.log10(f), np.log10(abs(resp)/(sensitivity*sensitivity)) - \
                                        np.log10(abs(h)/sensitivity), \
                                        color = 'black', label = 'RESP - PAZ')

                for j in [0.008, 0.012, 0.025, 0.5, 1, 2, 3, 4]:
                    plt.axvline(np.log10(j), linestyle = '--')

                plt.xlabel('Frequency [Hz] (power of 10)')
                plt.ylabel('Amplitude (power of 10)')

                plt.legend()
                
                # -----------------------
                plt.subplot(326)
                #take negative of imaginary part
                phase_paz = np.unwrap(np.arctan2(h.imag, h.real))
                phase_resp = np.unwrap(np.arctan2(resp.imag, resp.real))
                plt.plot(np.log10(f), np.log10(phase_resp) - np.log10(phase_paz), \
                                        color = 'black', label = 'RESP - PAZ')

                for j in [0.008, 0.012, 0.025, 0.5, 1, 2, 3, 4]:
                    plt.axvline(np.log10(j), linestyle = '--')

                plt.xlabel('Frequency [Hz] (power of 10)')
                plt.ylabel('Phase [radian] (power of 10)')

                plt.legend()

                # title, centered above both subplots
                # make more room in between subplots for the ylabel of right plot
                plt.subplots_adjust(wspace=0.3)
                """
            plt.show()

            print(str(i + 1) + '/' + str(len(ls_first)))
            print(ls_first[i])
            print('------------------')
            wait = raw_input(id_name)
            print('***************************')

        except Exception as error:
            print('##################')
            print(error)
            print('##################')
Example #24
def core_inversion(
    t_d,
    cmtloc,
    periods,
    MRF,
    observed_displacements,
    trlen,
    metadata,
    trlist,
    gfdir,
    return_gfs=False,
    residuals=False,
    OnlyGetFullGF=False,
    max_t_d=200,
):
    '''
    Perform the actual W-phase inversion.

    :param float t_d: Time delay
    :param tuple cmtloc: (lat, lon, depth) of the centroid's location.
    :param tuple periods: (Ta,Tb), passband periods.
    :param array MRF: Moment rate function.
    :param array observed_displacements: Array containing concatenated traces of observed disp.
    :param array trlen: Array containing the length of each trace.
    :param dict metadata: Dictionary with the metadata of each station.
    :param list trlist: List with the station id which will contribute to the inv.
    :param gfdir: Path to the greens functions or a GreensFunctions instances.
    :param bool return_gfs: *True* if the greens functions should be returned.
    :param bool residuals: *True*, return the 'raw' misfit from the least squares inversion,
        *False*, return the 'relative' misfit as a percentage: i.e. the 'raw' misfit divided by
        the norm of the synthetics.
    :param bool OnlyGetFullGF: *True*, return the greens function matrix for the maximum time delay (*max_t_d*)
        without performing the inversion, *False*, perform the inversion for the time delay given
        by *t_d*.
    :param numeric max_t_d: Maximum time delay to consider if *OnlyGetFullGF = True*.

    :return: What is returned depends on the values of the parameters as described below.

        - If *OnlyGetFullGF = True*, then just the greens function matrix.
        - Otherwise a tuple containing:

            1. the moment tensor components in Nm ['RR', 'PP', 'TT', 'TP', 'RT', 'RP'],
            2. the misfit (percent L2 norm misfit error of the solution), and,
               if *return_gfs = True*,
            3. the greens function matrix.
    '''
    logger.debug("core_inversion(t_d=%d, c=(%.2f, %.2f, %.2f), #tr=%d)", t_d, cmtloc[0], cmtloc[1], cmtloc[2], len(trlist))
    if isinstance(gfdir, GreensFunctions):
        greens = gfdir
    else:
        greens = GreensFunctions(gfdir)
    delta = greens.delta

    hyplat, hyplon, hypdep = cmtloc
    Ta, Tb = periods

    # Index of the first value that will be valid after convolution with MRF.
    first_valid_time = int(len(MRF)/2.)

    # the indices of the beginning and end of each trace in observed displacements
    indexes =  np.array(np.concatenate((np.array([0.]), np.cumsum(trlen))), dtype='int')


    if OnlyGetFullGF:
        max_t_d = int(max_t_d)
        Nst = len(trlen)
        GFmatrix = np.zeros((np.array(trlen, dtype=int).sum() + max_t_d*Nst, 5))
        tb = 0
    else:
        GFmatrix = np.zeros((np.array(trlen, dtype=int).sum(), 5))

    #### Inversion:
    for i, trid in enumerate(trlist):
        trmeta = metadata[trid]
        trlat = trmeta['latitude']
        trlon = trmeta['longitude']

        try:
            sta  = trmeta['sta']
        except KeyError:
            sta = None

        dist = locations2degrees(hyplat, hyplon, trlat, trlon)
        distm, azi, bazi  =  gps2dist_azimuth(hyplat, hyplon, trlat, trlon)
        t_p =  trmeta.get('ptime')

        if not t_p:
            from obspy.taup.taup import getTravelTimes
            t_p =  getTravelTimes(dist,hypdep)[0]['time']

        # Select greens function and perform moment tensor rotation in the azimuth
        # DETAIL: The greens functions are stored for zero azimuth (which we can do
        # because of "symmetry properties of a sphere"). Then to recover the correct
        # moment tensor (or equivalently synthetic trace) we rotate them. For details
        # see Kanamori and Rivera 2008.
        azi_r = -azi*np.pi/180
        synth = greens.select_rotated(trid[-1], dist, hypdep, azi_r)

        ##################################
        # Extract W phase from synthetics.

        # Use only what we think is noise to calculate and compensate the mean
        synth -= np.mean(synth[..., :60], axis=-1)[:, None]

        # Convolve with moment rate function
        synth = ndimage.convolve1d(synth, MRF, axis=-1, mode='nearest')\
            [:, first_valid_time-2:-first_valid_time]

        # Pad data back to original length
        fillvec1 = synth[:, 0, None] * np.ones(first_valid_time)
        fillvec2 = synth[:, -1, None] * np.ones(first_valid_time)
        synth = np.concatenate((fillvec1, synth, fillvec2), axis=-1)
        synth -= np.mean(synth[:, :60], axis=-1)[:, None]

        # Bandpass filter to extract W phase
        def wphase_filter(trace):
            return bandpassfilter(trace, delta, 4, 1./Tb, 1./Ta)
        synth = np.apply_along_axis(wphase_filter, -1, synth)

        if OnlyGetFullGF:
            synth = ltrim(synth, t_p - max_t_d, delta)
            synth = synth[:, :trlen[i] + max_t_d]
        else:
            synth = ltrim(synth, t_p - t_d, delta)
            synth = synth[:, :trlen[i]]

        if OnlyGetFullGF:
            ta = tb
            tb = ta + max_t_d + (indexes[i+1] - indexes[i])
        else:
            ta = indexes[i]
            tb = indexes[i+1]

        # the first two of the following lines are due to the constraint
        # that the volume does not change
        GFmatrix[ta:tb, 0] = synth[0][:] - synth[1][:]
        GFmatrix[ta:tb, 1] = synth[2][:] - synth[1][:]
        GFmatrix[ta:tb, 2] = synth[4][:]
        GFmatrix[ta:tb, 3] = synth[5][:]
        GFmatrix[ta:tb, 4] = synth[3][:]

    if OnlyGetFullGF:
        return GFmatrix

    # perform the inversion
    inversion = lstsq(GFmatrix, observed_displacements, rcond=None)
    M = inversion[0]

    # construct the synthetics
    syn = GFmatrix.dot(M)

    # set the 'sixth' element of the moment tensor (corresponds to the
    # constraint added 20 lines or so above)
    M = np.insert(M, 2, -M[0]-M[1])

    # extract the residuals and scale if required
    if residuals:
        misfit = float(inversion[1])
    else:
        misfit = 100.*np.sqrt(np.sum((syn-observed_displacements)**2)/np.sum(syn**2))

    if return_gfs:
        return M, misfit, GFmatrix

    return M, misfit
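
The only direct taup call in this routine is the fallback t_p = getTravelTimes(dist, hypdep)[0]['time'], which takes the first listed arrival as the P onset when the metadata carries no pick; a hedged standalone sketch of that fallback:

from obspy.taup.taup import getTravelTimes

def p_onset_or_pick(trmeta, dist_deg, depth_km):
    # Prefer a stored pick ('ptime'); otherwise fall back to the travel time
    # of the first arrival returned by getTravelTimes (assumed to be P-like).
    t_p = trmeta.get('ptime')
    if not t_p:
        t_p = getTravelTimes(dist_deg, depth_km)[0]['time']
    return t_p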
Example #25
def main(argv=sys.argv):

    #Earth's parameters
    #~ beta = 4.e3 #m/s
    #~ rho = 3.e3 #kg/m^3
    #~ mu = rho*beta*beta

    PLotSt = [
        "IU.TRQA.00.LHZ", "IU.LVC.00.LHZ", "II.NNA.00.LHZ", "IU.RAR.00.LHZ"
    ]

    #PlotSubf = [143, 133, 123, 113, 103, 93,
    #           83, 73, 63, 53]
    PlotSubf = [6, 3]

    #Set rup_vel = 0 to have a point source solution
    RupVel = 2.1  #Chilean eq from Lay et al
    t_h = 10.  # Half duration for each sf
    noiselevel = 0.0  # L1 norm level of noise
    mu = 40e9
    #W-Phase filter
    corners = 4.
    fmin = 0.001
    fmax = 0.005

    ### Data from Chilean 2010 EQ (Same as W phase inv.)
    strike = 18.
    dip = 18.
    rake = 104.  # 109.

    rakeA = rake + 45.
    rakeB = rake - 45.

    ### Fault's grid parameters
    nsx = 21  #Number of sf along strike
    nsy = 11  #Number of sf along dip
    flen = 600.  #Fault length [km] along strike
    fwid = 300.  #Fault width [km] along dip
    direc = 0  #Directivity 0 = bilateral
    Min_h = 10.  #Min depth of the fault

    ### Derived parameters:
    nsf = nsx * nsy
    sflen = flen / float(nsx)
    sfwid = fwid / float(nsy)
    swp = [1, 0, 2]  # useful to swap (lat,lon, depth)
    mindist = flen * fwid  # minimum distance to the hypocentre (initial value)
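    # Worked example: with flen = 600 km, fwid = 300 km, nsx = 21 and nsy = 11 the
    # grid has nsf = 231 subfaults, each sflen = 600/21 ~ 28.6 km long and
    # sfwid = 300/11 ~ 27.3 km wide.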

    ###Chessboard
    #weight = np.load("RealSol.npy")
    weight = np.zeros(nsf)
    weight[::2] = 1
    #weight[::2] = 1
    #~ weight[10]=15
    #~ weight[5001]=10
    #~ weight[3201]=2

    ## Setting dirs and reading files.
    GFdir = "/home/roberto/data/GFS/"
    workdir = os.path.abspath(".") + "/"
    datadir = workdir + "DATA/"
    tracesfilename = workdir + "goodtraces.dat"
    tracesdir = workdir + "WPtraces/"

    try:
        reqfilename = glob.glob(workdir + '*.syn.req')[0]
    except IndexError:
        print("There is no *.syn.req file in the directory")
        sys.exit()

    basename = reqfilename.split("/")[-1][:-4]

    if not os.path.exists(tracesfilename):
        print(tracesfilename, "does not exist.")
        sys.exit()

    if not os.path.exists(datadir):
        os.makedirs(datadir)

    if not os.path.exists(tracesdir):
        os.makedirs(tracesdir)

    tracesfile = open(tracesfilename)
    reqfile = open(reqfilename)

    trlist = readtraces(tracesfile)
    eqdata = readreq(reqfile)

    tracesfile.close()
    reqfile.close()

    ####Hypocentre from
    ### http://earthquake.usgs.gov/earthquakes/eqinthenews/2010/us2010tfan/
    cmteplat = -35.91  #-35.85#-36.03#-35.83
    cmteplon = -72.73  #-72.72#-72.83# -72.67
    cmtepdepth = 35.
    eq_hyp = (cmteplat, cmteplon, cmtepdepth)

    ############

    # Defining the sf system
    grid, sblt = fault_grid('CL-2010',
                            cmteplat,
                            cmteplon,
                            cmtepdepth,
                            direc,
                            Min_h,
                            strike,
                            dip,
                            rake,
                            flen,
                            fwid,
                            nsx,
                            nsy,
                            Verbose=False,
                            ffi_io=True,
                            gmt_io=True)

    print('CL-2010', cmteplat, cmteplon, cmtepdepth, direc, Min_h, strike, dip,
          rake, flen, fwid, nsx, nsy)
    print(grid[0][1])
    #sys.exit()
    #This calculation is inside of the loop
    #~ NP = [strike, dip, rake]
    #~ M = np.array(NodalPlanetoMT(NP))
    #~ Mp = np.sum(M**2)/np.sqrt(2)

    #############################################################################
    ######Determining the sf closest to the hypocentre:
    min_Dist_hyp_subf = flen * fwid
    for subf in range(nsf):
        sblat = grid[subf][1]
        sblon = grid[subf][0]
        sbdepth = grid[subf][2]
        sf_hyp = (sblat, sblon, sbdepth)
        Dist_hyp_subf = hypo2dist(eq_hyp, sf_hyp)
        if Dist_hyp_subf < min_Dist_hyp_subf:
            min_Dist_hyp_subf = Dist_hyp_subf
            min_sb_hyp = sf_hyp
            hyp_subf = subf
    ####Determining trimming times:
    test_tr = read(GFdir + "H003.5/PP/GF.0001.SY.LHZ.SAC")[0]
    t0 = test_tr.stats.starttime
    TrimmingTimes = {}  # Trimming window (start, end) for each station.
    A = 0
    for trid in trlist:
        metafile = workdir + "DATA/" + "META." + trid + ".xml"
        META = DU.getMetadataFromXML(metafile)[trid]
        stlat = META['latitude']
        stlon = META['longitude']
        dist =   locations2degrees(min_sb_hyp[0],min_sb_hyp[1],\
                                   stlat,stlon)
        parrivaltime = getTravelTimes(dist, min_sb_hyp[2])[0]['time']
        ta = t0 + parrivaltime
        tb = ta + round(15. * dist)
        TrimmingTimes[trid] = (ta, tb)
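        # Worked example: for a station 40 degrees from the closest subfault the
        # window opens at the predicted P arrival and closes 15 s per degree later,
        # i.e. tb - ta = round(15 * 40) = 600 s.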

    ###########################

    DIST = []
    # Ordering the stations in terms of distance
    for trid in trlist:
        metafile = workdir + "DATA/" + "META." + trid + ".xml"
        META = DU.getMetadataFromXML(metafile)[trid]
        lat = META['latitude']
        lon = META['longitude']
        trdist = locations2degrees(cmteplat, cmteplon, lat, lon)
        DIST.append(trdist)

    DistIndex = lstargsort(DIST)
    trlist = [trlist[i] for i in DistIndex]

    stdistribution = StDistandAzi(trlist, eq_hyp, workdir + "DATA/")
    StDistributionPlot(stdistribution)
    #exit()
    #Main loop

    for subf in range(nsf):
        print(subf)
        sflat = grid[subf][1]
        sflon = grid[subf][0]
        sfdepth = grid[subf][2]
        #~ strike = grid[subf][3] #+ 360.
        #~ dip    = grid[subf][4]
        #~ rake   = grid[subf][5] #
        NP = [strike, dip, rake]
        NPA = [strike, dip, rakeA]
        NPB = [strike, dip, rakeB]

        M = np.array(NodalPlanetoMT(NP))
        MA = np.array(NodalPlanetoMT(NPA))
        MB = np.array(NodalPlanetoMT(NPB))
        #Time delay is calculated as the time in which
        #the rupture reach the subfault

        sf_hyp = (sflat, sflon, sfdepth)
        Dist_ep_subf = hypo2dist(eq_hyp, sf_hyp)

        if Dist_ep_subf < mindist:
            mindist = Dist_ep_subf
            minsubf = subf

        if RupVel == 0:
            t_d = eqdata['time_shift']
        else:
            t_d = round(Dist_ep_subf / RupVel)  #-59.
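        # Worked example: a subfault 105 km from the hypocentre with RupVel = 2.1 km/s
        # gets a rupture-onset delay of t_d = round(105 / 2.1) = 50 s; with RupVel = 0
        # the point-source time shift from the request file is used instead.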

        print(sflat, sflon, sfdepth)
        # Looking for the best depth dir:
        depth = []
        depthdir = []
        for file in os.listdir(GFdir):
            if file[-2:] == ".5":
                depthdir.append(file)
                depth.append(float(file[1:-2]))
        BestDirIndex = np.argsort(abs(sfdepth\
                                  - np.array(depth)))[0]
        hdir = GFdir + depthdir[BestDirIndex] + "/"

        ###

        SYN = np.array([])
        SYNA = np.array([])
        SYNB = np.array([])
        for trid in trlist:

            metafile = workdir + "DATA/" + "META." + trid + ".xml"
            META = DU.getMetadataFromXML(metafile)[trid]
            lat = META['latitude']
            lon = META['longitude']

            #Subfault loop
            #GFs Selection:
            ## Change to the following loop

            dist = locations2degrees(sflat, sflon, lat, lon)
            azi = -np.pi / 180. * gps2DistAzimuth(lat, lon, sflat, sflon)[2]
            trPPsy,  trRRsy, trRTsy,  trTTsy = \
                                       GFSelectZ(hdir,dist)

            trROT = MTrotationZ(azi, trPPsy, trRRsy, trRTsy, trTTsy)
            orig = trROT[0].stats.starttime
            dt = trROT[0].stats.delta

            trianglen = 2 * int(t_h / dt) - 1
            FirstValid = int(trianglen / 2) + 1  # to delete
            window = triang(trianglen)
            window /= np.sum(window)
            #window = np.array([1.])
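            # Worked example: with t_h = 10 s and dt = 1 s (LH* channels) this gives
            # trianglen = 2*10 - 1 = 19 samples, a triangular moment-rate function
            # normalised to unit area, and FirstValid = 10.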

            parrivaltime = getTravelTimes(dist, sfdepth)[0]['time']

            t1 = TrimmingTimes[trid][0] - t_d
            t2 = TrimmingTimes[trid][1] - t_d

            for trR in trROT:
                trR.data *= 10.**-21  ## To get M in Nm
                trR.data -= trR.data[0]
                AUX1 = len(trR)
                trR.data = convolve(trR.data, window, mode='valid')
                AUX2 = len(trR)
                mean = np.mean(np.hstack((trR.data[0]*np.ones(FirstValid),\
                               trR.data[:int(60./trR.stats.delta - FirstValid + 1)])))
                #mean = np.mean(trR.data[:60])
                trR.data -= mean
                trR.data = bp.bandpassfilter(trR.data,len(trR), trR.stats.delta,\
                                             corners , 1 , fmin, fmax)
                t_l = dt * 0.5 * (AUX1 - AUX2)
                trR.trim(t1 - t_l, t2 - t_l, pad=True, fill_value=trR.data[0]
                         )  #We lost t_h due to the convolution

            #~ for trR in trROT:
            #~ trR.data *= 10.**-23 ## To get M in Nm
            #~ trR.data -= trR.data[0]

            #~ trR.data = convolve(trR.data,window,mode='same')

            #~ #mean = np.mean(np.hstack((trR.data[0]*np.ones(FirstValid),\
            #~ #trR.data[:60./trR.stats.delta*1.-FirstValid+1])))
            #~ mean = np.mean(trR.data[:60])
            #~ trR.data -= mean
            #~ trR.data = bp.bandpassfilter(trR.data,len(trR), trR.stats.delta,\
            #~ corners , 1 , fmin, fmax)

            #~ trR.trim(t1,t2,pad=True, fill_value=trR.data[0])

            trROT = np.array(trROT)
            syn = np.dot(trROT.T, M)
            synA = np.dot(trROT.T, MA)
            synB = np.dot(trROT.T, MB)

            SYN = np.append(SYN, syn)
            SYNA = np.append(SYNA, synA)
            SYNB = np.append(SYNB, synB)

        print(np.shape(A), np.shape(np.array([SYN])))
        if subf == 0:
            A = np.array([SYN])
            AA = np.array([SYNA])
            AB = np.array([SYNB])
        else:
            A = np.append(A, np.array([SYN]), 0)
            AA = np.append(AA, np.array([SYNA]), 0)
            AB = np.append(AB, np.array([SYNB]), 0)

    AC = np.vstack((AA, AB))
    print(np.shape(AC))
    print(np.shape(weight))
    B = np.dot(A.T, weight)
    stsyn = Stream()
    n = 0
    Ntraces = {}
    for trid in trlist:
        spid = trid.split(".")
        print(trid)
        NMIN = int(round(1 + (TrimmingTimes[trid][1] - TrimmingTimes[trid][0]) / dt))
        Ntraces[trid] = (n, NMIN + n)
        trsyn = Trace(B[n:NMIN + n])
        n += NMIN
        trsyn.stats.network = spid[0]
        trsyn.stats.station = spid[1]
        trsyn.stats.location = spid[2]
        trsyn.stats.channel = spid[3]
        trsyn = AddNoise(trsyn, level=noiselevel)
        #trsyn.stats.starttime =
        stsyn.append(trsyn)

    stsyn.write(workdir + "WPtraces/" + basename + ".decov.trim.mseed",
                format="MSEED")

    #####################################################
    # Plotting:
    #####################################################
    #we are going to reflect the y axis later, so:
    print(minsubf)
    hypsbloc = [minsubf // nsy, -(minsubf % nsy) - 2]

    #Creating the strike and dip axis:
    StrikeAx = np.linspace(0, flen, nsx + 1)
    DipAx = np.linspace(0, fwid, nsy + 1)
    DepthAx = DipAx * np.sin(np.pi / 180. * dip) + Min_h
    hlstrike = StrikeAx[hypsbloc[0]] + sflen * 0.5

    hldip = DipAx[hypsbloc[1]] + sfwid * 0.5
    hldepth = DepthAx[hypsbloc[1]] + sfwid * 0.5 * np.sin(np.pi / 180. * dip)

    StrikeAx = StrikeAx - hlstrike
    DipAx = DipAx - hldip

    XX, YY = np.meshgrid(StrikeAx, DepthAx)
    XX, ZZ = np.meshgrid(StrikeAx, DipAx)

    sbarea = sflen * sfwid

    SLIPS = weight.reshape(nsx, nsy).T  #[::-1,:]
    SLIPS /= mu * 1.e6 * sbarea

    ######Plot:#####################
    plt.figure()
    ax = host_subplot(111)
    im = ax.pcolor(XX, YY, SLIPS, cmap="jet")
    ax.set_ylabel('Depth [km]')
    ax.set_ylim(DepthAx[-1], DepthAx[0])

    # Creating a twin plot
    ax2 = ax.twinx()
    #im2 = ax2.pcolor(XX, ZZ, SLIPS[::-1,:], cmap="Greys")
    im2 = ax2.pcolor(XX, ZZ, SLIPS[::-1, :], cmap="jet")

    ax2.set_ylabel('Distance along the dip [km]')
    ax2.set_xlabel('Distance along the strike [km]')
    ax2.set_ylim(DipAx[0], DipAx[-1])
    ax2.set_xlim(StrikeAx[0], StrikeAx[-1])

    ax.axis["bottom"].major_ticklabels.set_visible(False)
    ax2.axis["bottom"].major_ticklabels.set_visible(False)
    ax2.axis["top"].set_visible(True)
    ax2.axis["top"].label.set_visible(True)

    divider = make_axes_locatable(ax)
    cax = divider.append_axes("bottom", size="5%", pad=0.1)
    cb = plt.colorbar(im, cax=cax, orientation="horizontal")
    cb.set_label("Slip [m]")
    ax2.plot([0], [0], '*', ms=225. / (nsy + 4))
    ax2.set_xticks(ax2.get_xticks()[1:-1])
    #ax.set_yticks(ax.get_yticks()[1:])
    #ax2.set_yticks(ax2.get_yticks()[:-1])

    #########Plotting the selected traces:
    nsp = len(PLotSt) * len(PlotSubf)
    plt.figure(figsize=(13, 11))
    plt.title("Synthetics for rake = " + str(round(rake)))
    mindis = []
    maxdis = []
    for i, trid in enumerate(PLotSt):
        x = np.arange(0, Ntraces[trid][1] - Ntraces[trid][0], dt)
        for j, subf in enumerate(PlotSubf):
            y = A[subf, Ntraces[trid][0]:Ntraces[trid][1]]
            if j == 0:
                yy = y
            else:
                yy = np.vstack((yy, y))
        maxdis.append(np.max(yy))
        mindis.append(np.min(yy))

    for i, trid in enumerate(PLotSt):
        x = np.arange(0, Ntraces[trid][1] - Ntraces[trid][0], dt)

        for j, subf in enumerate(PlotSubf):
            y = A[subf, Ntraces[trid][0]:Ntraces[trid][1]]
            plt.subplot2grid((len(PlotSubf), len(PLotSt)), (j, i))
            plt.plot(x, y, linewidth=2.5)
            if j == 0:
                plt.title(trid)
            fig = plt.gca()
            fig.axes.get_yaxis().set_ticks([])
            fig.set_ylabel(str(subf), rotation=0)
            fig.set_xlim((x[0], x[-1]))
            fig.set_ylim((mindis[i], maxdis[i]))
            if subf != PlotSubf[-1]:
                fig.axes.get_xaxis().set_ticks([])

    plt.show()
Example #31
0
	C = S.select(component="E")
	print(C)
	Clist = []
	for a in C[0].data.tolist():
		if a > -9*10**9:
			Clist.append(float(a))
		else:
			Clist.append(float('NaN'))
	data['E'] = Clist

R = dict()
R['time_start']=datetime2matlabdn(S[0].stats.starttime.datetime)
R['time_end']=datetime2matlabdn(S[0].stats.endtime.datetime)
R['sampling_rate'] = S[0].stats.sampling_rate;
R['station'] = S[0].stats.station;
R['latitude'] = lat
R['longitude'] = lon
R['elevation'] = ele
R['data'] = data

if event:
	event_params = event.split(',')
	distance = locations2degrees(float(event_params[0]), float(event_params[1]), lat, lon)
	# event string assumed to be "lat,lon,depth,origin-time"; use the depth field
	tt = getTravelTimes(delta=distance, depth=float(event_params[2]))
	R['first_arrival'] = datetime2matlabdn(datetime.strptime(event_params[3], "%Y-%m-%d %H:%M:%S") + timedelta(seconds=tt[0]['time']))
	
	
	
scipy.io.savemat(options.output, R)
print(S)
Example #32
0
selv = stadic.get(net+"."+sta).get("elevation")
#print slat
#print slon

# Calculate gcarc, baz and az:
irisclient = iclient.Client()
gcbazaz = irisclient.distaz(stalat=slat, stalon=slon, evtlat=elat, evtlon=elon) 
gcarc = gcbazaz.get('distance')
baz = gcbazaz.get('backazimuth')
az = gcbazaz.get('azimuth')
#print gcarc
#print baz
#print az

# Calculate Travel Time:
ttlist = getTravelTimes(delta=gcarc, depth=edep, model='iasp91')
for num in range(len(ttlist)):
    ttdic=ttlist[num]
    phase=ttdic.get('phase_name')
    if phase == 'P':
         ptoa=ttdic.get('take-off angle')
         ptime=ttdic.get('time')
    elif phase == 'S':
         stoa=ttdic.get('take-off angle')
         stime=ttdic.get('time')
#         print toa
#         print ptime
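# A more compact alternative to the index loop above (same behaviour, keeps the
# last entry if a phase name repeats):
#     by_phase = {item['phase_name']: item for item in ttlist}
#     ptime, ptoa = by_phase['P']['time'], by_phase['P']['take-off angle']
#     stime, stoa = by_phase['S']['time'], by_phase['S']['take-off angle']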

# Fix date format:
syear=str(year)
Example #33
0
def select_sta():
    
    """
    Select required stations
    """
    
    global input
    
    map_proj = Basemap(projection='cyl', llcrnrlat=-90,urcrnrlat=90,\
                llcrnrlon=-180,urcrnrlon=180, resolution='c')
    ev_file = open(os.path.join(os.getcwd(), 'quake_req.txt'), 'r')
    ev_add = ev_file.read().split('\n')[:-1]
    select = open(os.path.join(os.getcwd(), input['file'] + '.dat'), 'w')
    select.close()
    
    for k in range(0, len(ev_add)):
        
        '''
        select = open(os.path.join(os.getcwd(), \
                input['file'] + '-' + ev_add[k].split('/')[-1] + \
                '.dat'), 'w')
        '''
        
        (quake_d, quake_t) = read_quake(ev_add[k])
        
        list_sta = glob.glob(os.path.join(ev_add[k], 'BH', \
                                input['identity']))
        
        for i in range(0, len(list_sta)):
            
            try:
                
                st = read(list_sta[i])
                print('***************************************')
                print(str(i) + '/' + str(len(list_sta)) + ' -- ' +
                      str(k) + '/' + str(len(ev_add)))
                print(list_sta[i].split('/')[-1])
                
                info_sac = st[0].stats['sac']
                
                if input['all_sta'] is None:
                    
                    dist = locations2degrees(lat1 = quake_d['lat'], \
                            long1 = quake_d['lon'], lat2 = info_sac['stla'], \
                            long2 = info_sac['stlo'])
                    tt = getTravelTimes(delta=dist, depth=quake_d['dp'], \
                                            model=input['model'])
                    
                    for m in range(0, len(tt)):
                        
                        if tt[m]['phase_name'] in input['phase']:

                            try:
                                print('--------------------')
                                print(list_sta[i].split('/')[-1] + ' has ' +
                                      tt[m]['phase_name'] + ' phase')
                                
                                if input['freq'] != None:
                                    st[0].decimate(int(round(\
                                        st[0].stats['sampling_rate'])/input['freq']), \
                                        no_filter=False)
                                    
                                    if st[0].stats['sampling_rate'] != input['freq']:
                                        print(list_sta[i].split('/')[-1])
                                        print(st[0].stats['sampling_rate'])
                                        print('------------------------------------------')
                                '''
                                np_evt = round((events[0]['datetime'] - st[0].stats['starttime'])*st[0].stats['sampling_rate'])
                                np_pha = np_evt + round(tt[m]['time']*st[0].stats['sampling_rate'])
                                
                                select = open(Address_events + '/' + events[l]['event_id'] + '/IRIS/info/' + name_select, 'a')
                                '''
                                if tt[m]['phase_name'] != 'Pdiff':
                                    lat_1 = str(quake_d['lat'])
                                    lon_1 = str(quake_d['lon'])
                                    lat_2 = str(info_sac['stla'])
                                    lon_2 = str(info_sac['stlo'])
                                elif tt[m]['phase_name'] == 'Pdiff':
                                    dist_limit = 97.0
                                    num_gcp = 1000
                                    gcp = map_proj.gcpoints(quake_d['lon'], \
                                            quake_d['lat'], info_sac['stlo'], \
                                            info_sac['stla'], num_gcp)

                                    if dist >= dist_limit:
                                        diff_dist = dist - dist_limit
                                                        
                                        req_gcp = diff_dist*(float(num_gcp)/dist)
                                        req_gcp = round(req_gcp)/2

                                        mid_p = len(gcp[0]) // 2
                                        #before = int(mid_p - req_gcp)
                                        #after = int(mid_p + req_gcp)
                                        before = mid_p - int(2.0 * len(gcp[0])/dist)
                                        after = mid_p + int(2.0 * len(gcp[0])/dist)
                                        
                                        x_p, y_p = gcp
                                        lat_1 = y_p[before]
                                        lat_2 = y_p[after]
                                        lon_1 = x_p[before]
                                        lon_2 = x_p[after]
                                        
                                ph_info = tt[m]['phase_name'] + ',' + \
                                    str(dist) + ',' + \
                                    str(tt[m]['time']) + ',' + \
                                    str(st[0].stats['sampling_rate']) + ',' + \
                                    st[0].stats['network'] + ',' + \
                                    st[0].stats['station'] + \
                                    ',' + st[0].stats['location'] + ',' + \
                                    st[0].stats['channel'] + ',' + \
                                    str(info_sac['stla']) + ',' + \
                                    str(info_sac['stlo']) + ',' + \
                                    str(info_sac['stdp']) + ',' + \
                                    str(info_sac['stel']) + ',' + \
                                    str(quake_d['lat']) + ',' + \
                                    str(quake_d['lon']) + ',' + \
                                    str(quake_d['dp']) + ',' + \
                                    '-----' + ',' + \
                                    str(lat_1) + ',' + \
                                    str(lon_1) + ',' + \
                                    str(lat_2) + ',' + \
                                    str(lon_2) + ',' + \
                                    '-----' + ',' + \
                                    ev_add[k].split('/')[-1] + ',' + \
                                    list_sta[i] + '\n'
                                    
                                #select = open(os.path.join(os.getcwd(), \
                                #    input['file'] + '-' + \
                                #    ev_add[k].split('/')[-1] + '.dat'), 'a')
                                select = open(os.path.join(os.getcwd(), \
                                    input['file'] + '.dat'), 'a')
                                select.writelines(ph_info)
                                select.close()
                            
                            except Exception as e:
                                print(e)
                
                elif input['all_sta'] is not None:
                    
                    ph_info = 'NA' + ',' + 'NA' + ',' + \
                    'NA' + ',' + \
                    str(st[0].stats['sampling_rate']) + ',' + \
                    st[0].stats['network'] + ',' + st[0].stats['station'] + \
                    ',' + st[0].stats['location'] + ',' + \
                    st[0].stats['channel'] + ',' + \
                    str(info_sac['stla']) + ',' + \
                    str(info_sac['stlo']) + ',' + \
                    str(info_sac['stdp']) + ',' + \
                    str(info_sac['stel']) + ',' + \
                    str(quake_d['lat']) + ',' + str(quake_d['lon']) + ',' + \
                    str(quake_d['dp']) + ',' + \
                    ev_add[k].split('/')[-1] + ',' + \
                    list_sta[i] + '\n'
                    
                    '''
                    select = open(os.path.join(os.getcwd(), \
                        input['file'] + '-' + \
                        ev_add[k].split('/')[-1] + '.dat'), 'a')
                    '''
                    select = open(os.path.join(os.getcwd(), \
                        input['file'] + '.dat'), 'a')
                    select.writelines(ph_info)
                    select.close()
            
            except Exception as e:
                print(e)
                pass
Example #34
0
def AXISEM_Phase():
    
    """
    Create STATIONS file as an input for AXISEM
    """
    
    global input
    
    events, address_events = quake_info(input['address'], 'info')
    
    for i in range(0, len(events)):
        
        
        
        sta_ev_select = []
        sta_ev = read_station_event(address_events[i])
        
        for j in range(0, len(sta_ev[i])):
            dist = locations2degrees(lat1 = float(sta_ev[i][j][9]), \
                long1 = float(sta_ev[i][j][10]), lat2 = float(sta_ev[i][j][4]), \
                long2 = float(sta_ev[i][j][5]))
            tt = getTravelTimes(delta=dist, depth=float(sta_ev[i][j][11]), \
                                model=input['model'])
                                
            sta_ev[i][j][8] = sta_ev[i][j][0] + '_' + sta_ev[i][j][1]
            
            for m in range(0, len(tt)):
                if tt[m]['phase_name'] in input['phase']:
                    sta_ev_select.append(sta_ev[i][j])
                    
        sta_ev_req = list(unique_items(sta_ev_select))
        
        if os.path.isfile(os.path.join(address_events[i],\
                            'info', 'receivers.dat')):
            os.remove(os.path.join(address_events[i],\
                            'info', 'receivers.dat'))
        
        if os.path.isfile(os.path.join(address_events[i],\
                            'info', 'STATIONS')):
            os.remove(os.path.join(address_events[i],\
                            'info', 'STATIONS'))
        
        receivers_file = open(os.path.join(address_events[i],\
                            'info', 'receivers.dat'), 'a+') 
        
        receivers_file.writelines(str(len(sta_ev_req)) + '\n')
        for j in range(0, len(sta_ev_req)):
            STATIONS_file = open(os.path.join(address_events[i],\
                                'info', 'STATIONS'), 'a+') 
            receivers_file = open(os.path.join(address_events[i],\
                                'info', 'receivers.dat'), 'a+') 
            STATIONS_file.writelines(sta_ev_req[j][1] + \
                            ' '*(5 - len('%s' % sta_ev_req[j][0])) + '%s' \
                            % sta_ev_req[j][0] + \
                            ' '*(9 - len('%.2f' % float(sta_ev_req[j][4]))) + '%.2f' \
                            % float(sta_ev_req[j][4]) + \
                            ' '*(9 - len('%.2f' % float(sta_ev_req[j][5]))) + '%.2f' \
                            % float(sta_ev_req[j][5]) + \
                            ' '*(15 - len('0.0000000E+00')) + \
                            '0.0000000E+00' + \
                            ' '*(15 - len('0.0000000E+00')) + \
                            '0.0000000E+00' + '\n')
            receivers_file.writelines( \
                            str(round(90.0 - float(sta_ev_req[j][4]), 1)) + ' ' + \
                            str(float(sta_ev_req[j][5])) + \
                            '\n')
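            # Note: the receivers.dat line stores colatitude, i.e. 90.0 - latitude;
            # a station at 40.7 N, for example, is written as 49.3.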
Example #35
0
def AXISEM_Phase():
    """
    Create STATIONS file as an input for AXISEM
    """

    global input

    events, address_events = quake_info(input['address'], 'info')

    for i in range(0, len(events)):

        sta_ev_select = []
        sta_ev = read_station_event(address_events[i])

        for j in range(0, len(sta_ev[i])):
            dist = locations2degrees(lat1 = float(sta_ev[i][j][9]), \
                long1 = float(sta_ev[i][j][10]), lat2 = float(sta_ev[i][j][4]), \
                long2 = float(sta_ev[i][j][5]))
            tt = getTravelTimes(delta=dist, depth=float(sta_ev[i][j][11]), \
                                model=input['model'])

            sta_ev[i][j][8] = sta_ev[i][j][0] + '_' + sta_ev[i][j][1]

            for m in range(0, len(tt)):
                if tt[m]['phase_name'] in input['phase']:
                    sta_ev_select.append(sta_ev[i][j])

        sta_ev_req = list(unique_items(sta_ev_select))

        if os.path.isfile(os.path.join(address_events[i],\
                            'info', 'receivers.dat')):
            os.remove(os.path.join(address_events[i],\
                            'info', 'receivers.dat'))

        if os.path.isfile(os.path.join(address_events[i],\
                            'info', 'STATIONS')):
            os.remove(os.path.join(address_events[i],\
                            'info', 'STATIONS'))

        receivers_file = open(os.path.join(address_events[i],\
                            'info', 'receivers.dat'), 'a+')

        receivers_file.writelines(str(len(sta_ev_req)) + '\n')
        for j in range(0, len(sta_ev_req)):
            STATIONS_file = open(os.path.join(address_events[i],\
                                'info', 'STATIONS'), 'a+')
            receivers_file = open(os.path.join(address_events[i],\
                                'info', 'receivers.dat'), 'a+')
            STATIONS_file.writelines(sta_ev_req[j][1] + \
                            ' '*(5 - len('%s' % sta_ev_req[j][0])) + '%s' \
                            % sta_ev_req[j][0] + \
                            ' '*(9 - len('%.2f' % float(sta_ev_req[j][4]))) + '%.2f' \
                            % float(sta_ev_req[j][4]) + \
                            ' '*(9 - len('%.2f' % float(sta_ev_req[j][5]))) + '%.2f' \
                            % float(sta_ev_req[j][5]) + \
                            ' '*(15 - len('0.0000000E+00')) + \
                            '0.0000000E+00' + \
                            ' '*(15 - len('0.0000000E+00')) + \
                            '0.0000000E+00' + '\n')
            receivers_file.writelines( \
                            str(round(90.0 - float(sta_ev_req[j][4]), 1)) + ' ' + \
                            str(float(sta_ev_req[j][5])) + \
                            '\n')
Example #36
0
def cc_core(ls_first, ls_second, identity_all, max_ts, print_sta):
    
    """
    Perform the main part of the cross correlation and creating 
    the cc.txt file
    """
    
    global input
    
    try:
        
        cc_open = open('./cc.txt', 'a')
        
        tr1 = read(ls_first)[0]
            
        if input['phase'] != 'N':
            evsta_dist = util.locations2degrees(lat1 = tr1.stats.sac.evla, \
                                    long1 = tr1.stats.sac.evlo, lat2 = tr1.stats.sac.stla, \
                                    long2 = tr1.stats.sac.stlo)
            
            taup_tt = taup.getTravelTimes(delta = evsta_dist, depth = tr1.stats.sac.evdp)
            
            phase_exist = 'N'
            
            for tt_item in taup_tt:
                if tt_item['phase_name'] == input['phase']:
                    print('Requested phase:')
                    print(input['phase'])
                    print('------')
                    print(tt_item['phase_name'])
                    print('exists in the waveform!')
                    print('-----------------------')
                    t_phase = tt_item['time']
                    
                    phase_exist = 'Y'
                    break
                    
        if input['phase'] == 'N' or (input['phase'] != 'N' and phase_exist == 'Y'):
            
            # identity of the current waveform
            identity = tr1.stats.network + '.' + tr1.stats.station + '.' + \
                        tr1.stats.location + '.' + tr1.stats.channel
            
            # Keep the current identity in a new variable
            id_name = identity
            
            try:
                tr2 = read(os.path.join(input['second_path'], identity))[0]
            except Exception as error:
                # if it is not possible to read the identity in the second path
                # then change the network part of the identity based on
                # correction unit
                identity = input['corr_unit'] + '.' + tr1.stats.station + '.' + \
                        tr1.stats.location + '.' + tr1.stats.channel
                tr2 = read(os.path.join(input['second_path'], identity))[0]
            
            if input['resample'] != 'N':
                print('WARNING: you are using resample!!!')
                tr1.resample(input['resample'])
                tr2.resample(input['resample'])
            
            if input['tw'] == 'Y':
                t_cut_1 = tr1.stats.starttime + t_phase - input['preset']
                t_cut_2 = tr1.stats.starttime + t_phase + input['offset']
                tr1.trim(starttime = t_cut_1, endtime = t_cut_2)
                
                t_cut_1 = tr2.stats.starttime + t_phase - input['preset']
                t_cut_2 = tr2.stats.starttime + t_phase + input['offset']
                tr2.trim(starttime = t_cut_1, endtime = t_cut_2)
            
            if input['hlfilter'] == 'Y':
                tr1.filter('lowpass', freq=input['hfreq'], corners=2)
                tr2.filter('lowpass', freq=input['hfreq'], corners=2)
                tr1.filter('highpass', freq=input['lfreq'], corners=2)
                tr2.filter('highpass', freq=input['lfreq'], corners=2)
            
            # normalization of all three waveforms to the 
            # max(max(tr1), max(tr2), max(tr3)) to keep the scales
            #maxi = max(abs(tr1.data).max(), abs(tr2.data).max(), abs(tr3.data).max())
            '''
            maxi = max(abs(tr1.data).max(), abs(tr2.data).max())
            tr1_data = tr1.data/abs(maxi)
            tr2_data = tr2.data/abs(maxi)
            tr3_data = tr3.data/abs(maxi)
            '''
            tr1.data = tr1.data/abs(max(tr1.data))
            tr2.data = tr2.data/abs(max(tr2.data))
        
            cc_np = tr1.stats.sampling_rate * max_ts
            np_shift, coeff = cross_correlation.xcorr(tr1, tr2, int(cc_np))
            t_shift = float(np_shift)/tr1.stats.sampling_rate
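            # Worked example: at 20 samples/s and max_ts = 10 s the search window is
            # +/- 200 samples; a best lag of np_shift = 40 samples then corresponds
            # to t_shift = 40 / 20 = 2.0 s.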
            
            # scale_str shows whether the scale of the waveforms are the same or not
            # if scale_str = 'Y' then the scale is correct.
            scale_str = 'Y'
            
            if abs(tr1.data).max() > 2.0 * abs(tr2.data).max():
                label_tr1 = ls_first.split('/')[-2]
                label_tr2 = ls_second[0].split('/')[-2]
                print('#####################################################')
                print("Scale is not correct! " + label_tr1 + '>' + label_tr2)
                print('#####################################################')
                scale_str = 'N'
            elif abs(tr2.data).max() >= 2.0 * abs(tr1.data).max():
                label_tr1 = ls_first.split('/')[-2]
                label_tr2 = ls_second[0].split('/')[-2]
                print('#####################################################')
                print("Scale is not correct! " + label_tr2 + '>' + label_tr1)
                print('#####################################################')
                scale_str = 'N'
            
            if not str(coeff) == 'nan':
                cc_open.writelines(id_name + ',' + str(round(coeff, 4)) + ',' + str(t_shift) + \
                                                ',' + scale_str + ',' + '\n')
                                
            print "Cross Correlation:"
            print id_name
            print "Shift:       " + str(t_shift)
            print "Coefficient: " + str(coeff)
            print print_sta
            print '------------------'
       
            cc_open.close()
    
    except Exception as error:
        print('##################')
        print(error)
        print('##################')
Example #37
0
ax = fig.axes[0]
xmin, xmax = ax.get_xlim()

data = {}

min_degree = round(xmin)
max_degree = round(xmax)
npoints = max_degree - min_degree + 1

degrees = np.linspace(min_degree, max_degree, npoints)
# Loop over all degrees.
for degree in degrees:
    with warnings.catch_warnings(record=True):
        warnings.simplefilter('always')
        tt = getTravelTimes(degree, depth, model, phase_list=phases)
    # Mirror if necessary.
    if degree > 180:
        degree = 180 - (degree - 180)
    for item in tt:
        phase = item['phase_name']
        if phase not in data:
            data[phase] = [[], []]
        data[phase][1].append(item['time'])
        data[phase][0].append(degree)
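# Worked example of the mirroring: a requested distance of 190 degrees is folded
# back to 180 - (190 - 180) = 170 degrees, so all curves stay within 0-180 degrees.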
# Plot and some formatting.
for key, value in data.items():
    plt.plot(value[0], value[1], '.', label=key)
plt.grid()
plt.xlabel('Distance (degrees)')
plt.ylabel('Time (minutes)')
Example #38
0
    def processing(self):
        from telef import *
        from obspy.core.util.geodetics import gps2DistAzimuth, kilometer2degrees
        from obspy.taup.taup import getTravelTimes
        
        
        filename_out, ok = QtGui.QInputDialog.getText(QtGui.QWidget(),"Arquivo de saida", "Entre com o nome: ")
        
        output = open(filename_out,'w')
        oheader1 = ['Estacao', 'Dia', 'H.Chegada', 'H. Origem','Latitude', ' Longitude', 'H', '    Mag', 'Tipo','Dist. Az.', 'Residuo','Regiao'.ljust(30)]
        oheader2 = ['       ','   ', ' hh:mm:ss ','hh:mm:ss ', ' (graus)', ' (graus) ', ' km', '   ','    ','    (graus)', '  (s)  ','      ' ] 
        output.write(self.lines_obsis[0][1:6])
        output.write('\n\n\n')        
            
        line=oheader1
        output.write('%7s %2s %7s %6s %6s %7s %3s %4s %2s %4s %3s %6s\n'%(line[0],line[1],line[2],line[3],line[4],line[5],line[6],
                                                                   line[7],line[8],line[9],line[10],line[11]))
        line=oheader2
        output.write('%7s %2s %7s %6s %6s %7s %3s %4s %2s %4s %3s %6s\n'%(line[0],line[1],line[2],line[3],line[4],line[5],line[6],
                                                                   line[7],line[8],line[9],line[10],line[11]))
        output.write('\n')
        
        cnt=0; lres=[]; lcnt=[]; elats=[]; elons=[]; emags=[]
        stlats=[]; stlons=[]; edeps=[]
        
        lines_usgs = open(self.filename_usgs, 'r').readlines()       
        
        for line_obsis in self.lines_obsis[1:]:
            hdr_obsis=self.lines_obsis[0]
            iOBSIS = getObSis(line_obsis,hdr_obsis)
            coords = getCoord(self.filename_coord, iOBSIS[0])
            stlats.append(coords[0])
            stlons.append(coords[1])

            for line in lines_usgs[1:]:
                l = line.split(',')
                otime = UTCDateTime(l[0])
                ootime = l[0]
                if otime < iOBSIS[6]:
                    elat = float(l[1])
                    elon = float(l[2])
                    if l[3] == "": 
                        dep = 0.0
                    else:
                        dep = float(l[3])
                    mag = float(l[4])                            
                    magType = l[5]
                    net = l[10]
                    elats.append(elat)
                    elons.append(elon)
                    emags.append(mag)
                    edeps.append(dep)
                    place = [l[13],l[14]]
                    delta = gps2DistAzimuth(elat,elon,coords[0],coords[1])[0]
                    delta = kilometer2degrees(delta/1000.)
                    tt = getTravelTimes(delta, dep, model='iasp91')
                    text = "FIRST ARRIVAL\n"
                    first = tt[0]
                    if first['phase_name'] == 'P':
                        text += "%s: %.1f\n" % (first['phase_name'],first['time'])
                        arriv = UTCDateTime(otime) + first['time']
                        res = arriv-iOBSIS[6]
                        cnt += 1
                        lcnt.append(cnt)
                        lres.append(res)
                        hc=str(iOBSIS[2]).zfill(2)+':'+str(iOBSIS[3]).zfill(2)+':'+str(iOBSIS[4]).zfill(2)
                        ho=str(otime.time)
                        delta=str(delta)
                        res=str(res)
                        final = [iOBSIS[0],str(iOBSIS[1]).zfill(2),hc,ho[:10].zfill(8),str(elat).zfill(8),str(elon).zfill(9),str(dep).ljust(6),str(mag).ljust(3),str(magType).ljust(3),str(delta)[:5],res[:8].ljust(8),str(place).ljust(50)]
                        line=final
                        output.write('%7s %2s %7s %6s %6s %7s %3s %4s %2s %4s %3s %6s\n'%(line[0],line[1],line[2],line[3],line[4],line[5],line[6],line[7],line[8],line[9],line[10],line[11]))
                        temp = [elat, elon, mag, str(iOBSIS[0]), coords[0], coords[1]]
                       
                    break
                    
        output.close()
         
        ares = np.array(lres)
        acnt = np.array(lcnt)
        graphics2.scatter(acnt,ares)
        
        if self.mapfull.checkState()  == QtCore.Qt.Checked:
            graphics2.plt_map_marble(elats,elons,emags,edeps,stlats,stlons)
        else:
            graphics2.plt_map(elats,elons,emags,edeps,stlats,stlons)
Example #39
0
   def receiver_functions(self,directory,nf=1,type='earth_model',migrate='False',equator='True'):
      import seismograms as s
      import receiver_functions as rf
      import matplotlib.pylab as plt
      from obspy.taup.taup import getTravelTimes
      from obspy.core.util.geodetics import gps2DistAzimuth
      from obspy.core.util.geodetics import kilometer2degrees
     
      rec_lon  = np.zeros(self.n_recs)
      for i in range(0,self.n_recs):
         a = s.ses3d_seismogram()
         a.read(directory,self.recs[i],integrate=True)
         b = rf.receiver_function(a)

         #TODO for plotting fills
         #where = [False]*(len(b.time))

         if type == 'toy_model' :
            prf = b.get_P_rf(-10.0,60.0,0.1,type='toy_model')
         elif type == 'earth_model' :
            prf = b.get_P_rf(-100.0,100.0,0.1,rotation_method='LQT',type='earth_model',decon_type='damped_lstsq')

         rec_lon[i]  = b.ses3d_seismogram.ry

         #normalize max amplitude to equal nf
         scale = nf/np.amax(b.prf)
         #align on P410s arrival (i.e., moveout correction)
         ref_delta      = 45
         #ref_slowness   = 7.7595   #slowness for P wave at 45 degrees
         #ref_d_slowness = 0.1042   #P - P410s slowness at 45 degrees
         ref_slowness    = 7.7595 
         ref_d_slowness  = 0.2088
         tt = getTravelTimes(delta=b.delta_deg,depth=b.ses3d_seismogram.sz/1000.0,
                             model='ak135',phase_list=['P','P660s'])
         slowness_p       = tt[0]['dT/dD']
         slowness_p410s   = tt[1]['dT/dD']
         delta_slowness   = slowness_p - slowness_p410s
         d_delta_slowness = ref_d_slowness - delta_slowness
         time_shift       = b.delta_deg * d_delta_slowness
         index_shift      = int(time_shift/b.ses3d_seismogram.dt)
         #print "delta_slowness = ", delta_slowness
         #print "time_shift, index_shift = ", time_shift, index_shift

         if migrate == 'False':
            #If you want it aligned on the P660s arrival:
            b.prf = np.roll(b.prf,index_shift)

         if migrate =='True':
            value,dep_m = b.migrate_1d()
            plt.plot(value*scale+b.ses3d_seismogram.ry,dep_m,'k')
        
         #plot
         rf_scaled = (b.prf*scale)+(b.ses3d_seismogram.ry)
         where     = rf_scaled > b.ses3d_seismogram.ry
         plt.figure(1)
         plt.plot(rf_scaled, b.time,'k')
         plt.fill_betweenx(b.time,b.ses3d_seismogram.ry,rf_scaled,where,color='k')

      plt.gca().invert_yaxis()
      plt.xlabel('distance (degrees)')
      plt.ylabel('time after P(s)')
      plt.ylim([0,100])
      plt.gca().invert_yaxis()
      plt.show()
Example #40
0
    def on_stations_listWidget_currentItemChanged(self, current, previous):
        if current is None:
            return

        self._reset_all_plots()

        try:
            wave = self.comm.query.get_matching_waveforms(
                self.current_event, self.current_iteration,
                self.current_station)
        except Exception as e:
            for component in ["Z", "N", "E"]:
                plot_widget = getattr(self.ui, "%s_graph" % component.lower())
                plot_widget.addItem(
                    pg.TextItem(text=str(e),
                                anchor=(0.5, 0.5),
                                color=(200, 0, 0)))
            return

        event = self.comm.events.get(self.current_event)

        great_circle_distance = locations2degrees(
            event["latitude"], event["longitude"],
            wave.coordinates["latitude"], wave.coordinates["longitude"])
        tts = getTravelTimes(great_circle_distance,
                             event["depth_in_km"],
                             model="ak135")

        windows_for_station = \
            self.current_window_manager.get_windows_for_station(
                self.current_station)

        for component in ["Z", "N", "E"]:
            plot_widget = getattr(self.ui, "%s_graph" % component.lower())
            data_tr = [
                tr for tr in wave.data
                if tr.stats.channel[-1].upper() == component
            ]
            if data_tr:
                tr = data_tr[0]
                plot_widget.data_id = tr.id
                times = tr.times()
                plot_widget.plot(times, tr.data, pen="k")
            else:
                plot_widget.data_id = None
            synth_tr = [
                _i for _i in wave.synthetics
                if _i.stats.channel[-1].upper() == component
            ]
            if synth_tr:
                tr = synth_tr[0]
                times = tr.times()
                plot_widget.plot(
                    times,
                    tr.data,
                    pen="r",
                )

            if data_tr or synth_tr:
                for tt in tts:
                    if tt["time"] >= times[-1]:
                        continue
                    if tt["phase_name"][0].lower() == "p":
                        pen = "#008c2866"
                    else:
                        pen = "#95000066"
                    plot_widget.addLine(x=tt["time"], pen=pen, z=-10)

            plot_widget.autoRange()

            window = [
                _i for _i in windows_for_station
                if _i.channel_id[-1].upper() == component
            ]
            if window:
                plot_widget.windows = window[0]
                for win in window[0].windows:
                    WindowLinearRegionItem(win, event, parent=plot_widget)

        self._update_raypath(wave.coordinates)
Example #41
0
    for k in range(len(earthquakes)):
        event=earthquakes[k].split('/')[-1].split('.')[0]
        print(event)
        #Get hypocenter
        hypo=genfromtxt(path+'event_info/'+event+'.hypo')
        #Read station data
        stanames=genfromtxt(earthquakes[k],usecols=0,dtype='S')
        station_coords=genfromtxt(earthquakes[k],usecols=[1,2])
        #initalize
        tp=zeros(len(stanames))
        delta=zeros(len(stanames))
        for j in range(len(stanames)):
            #Get event-station distance in degrees
            delta[j]=locations2degrees(station_coords[j,1],station_coords[j,0],hypo[2],hypo[1])
            #Get p-time to site
            tt=getTravelTimes(delta[j],hypo[3])
            tp[j]=float64(tt[0]['time'])
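            # Note: tt[0] is simply the earliest arrival returned by getTravelTimes;
            # at these distances that is normally direct P (or Pdiff/PKP further out),
            # but it is not filtered by phase name here.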
        #Write to file
        f=open(path+'travel_times/'+event+'.tt','w')
        for j in range(len(tp)):
            line='%s\t%8.2f\n' %(stanames[j],tp[j])
            f.write(line)
        f.close()
    
    
#Plot PGD computation as a function of distance
if plot_pgd_dist:
        for k in [0,1,2,3,4,5,6]:#range(len(earthquakes)):
            event=earthquakes[k].split('/')[-1].split('.')[0]
            print(event)
            #Get stations and p-times
Example #42
0
 sta_epi = float(sta_read[i][-2])
 calc_xcorr = float(sta_read[i][2])
 calc_dt = float(sta_read[i][5])
 
 
 # read the green's function
 grf_tr = read(os.path.join(synthetic_add, ev_name, 'SAC_realName', 'grf.'+sta_name))[0]
 #event_1:
 #if not grf_tr.stats.station in ["109C", "AAK", "AAK", "ABKAR", "ACCN", "ADO", "AGMN", "AGRB", "AKGG", "AKUT", "ANMO", "ANWB", "BERG", "BESE", "BLO", "BLOW", "BMN", "BOAB", "BZN", "CCRK", "CMB", "DBO", "DLMT", "EUNU", "FACU", "FFD", "HDC", "HIA", "JSC", "LPAZ", "MGAN", "OBIP", "SC58", "SRU", "STD"]: continue
 #event_2:
 #if not grf_tr.stats.station in ["109C", "ACCN", "ACSO", "ADO", "ANMO", "ASCN", "ATE", "AUL", "BERG", "BESE", "BLO", "BLOW", "BMN", "BZN", "CCRK", "CMB", "CRAG", "DBIC", "DLMT", "DRLN", "DYA", "ESPZ", "FACU", "FFD", "GGNV", "LPW", "MTE", "PACT", "PESTR", "PPCWF", "PPTF", "RTC", "SC58", "SCO", "SRU", "STD"]: continue
 #if not grf_tr.stats.station in ["AAK", "AAK", "AKTO", "ANTO", "APE", "AQU", "HGN", "IPM", "KHC", "LSA", "MUN", "PSI", "SENIN", "SSB", "VNDA"]: continue
 #if grf_tr.stats.network in passed_nets: continue
 if min_epi <= grf_tr.stats.sac.gcarc <= max_epi:
     try:
         tt_list = taup.getTravelTimes(grf_tr.stats.sac.gcarc, grf_tr.stats.sac.evdp, model='iasp91')
         flag = 'searching'
         for j in range(len(tt_list)):
             if tt_list[j]['phase_name'] == phase:
                 phase_time = tt_list[j]['time']
                 #print '---------'
                 #print phase + ' is found and the arrival time:'
                 #print tt_list[j]['time']
                 #print '---------'
                 flag = 'found'
                 break
         if flag == 'searching': 
             print('Could not find ' + phase + ' in: ' + str(grf_tr.stats.sac.gcarc))
             continue
         sta_name_split = sta_name.split('.')
         real_tr = read(os.path.join(real_add, ev_name, 'BH', 'dis.' + sta_name_split[1] + '.' + 
Example #43
0
    def receiver_functions(self,
                           directory,
                           nf=1,
                           type='earth_model',
                           migrate='False',
                           equator='True'):
        import seismograms as s
        import receiver_functions as rf
        import matplotlib.pylab as plt
        from obspy.taup.taup import getTravelTimes
        from obspy.core.util.geodetics import gps2DistAzimuth
        from obspy.core.util.geodetics import kilometer2degrees

        rec_lon = np.zeros(self.n_recs)
        for i in range(0, self.n_recs):
            a = s.ses3d_seismogram()
            a.read(directory, self.recs[i], integrate=True)
            b = rf.receiver_function(a)

            #TODO for plotting fills
            #where = [False]*(len(b.time))

            if type == 'toy_model':
                prf = b.get_P_rf(-10.0, 60.0, 0.1, type='toy_model')
            elif type == 'earth_model':
                prf = b.get_P_rf(-100.0,
                                 100.0,
                                 0.1,
                                 rotation_method='LQT',
                                 type='earth_model',
                                 decon_type='damped_lstsq')

            rec_lon[i] = b.ses3d_seismogram.ry

            #normalize max amplitude to equal nf
            scale = nf / np.amax(b.prf)
            #align on P410s arrival (i.e., moveout correction)
            ref_delta = 45
            #ref_slowness   = 7.7595   #slowness for P wave at 45 degrees
            #ref_d_slowness = 0.1042   #P - P410s slowness at 45 degrees
            ref_slowness = 7.7595
            ref_d_slowness = 0.2088
            tt = getTravelTimes(delta=b.delta_deg,
                                depth=b.ses3d_seismogram.sz / 1000.0,
                                model='ak135',
                                phase_list=['P', 'P660s'])
            slowness_p = tt[0]['dT/dD']
            slowness_p410s = tt[1]['dT/dD']
            delta_slowness = slowness_p - slowness_p410s
            d_delta_slowness = ref_d_slowness - delta_slowness
            time_shift = b.delta_deg * d_delta_slowness
            index_shift = int(time_shift / b.ses3d_seismogram.dt)
            #print "delta_slowness = ", delta_slowness
            #print "time_shift, index_shift = ", time_shift, index_shift

            if migrate == 'False':
                #If you want it aligned on the P660s arrival:
                b.prf = np.roll(b.prf, index_shift)

            if migrate == 'True':
                value, dep_m = b.migrate_1d()
                plt.plot(value * scale + b.ses3d_seismogram.ry, dep_m, 'k')

            #plot
            rf_scaled = (b.prf * scale) + (b.ses3d_seismogram.ry)
            where = rf_scaled > b.ses3d_seismogram.ry
            plt.figure(1)
            plt.plot(rf_scaled, b.time, 'k')
            plt.fill_betweenx(b.time,
                              b.ses3d_seismogram.ry,
                              rf_scaled,
                              where,
                              color='k')

        plt.gca().invert_yaxis()
        plt.xlabel('distance (degrees)')
        plt.ylabel('time after P(s)')
        plt.ylim([0, 100])
        plt.gca().invert_yaxis()
        plt.show()
Example #44
0
def main(argv=sys.argv): 
    
    #Earth's parameters 
    #~ beta = 4.e3 #m/s 
    #~ rho = 3.e3 #kg/m^3 
    #~ mu = rho*beta*beta
    
    PLotSt = ["IU.TRQA.00.LHZ",
             "IU.LVC.00.LHZ",
             "II.NNA.00.LHZ",
              "IU.RAR.00.LHZ"]
             
             
    #PlotSubf = [143, 133, 123, 113, 103, 93,
     #           83, 73, 63, 53]
    PlotSubf = [6,3]

    
    
    #Set rup_vel = 0 to have a point source solution
    RupVel = 2.1 #Chilean eq from Lay et al
    t_h     = 10. # Half duration for each sf  
    noiselevel = 0.0# L1 norm level of noise
    mu =40e9
    #W-Phase filter 
    corners = 4.
    fmin = 0.001
    fmax = 0.005
    
    ### Data from Chilean 2010 EQ (Same as W phase inv.) 
    strike = 18.
    dip    = 18.
    rake   = 104. # 109.
    
    rakeA = rake + 45.
    rakeB = rake - 45.
    
    
    ### Fault's grid parameters
    nsx   = 21 #Number of sf along strike
    nsy   = 11 #Number of sf along dip
    flen  = 600. #Fault length [km] along strike
    fwid  = 300. #Fault width [km] along dip
    direc = 0    #Directivity 0 = bilateral
    Min_h = 10.  #Min depth of the fault
    
    
    ### Derived parameters:
    nsf = nsx*nsy
    sflen = flen/float(nsx)         
    sfwid = fwid/float(nsy)
    swp = [1, 0, 2] # useful to swap (lat,lon, depth)  
    mindist = flen*fwid # minimum distance to the hypocentre (initial value)
    
    ###Chessboard
    #weight = np.load("RealSol.npy") 
    weight = np.zeros(nsf)
    weight[::2] = 1 
    #weight[::2] = 1 
    #~ weight[10]=15
    #~ weight[5001]=10
    #~ weight[3201]=2
    
    
    
    ## Setting dirs and reading files.
    GFdir = "/home/roberto/data/GFS/"
    workdir = os.path.abspath(".")+"/"
    datadir = workdir + "DATA/"
    tracesfilename = workdir + "goodtraces.dat"
    tracesdir = workdir + "WPtraces/"
    
    try:
        reqfilename    = glob.glob(workdir + '*.syn.req')[0]
    except IndexError:   
        print "There is not *.syn.req file in the dir"
        sys.exit()
    
    basename = reqfilename.split("/")[-1][:-4]
    
    if not os.path.exists(tracesfilename): 
        print(tracesfilename, "does not exist.")
        exit()
    
    if not os.path.exists(datadir):
            os.makedirs(datadir)
    
    if not os.path.exists(tracesdir):
            os.makedirs(tracesdir)
 
    tracesfile = open(tracesfilename)    
    reqfile =  open(reqfilename)    
    
    trlist = readtraces(tracesfile)
    eqdata = readreq(reqfile)    

    tracesfile.close()
    reqfile.close()   
    
    ####Hypocentre from
    ### http://earthquake.usgs.gov/earthquakes/eqinthenews/2010/us2010tfan/    
    cmteplat = -35.91#-35.85#-36.03#-35.83
    cmteplon = -72.73#-72.72#-72.83# -72.67
    cmtepdepth= 35.
    eq_hyp = (cmteplat,cmteplon,cmtepdepth)
    
    
      ############
    

    # Defining the sf system
    grid, sblt = fault_grid('CL-2010',cmteplat,cmteplon,
                            cmtepdepth, direc,
                            Min_h, strike, dip, rake, flen,fwid ,nsx,nsy,
                            Verbose=False,ffi_io=True,gmt_io=True)
    
    print ('CL-2010',cmteplat,cmteplon,
                            cmtepdepth, direc,
                            Min_h, strike, dip, rake, flen,fwid ,nsx,nsy)
    print(grid[0][1])
    #sys.exit()
    #This calculation is inside of the loop
    #~ NP = [strike, dip, rake]
    #~ M = np.array(NodalPlanetoMT(NP))  
    #~ Mp = np.sum(M**2)/np.sqrt(2)    
     
    #############################################################################
    ######Determining the sf closest to the hypocentre:    
    min_Dist_hyp_subf = flen *fwid
    for subf in range(nsf):
        sblat   = grid[subf][1]
        sblon   = grid[subf][0]
        sbdepth = grid[subf][2]              
        sf_hyp =  (sblat,sblon, sbdepth)        
        Dist_hyp_subf = hypo2dist(eq_hyp,sf_hyp)
        if Dist_hyp_subf < min_Dist_hyp_subf:
            min_Dist_hyp_subf = Dist_hyp_subf
            min_sb_hyp = sf_hyp
            hyp_subf = subf
    ####Determining trimming times:    
    test_tr = read(GFdir + "H003.5/PP/GF.0001.SY.LHZ.SAC")[0]
    t0 = test_tr.stats.starttime
    TrimmingTimes = {}   # Trimming window (start, end) for each station.
    A =0
    for trid in trlist:     
        metafile = workdir + "DATA/" + "META." + trid + ".xml"
        META = DU.getMetadataFromXML(metafile)[trid]
        stlat = META['latitude']
        stlon = META['longitude'] 
        dist =   locations2degrees(min_sb_hyp[0],min_sb_hyp[1],\
                                   stlat,stlon) 
        parrivaltime = getTravelTimes(dist,min_sb_hyp[2])[0]['time']        
        ta = t0 + parrivaltime
        tb = ta + round(15.*dist) 
        TrimmingTimes[trid] = (ta, tb)
        
    
    ###########################

      
    
    DIST = []
    # Ordering the stations in terms of distance
    for trid in trlist: 
        metafile = workdir + "DATA/" + "META." + trid + ".xml"
        META = DU.getMetadataFromXML(metafile)[trid]
        lat = META['latitude']
        lon = META['longitude']
        trdist = locations2degrees(cmteplat,
                                   cmteplon,lat,lon) 
        DIST.append(trdist)   

    DistIndex = lstargsort(DIST)
    trlist = [trlist[i] for i in DistIndex]
  
    stdistribution = StDistandAzi(trlist, eq_hyp , workdir + "DATA/")
    StDistributionPlot(stdistribution)
    #exit()
    #Main loop
    for subf in range(nsf):
        print subf
        sflat   = grid[subf][1]
        sflon   = grid[subf][0]           
        sfdepth = grid[subf][2]
        #~ strike = grid[subf][3] #+ 360.
        #~ dip    = grid[subf][4]
        #~ rake   = grid[subf][5] #     
        NP = [strike, dip, rake]  
        NPA = [strike, dip, rakeA]
        NPB = [strike, dip, rakeB]        


        
        M = np.array(NodalPlanetoMT(NP))   
        MA = np.array(NodalPlanetoMT(NPA)) 
        MB = np.array(NodalPlanetoMT(NPB)) 
        #The time delay is the time at which the rupture front
        #reaches the subfault
            
        sf_hyp = (sflat, sflon, sfdepth) 
        Dist_ep_subf = hypo2dist(eq_hyp,sf_hyp)
        
        if Dist_ep_subf < mindist:
            mindist = Dist_ep_subf
            minsubf = subf
        
                
        if RupVel == 0:
            t_d = eqdata['time_shift']
        else:
            t_d = round(Dist_ep_subf/RupVel) #-59.
       
        print sflat, sflon, sfdepth
        # Looking for the best depth dir:
        depth = []
        depthdir = []
        for file in os.listdir(GFdir):
            if file[-2:] == ".5":
                depthdir.append(file)
                depth.append(float(file[1:-2]))            
        BestDirIndex = np.argsort(abs(sfdepth\
                                  - np.array(depth)))[0]      
        hdir = GFdir + depthdir[BestDirIndex] + "/"     
        
        ###

        SYN = np.array([])
        SYNA = np.array([])
        SYNB = np.array([])
        for trid in trlist:     
            
            metafile = workdir + "DATA/" + "META." + trid + ".xml"
            META = DU.getMetadataFromXML(metafile)[trid]
            lat = META['latitude']
            lon = META['longitude']  
            
            #Subfault loop               
            #GFs Selection:
            ##Change to the following loop
            
            dist = locations2degrees(sflat,sflon,lat,lon)                                
            azi =  -np.pi/180.*gps2DistAzimuth(lat,lon,
                       sflat,sflon)[2] 
            trPPsy,  trRRsy, trRTsy,  trTTsy = \
                                       GFSelectZ(hdir,dist)          
            
            
 
            
            trROT =  MTrotationZ(azi, trPPsy,  trRRsy, trRTsy,  trTTsy) 
            orig = trROT[0].stats.starttime  
            dt = trROT[0].stats.delta                       

            trianglen = 2*int(t_h/dt) - 1
            FirstValid = int(trianglen/2.) + 1 # to delete
            window = triang(trianglen)
            window /= np.sum(window)
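            # The triangular window of total length ~2*t_h acts as a simple source
            # time function; 'valid'-mode convolution shortens the trace, which is
            # compensated by the t_l shift applied when trimming below.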
            #window = np.array([1.])
            
      
            
            
            parrivaltime = getTravelTimes(dist,sfdepth)[0]['time']
            
            t1 = TrimmingTimes[trid][0] - t_d
            t2 = TrimmingTimes[trid][1] - t_d
            
            
            
            for trR in trROT:
                trR.data *= 10.**-21 ## To get M in Nm                   
                trR.data -= trR.data[0]
                AUX1 = len(trR)
                trR.data = convolve(trR.data,window,mode='valid') 
                AUX2 = len(trR)
                mean = np.mean(np.hstack((trR.data[0]*np.ones(FirstValid),\
                               trR.data[:int(60./trR.stats.delta - FirstValid + 1)])))
                #mean = np.mean(trR.data[:60])
                trR.data -= mean      
                trR.data = bp.bandpassfilter(trR.data,len(trR), trR.stats.delta,\
                                             corners , 1 , fmin, fmax)  
                t_l = dt*0.5*(AUX1 - AUX2)                             
                trR.trim(t1-t_l,t2-t_l, pad=True, fill_value=trR.data[0])  #We lost t_h due to the convolution        
            


                   
            #~ for trR in trROT:
                #~ trR.data *= 10.**-23 ## To get M in Nm                   
                #~ trR.data -= trR.data[0]
 
                #~ trR.data = convolve(trR.data,window,mode='same') 

                #~ #mean = np.mean(np.hstack((trR.data[0]*np.ones(FirstValid),\
                               #~ #trR.data[:60./trR.stats.delta*1.-FirstValid+1])))
                #~ mean = np.mean(trR.data[:60])
                #~ trR.data -= mean      
                #~ trR.data = bp.bandpassfilter(trR.data,len(trR), trR.stats.delta,\
                                             #~ corners , 1 , fmin, fmax)  
                            
                #~ trR.trim(t1,t2,pad=True, fill_value=trR.data[0])     
           
            trROT = np.array(trROT)  
            syn  =  np.dot(trROT.T,M) 
            synA =  np.dot(trROT.T,MA)
            synB =  np.dot(trROT.T,MB)
            
            SYN = np.append(SYN,syn)  
            SYNA = np.append(SYNA,synA)
            SYNB = np.append(SYNB,synB)
            
            
        print np.shape(A), np.shape(np.array([SYN]))    
        if subf == 0: 
            A = np.array([SYN])
            AA = np.array([SYNA])
            AB = np.array([SYNB])
        else:
            A = np.append(A,np.array([SYN]),0)    
            AA = np.append(AA,np.array([SYNA]),0)
            AB = np.append(AB,np.array([SYNB]),0)
            
            
            
    AC = np.vstack((AA,AB))
    print np.shape(AC)
    print np.shape(weight)
    B = np.dot(A.T,weight)
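    # Each row of A holds the concatenated synthetics of one subfault for all
    # stations, so A.T dotted with the slip weights gives the combined synthetics B.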
    stsyn = Stream()
    n = 0
    Ntraces= {}
    for trid in trlist: 
        spid = trid.split(".")        
        print trid
        NMIN = int(1. + (TrimmingTimes[trid][1] - TrimmingTimes[trid][0]) / dt)
        Ntraces[trid] = (n, NMIN + n)
        trsyn = Trace(B[n:NMIN+n])
        n += NMIN        
        trsyn.stats.network = spid[0]
        trsyn.stats.station = spid[1]
        trsyn.stats.location = spid[2]
        trsyn.stats.channel = spid[3] 
        trsyn = AddNoise(trsyn,level = noiselevel)
        #trsyn.stats.starttime = 
        stsyn.append(trsyn)
        
       
    stsyn.write(workdir+"WPtraces/" + basename + ".decov.trim.mseed",
                 format="MSEED")           
                
    #####################################################    
    # Plotting:
    #####################################################
    #we are going to reflect the y axis later, so:
    print minsubf
    hypsbloc = [minsubf / nsy , -(minsubf % nsy) - 2]

    #Creating the strike and dip axis:
    StrikeAx= np.linspace(0,flen,nsx+1)
    DipAx= np.linspace(0,fwid,nsy+1)
    DepthAx = DipAx*np.sin(np.pi/180.*dip) + Min_h    
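    # StrikeAx/DipAx are subfault edge coordinates along strike and dip;
    # DepthAx projects the dip axis to depth via sin(dip) plus the minimum fault depth.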
    hlstrike = StrikeAx[hypsbloc[0]] + sflen*0.5
        
    hldip = DipAx[hypsbloc[1]] + sfwid*0.5 
    hldepth = DepthAx[hypsbloc[1]] + sfwid*0.5*np.sin(np.pi/180.*dip)
       
    StrikeAx = StrikeAx - hlstrike
    DipAx =     DipAx   - hldip
 

    
    XX, YY = np.meshgrid(StrikeAx, DepthAx)
    XX, ZZ = np.meshgrid(StrikeAx, DipAx )

   
    sbarea = sflen*sfwid
    
    SLIPS = weight.reshape(nsx,nsy).T#[::-1,:]
    SLIPS /= mu*1.e6*sbarea
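    # Convert each subfault's moment weight to slip, s = M0 / (mu * area),
    # with the subfault area converted from km^2 to m^2 (factor 1e6) and mu
    # assumed to be a rigidity in Pa defined earlier in the script.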
    
    ######Plot:#####################
    plt.figure()
    ax = host_subplot(111)
    im = ax.pcolor(XX, YY, SLIPS, cmap="jet")    
    ax.set_ylabel('Depth [km]')       
    ax.set_ylim(DepthAx[-1],DepthAx[0])  
    
    # Creating a twin plot 
    ax2 = ax.twinx()
    #im2 = ax2.pcolor(XX, ZZ, SLIPS[::-1,:], cmap="Greys") 
    im2 = ax2.pcolor(XX, ZZ, SLIPS[::-1,:], cmap="jet")    
    
    ax2.set_ylabel('Distance along the dip [km]')
    ax2.set_xlabel('Distance along the strike [km]')    
    ax2.set_ylim(DipAx[0],DipAx[-1])
    ax2.set_xlim(StrikeAx[0],StrikeAx[-1])       
                         
                         
    ax.axis["bottom"].major_ticklabels.set_visible(False) 
    ax2.axis["bottom"].major_ticklabels.set_visible(False)
    ax2.axis["top"].set_visible(True)
    ax2.axis["top"].label.set_visible(True)
    
    
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("bottom", size="5%", pad=0.1)
    cb = plt.colorbar(im, cax=cax, orientation="horizontal")
    cb.set_label("Slip [m]") 
    ax2.plot([0], [0], '*', ms=225./(nsy+4))
    ax2.set_xticks(ax2.get_xticks()[1:-1])
    #ax.set_yticks(ax.get_yticks()[1:])
    #ax2.set_yticks(ax2.get_yticks()[:-1])
    

    
    #########Plotting the selected traces:
    nsp = len(PLotSt) * len(PlotSubf)
    plt.figure(figsize=(13,11))
    plt.title("Synthetics for rake = " + str(round(rake)))
    mindis = []
    maxdis = []
    for i, trid in enumerate(PLotSt):   
        x = np.arange(0,Ntraces[trid][1]-Ntraces[trid][0],
                      dt)
        for j, subf in enumerate(PlotSubf):
            y = A[subf, Ntraces[trid][0]:Ntraces[trid][1]]
            if j == 0:
                yy = y
            else:
                yy = np.vstack((yy,y))        
        maxdis.append(np.max(yy))
        mindis.append(np.min(yy))
        
    

    for i, trid in enumerate(PLotSt):   
        x = np.arange(0,Ntraces[trid][1]-Ntraces[trid][0],
                      dt)

        for j, subf in enumerate(PlotSubf):
            y = A[subf, Ntraces[trid][0]:Ntraces[trid][1]]
            plt.subplot2grid((len(PlotSubf), len(PLotSt)),
                              (j, i))                                
            plt.plot(x,y, linewidth=2.5)
            if j == 0:
                plt.title(trid)
            fig = plt.gca()            
            fig.axes.get_yaxis().set_ticks([])
            fig.set_ylabel(str(subf),rotation=0)
            fig.set_xlim((x[0],x[-1]))
            fig.set_ylim((mindis[i],maxdis[i]))
            if subf != PlotSubf[-1]:
                fig.axes.get_xaxis().set_ticks([])

    
    plt.show()
Пример #45
0
def calcTTT(Config,StationList,Origin):
    
    dimX = int(Config['dimX']) 
    dimY = int(Config['dimY']) 
    gridspacing = float(Config['gridspacing'])
    
    o_lat = float(Origin['lat'])
    o_lon = float(Origin['lon'])
    o_depth = float(Origin['depth'])

    logger.info(' BOUNDING BOX DIMX: %d  DIMY: %d  GRIDSPACING: %f \n'%(dimX,dimY,gridspacing))    
    
    
    oLator = o_lat + dimX/2
    oLonor = o_lon + dimY/2
    oLatul = 0
    oLonul = 0
    mint= 100000
    maxt=-100000
    
    TTTGridMap = {}
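    # For every station, build a dimX x dimY grid of nodes centred on the origin
    # (spacing `gridspacing` degrees) and store, per node, the ak135 travel time
    # of the phase requested in Config['ttphase'].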
    
    for station in StationList:
        
        GridArray = {}
        streamID = station.net+'.'+station.sta+'.'+station.loc+'.'+station.comp
        sdelta = locations2degrees(float(o_lat), float(o_lon), float(station.lat), float(station.lon))
              
        logger.info(' STATION: %s --> DELTA: %f'% (streamID,sdelta))
        
        z=0
        for i in xrange(dimX):
            
            oLatul = o_lat -((dimX-1)/2)*gridspacing + i*gridspacing
            if z == 0 and i == 0:
                Latul = oLatul
            o=0    
            for j in xrange (dimY):
                
                oLonul = o_lon -((dimY-1)/2)*gridspacing + j*gridspacing
                if o==0 and j==0:
                    Lonul = oLonul
                
                de = locations2degrees(float(oLatul), float(oLonul), float(station.lat), float(station.lon))
                
                tt = getTravelTimes(delta=de,depth=o_depth,model='ak135')
                #print tt
                if tt[0]['phase_name'] == Config['ttphase']:
                        time = tt[0]['time']
                        #print streamID,oLatul,oLonul,' --------> ' ,time , ' -> \n'
                        
                        GridArray[(i,j)] = GridElem(oLatul, oLonul, o_depth,time,de)
                    
                        if (mint > time):
                                mint = time
                        if (maxt < time):
                                maxt = time        


        TTTGridMap[streamID] = TTTGrid(o_depth,mint,maxt,Latul,Lonul,oLator,oLonor,GridArray)
                                                        
    logger.info('\033[31m MINT: %g  MAXT: %f \033[0m'% (mint,maxt))

    return mint, maxt,TTTGridMap
Пример #46
0
    stanames=genfromtxt('/Users/dmelgar/Slip_inv/iquique_sm/data/station_info/gps.gflist',usecols=0,dtype='S')
    coords=genfromtxt('/Users/dmelgar/Slip_inv/iquique_sm/data/station_info/gps.gflist',usecols=[1,2])
    for k in range(len(stanames)):
        sta=stanames[k]
        print sta
        n=read(path+'proc/'+sta+'.LXN.sac')
        e=read(path+'proc/'+sta+'.LXE.sac')
        u=read(path+'proc/'+sta+'.LXZ.sac')
        #Low pass filter
        n[0].data=lowpass(n[0].data,fcorner,1./n[0].stats.delta,10)
        e[0].data=lowpass(e[0].data,fcorner,1./e[0].stats.delta,10)
        u[0].data=lowpass(u[0].data,fcorner,1./u[0].stats.delta,10)
        #Get station to hypocenter delta distance
        delta=locations2degrees(coords[k,1],coords[k,0],epicenter[1],epicenter[0])
        #Get p-time to site
        tt=getTravelTimes(delta,epicenter[2])
        tp=timedelta(seconds=float64(tt[0]['time']))
        #Trim
        n[0].trim(starttime=time_epi+tp-tmin,endtime=time_epi+tp+tmax)
        e[0].trim(starttime=time_epi+tp-tmin,endtime=time_epi+tp+tmax)
        u[0].trim(starttime=time_epi+tp-tmin,endtime=time_epi+tp+tmax)
        #Remove first epoch
        n[0].data=n[0].data-n[0].data[0]
        e[0].data=e[0].data-e[0].data[0]
        u[0].data=u[0].data-u[0].data[0]
        #Write to file
        n.write(path+'filt/'+sta+'.LXN.sac',format='SAC')
        e.write(path+'filt/'+sta+'.LXE.sac',format='SAC')
        u.write(path+'filt/'+sta+'.LXZ.sac',format='SAC')
        
if make_plots:
Пример #47
0
def travel_times(ref, deg=None, km=None, depth=0.): 
    """
    Get *approximate* relative travel time(s).

    Parameters
    ----------
    ref : list or tuple of strings and/or floats
        Reference phase names or horizontal velocities [km/sec].
    deg : float, optional
        Degrees of arc between two points of interest (spherical earth).
    km : float, optional
        Horizontal kilometers between two points of interest (spherical earth).
    depth : float, optional. default, 0.
        Depth (positive down) of event, in kilometers.

    Returns
    -------
    numpy.ndarray
        Relative times, in seconds, same length as "ref". NaN if requested time
        is undefined.

    Examples
    --------
    Get relative P arrival and 2.7 km/sec surface wave arrival at 35 degrees
    distance.
    >>> times = travel_times(['P', 2.7], deg=35.0)
    To get absolute window, add the origin time like:
    >>> w1, w2 = times + epoch_origin_time

    Notes
    -----
    Either deg or km must be indicated.
    The user is responsible for adding/subtracting time (such as origin
    time, pre-window noise time, etc.) from those predicted in order to define 
    a window.
    Phase travel times use ak135.

    """
    times = np.zeros(len(ref), dtype='float')
    tt = None
    for i, iref in enumerate(ref):
        if isinstance(iref, str):
            # phase time requested
            if not tt:
                if not deg:
                    deg = geod.kilometers2degrees(km)
                tt = taup.getTravelTimes(deg, depth, model='ak135')
            try:
                idx = [ph['phase_name'] for ph in tt].index(iref)
                itt = [ph['time'] for ph in tt][idx]
            except ValueError:
                # phase not found
                itt = None
        else:
            # horizontal velocity
            if not km:
                km = deg*(2*math.pi/360.0)*6371.0
            itt = km/iref
        times[i] = itt

    return times
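# A minimal usage sketch (hypothetical, not part of the example above); it assumes
# the module-level names used in travel_times are available, i.e. `taup` exposing
# the legacy obspy getTravelTimes and `geod` exposing kilometers2degrees:
#
#   rel = travel_times(['P', 'S', 2.7], deg=35.0, depth=10.0)
#   # rel[0] -> P time, rel[1] -> S time, rel[2] -> 2.7 km/s arrival (seconds)
#   window = (rel[0] - 60.0, rel[2] + 120.0)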
Пример #48
0
def travel_times(ref, deg=None, km=None, depth=0.):
    """
    Get *approximate* relative travel time(s).

    Parameters
    ----------
    ref : list or tuple of strings and/or floats
        Reference phase names or horizontal velocities [km/sec].
    deg : float, optional
        Degrees of arc between two points of interest (spherical earth).
    km : float, optional
        Horizontal kilometers between two points of interest (spherical earth).
    depth : float, optional. default, 0.
        Depth (positive down) of event, in kilometers.

    Returns
    -------
    numpy.ndarray
        Relative times, in seconds, same length as "ref". NaN if requested time
        is undefined.

    Examples
    --------
    Get relative P arrival and 2.7 km/sec surface wave arrival at 35 degrees
    distance.
    >>> times = travel_times(['P', 2.7], deg=35.0)
    To get absolute window, add the origin time like:
    >>> w1, w2 = times + epoch_origin_time

    Notes
    -----
    Either deg or km must be indicated.
    The user is responsible for adding/subtracting time (such as origin
    time, pre-window noise time, etc.) from those predicted in order to define
    a window.
    Phase travel times use ak135.

    """
    times = np.zeros(len(ref), dtype='float')
    tt = None
    for i, iref in enumerate(ref):
        if isinstance(iref, str):
            # phase time requested
            if not tt:
                if not deg:
                    deg = geod.kilometers2degrees(km)
                tt = taup.getTravelTimes(deg, depth, model='ak135')
            try:
                idx = [ph['phase_name'] for ph in tt].index(iref)
                itt = [ph['time'] for ph in tt][idx]
            except ValueError:
                # phase not found
                itt = None
        else:
            # horizontal velocity
            if not km:
                km = deg * (2 * math.pi / 360.0) * 6371.0
            itt = km / iref
        times[i] = itt

    return times
Пример #49
0
def ProcessLoopS(filepath,stationnames):
    '''File processing loop for S tomography: take the components, convert to RTZ, and delete the E and N components'''
    
    p = os.getcwd()
    
    for station in stationnames:
        print 'Dealing with %s' %station
    
        Rstream = obspy.Stream()

        #Get all SAC files associated with that station
        sacfiles = reversed(sorted(glob.glob('*.%s..*' %station)))
        saccount = 0
        
        for sacfile in sacfiles:
           trace = read(sacfile)
           
           #Only determine the distance and back-azimuth once: This is what the saccount variable is here for
           if saccount == 0:
           
             evlat = trace[0].stats.sac.evla
             evlon = trace[0].stats.sac.evlo
             evdep = trace[0].stats.sac.evdp
             stlat = trace[0].stats.sac.stla
             stlon = trace[0].stats.sac.stlo
           
             dist = locations2degrees(evlat,evlon,stlat,stlon) #find distance from the quake to the station
             arcs = IRISclient.distaz(stalat=stlat,stalon=stlon,evtlat=evlat,evtlon=evlon)
             baz = arcs['backazimuth']
             az = arcs['azimuth']
             
             if evdep > 1e3:
                evdep = evdep/1000.0;
                
             traveltimes = getTravelTimes(dist,evdep, model='iasp91')
             
             P = 0
             S = 0
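             # Scan the iasp91 arrival list once and keep the first P and S times;
             # they are written to the SAC t1/t2 headers further down.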

             for element in traveltimes:
               phaseinfo = element['phase_name']
               if phaseinfo == 'P':
                  Ptime = element['time']
                  P = 1
               if phaseinfo == 'S':
                  Stime = element['time']
                  S = 1
               if (P==1 and S ==1):
                  break

             try:
               P = Ptime
             except:
               Ptime = 0
             try:
               S = Stime
             except:
               Stime = 0
             
            
           #Set the P and S times, and other SAC header data
           trace[0].stats.sac.az = float(az)
           trace[0].stats.sac.baz = float(baz)

           if Ptime > 0:
            trace[0].stats.sac.t1 = Ptime
           if Stime > 0:
            trace[0].stats.sac.t2 = Stime

           trace[0].stats.sac.evdp = evdep*1000 #dbpick wants depth to be in meters
           trace[0].stats.sac.o = 0 #add origin time

           #other operations - remove the mean and resample
           trace[0].detrend('demean')
           trace[0].resample(20)
           
           if ('BHE' in sacfile) or ('BHN' in sacfile):
              Rstream += trace
              #to save disk space, delete the E and N components
              os.system('rm %s' %sacfile)
           else:
              #Write the Z component directly (writes over the existing file)
              trace.write(sacfile,format='SAC')
              #print 'Appended arrivals to %s' %sacfile
           
           saccount += 1
        
        #Convert to radial and transverse components
        try: 
           rotstream = Rstream.rotate(method='NE->RT',back_azimuth=baz)
        
           for obj in rotstream:
              nt = obj.stats.network
              sta = obj.stats.station
              channel = obj.stats.channel
              name = "vel."+str(nt)+"."+str(station)+".."+str(channel)
              obj.write(name,format="SAC")
              #print 'Written new file %s' %name
        except:
           print 'Cannot rotate files in Rstream %s' %Rstream
           
    #create antelope database - just for S waves, which are picked on the transverse component.
    os.system('rm *.01.*')
    os.system('sac2db *.BHT T')
Пример #50
0
def YSPEC_Phase():
    """
    Create input file (yspec.in) for YSPEC based on the selected Phase
    """

    global input

    events, address_events = quake_info(input['address'], 'info')

    for i in range(0, len(events)):
        sta_ev_select = []
        sta_ev = read_station_event(address_events[i])

        for j in range(0, len(sta_ev[i])):
            dist = locations2degrees(lat1 = float(sta_ev[i][j][9]), \
                long1 = float(sta_ev[i][j][10]), lat2 = float(sta_ev[i][j][4]), \
                long2 = float(sta_ev[i][j][5]))
            tt = getTravelTimes(delta=dist, depth=float(sta_ev[i][j][11]), \
                                model=input['model'])

            sta_ev[i][j][8] = sta_ev[i][j][0] + '_' + sta_ev[i][j][1]

            for m in range(0, len(tt)):
                if tt[m]['phase_name'] in input['phase']:
                    sta_ev_select.append(sta_ev[i][j])

        #import ipdb; ipdb.set_trace()
        sta_ev_req = list(unique_items(sta_ev_select))


        if os.path.isfile(os.path.join(address_events[i],\
                            'info', 'yspec.in')):
            os.remove(os.path.join(address_events[i],\
                            'info', 'yspec.in'))

        shutil.copy2('./yspec.in', os.path.join(address_events[i],\
                            'info', 'yspec.in'))

        if os.path.isfile(os.path.join(address_events[i],\
                            'info', 'sta_yspec')):
            os.remove(os.path.join(address_events[i],\
                            'info', 'sta_yspec'))
        sta_yspec_open = open(os.path.join(address_events[i],\
                            'info', 'sta_yspec'), 'a+')

        for j in range(0, len(sta_ev_req)):
            sta_yspec_open.writelines(sta_ev_req[j][0] + ',' + \
                    sta_ev_req[j][1] + ',' + sta_ev_req[j][2] + ',' + \
                    sta_ev_req[j][3] + ',' + sta_ev_req[j][4] + ',' + \
                    sta_ev_req[j][5] + ',' + sta_ev_req[j][6] + ',' + \
                    sta_ev_req[j][7] + ',' + sta_ev_req[j][8] + ',' + \
                    sta_ev_req[j][9] + ',' + sta_ev_req[j][10] + ',' + \
                    sta_ev_req[j][11] + ',' + sta_ev_req[j][12] + ',\n')
        sta_yspec_open.close()

        receivers = []
        receivers.append('\n')
        for j in range(0, len(sta_ev_req)):
            receivers.append( '   ' + \
                            str(round(float(sta_ev_req[j][4]), 2)) + '    ' + \
                            str(round(float(sta_ev_req[j][5]), 2)) + \
                            '\n')

        yspecin_open = open(os.path.join(address_events[i],\
                            'info', 'yspec.in'), 'a+')

        yspecin_file = yspecin_open.readlines()

        search = '# source depth (km)'
        for j in range(0, len(yspecin_file)):
            if yspecin_file[j].find(search) != -1:
                yspecin_file[j+1] = '  ' + \
                                str(round(float(sta_ev_req[0][11]), 2)) + '\n'
                break

        search = '# source latitude (deg)'
        for j in range(0, len(yspecin_file)):
            if yspecin_file[j].find(search) != -1:
                yspecin_file[j+1] = '  ' + \
                                str(round(float(sta_ev_req[0][9]), 2)) + '\n'
                break

        search = '# source longitude (deg)'
        for j in range(0, len(yspecin_file)):
            if yspecin_file[j].find(search) != -1:
                yspecin_file[j+1] = '  ' + \
                                str(round(float(sta_ev_req[0][10]), 2)) + '\n'
                break

        search = '# number of receivers'
        for j in range(0, len(yspecin_file)):
            if yspecin_file[j].find(search) != -1:
                yspecin_file[j + 1] = '  ' + str(len(receivers) - 1) + '\n'
                break

        search = '# receiver latitudes and longitudes'
        for j in range(0, len(yspecin_file)):
            if yspecin_file[j].find(search) != -1:
                yspecin_file[j + 1:] = receivers
                break

        yspecin_open.close()
        os.remove(os.path.join(address_events[i], 'info', 'yspec.in'))
        yspecin_open = open(os.path.join(address_events[i],\
                            'info', 'yspec.in'), 'a+')
        for j in range(0, len(yspecin_file)):
            yspecin_open.write(yspecin_file[j])
        yspecin_open.close()

        print '\n***************************************'
        print 'Following Parameters have been changed:\n'
        print 'source depth'
        print 'source latitude'
        print 'source longitude'
        print 'number of receivers'
        print 'receiver latitude and longitude\n'
        print 'Please change the rest yourself!'
        print '***************************************'
Пример #51
0
    except:
        print('Problem with verticals: ' + sta)
        continue
    # We want to get the distance of the event and of the station
    # We also want the back-azimuth
    lat,lon = getlatlon(cursta, eventtime, sp)
    dist= gps2dist_azimuth(float(cmtlat), float(cmtlon), lat, lon)
    bazi ="{0:.1f}".format(dist[2])
    dist ="{0:.1f}".format( 0.0089932 * dist[0] / 1000)
    if debug:
        print 'Here is the distance:' + str(dist)
        print 'Here is the depth:' + str(dep)

    # Here is the travel time so we can do the final trim
    # Should this be in a function to avoid it being in the main loop?
    tt = getTravelTimes(delta=float(dist), depth=dep,model='ak135') 
    firstarrival = tt[0]['time']
    for ttphase in tt:
        phasename = ttphase['phase_name']
        phasename = phasename[:1]
        if phasename == 'S':
            secondarrival = ttphase['time']
            break
    # Here we do the trim from the phases    
    if not parserval.trigger:
        for trace in vertcomps:
            newstime = trace.stats.starttime + firstarrival - bfarrival
            newetime = trace.stats.starttime + secondarrival + afarrival
            trace.trim(starttime=newstime,endtime=newetime)

Пример #52
0
def ProcessLoopP(filepath):
    '''The file processing loop associated with P wave tomography: Just deal with the BHZ files to save time'''

    sacfiles = glob.glob('*.BHZ')

    #Check to see if there is more than one location for a given station
    stationnames = []
    for sacfile in sorted(sacfiles):
      sacfilenameparts = sacfile.split('.')
      stationname = sacfilenameparts[2]

      if stationname not in stationnames:
        stationnames.append(stationname)

        trace = read(sacfile)

        evlat = trace[0].stats.sac.evla
        evlon = trace[0].stats.sac.evlo
        evdep = trace[0].stats.sac.evdp
        stlat = trace[0].stats.sac.stla
        stlon = trace[0].stats.sac.stlo

        dist = locations2degrees(evlat,evlon,stlat,stlon) #find distance from the quake to the station
        arcs = IRISclient.distaz(stalat=stlat,stalon=stlon,evtlat=evlat,evtlon=evlon)
        baz = arcs['backazimuth']
        az = arcs['azimuth']

        #If we're running this code twice in a row, we need to correct evdep accordingly. The evdep that comes from ObsPy will be in km, but it needs
        #to be in the SAC header in meters. If the depth is already in meters in the SAC file, it will almost certainly be >1000. This if statement
        #checks for this and converts to km if necessary.

        if evdep > 1e3:
          evdep = evdep/1000.0

        traveltimes = getTravelTimes(dist,evdep, model='iasp91')
        
        P = 0
        S = 0

        for element in traveltimes:
            phaseinfo = element['phase_name']
            if phaseinfo == 'P':
               Ptime = element['time']
               P = 1
            if phaseinfo == 'S':
               Stime = element['time']
               S = 1
            if (P==1 and S==1):
               break

        try:
            P = Ptime
        except:
            Ptime = 0
        try:
            S = Stime
        except:
            Stime = 0

        #Set the P and S times
        trace[0].stats.sac.az = float(az)
        trace[0].stats.sac.baz = float(baz)
        trace[0].stats.sac.o = 0.0 #add origin time
        
        if Ptime > 0:
          trace[0].stats.sac.t1 = Ptime
        if Stime > 0:
          trace[0].stats.sac.t2 = Stime

        trace[0].stats.sac.evdp = evdep*1000 #dbpick wants depth to be in meters

        #other operations
        trace[0].detrend('demean')

        #THE CROSS CORRELATION CODE ASSUMES A SAMPLING RATE OF 0.05 (20 SAMPLES/SECOND). IT WILL NOT WORK
        #OTHERWISE!!!

        trace[0].resample(20)

        if results.autop:

          tracestreamP = trace.copy()

          df = tracestreamP[0].stats.sampling_rate
          filter1 = 0.02
          filter2 = 0.1
          tracestreamP.filter("bandpass",freqmin=filter1,freqmax=filter2,corners=2)
          tracestreamP.taper(max_percentage=0.05, type='cosine')

          p_pick, phase_info = pkBaer(tracestreamP[0].data,df,10,2,2,10,20,6) #output from this is in samples.

          autoPtime = p_pick/df

          #Append the autopicker's time to the SAC header (t3) if it agrees
          #with the predicted P arrival to within 20 s
          if abs(Ptime-autoPtime) < 20:
            print 'Autopick accepted!'
            trace[0].stats.sac.t3 = autoPtime
           
        #Important - must write to the SAC file!
        trace.write(sacfile,format='SAC')
           
        print 'Appended arrivals to %s' %sacfile

      else:
        print 'Found multiple instruments at station %s. Removing all but 1' %(stationname)
        os.system('rm %s' %sacfile)


    #create antelope database for P arrivals
    os.system('sac2db *.BHZ Z')
Пример #53
0
def YSPEC_Phase():
    
    """
    Create input file (yspec.in) for YSPEC based on the selected Phase
    """
    
    global input
    
    events, address_events = quake_info(input['address'], 'info')
    
    for i in range(0, len(events)):
        sta_ev_select = []
        sta_ev = read_station_event(address_events[i])
        
        for j in range(0, len(sta_ev[i])):
            dist = locations2degrees(lat1 = float(sta_ev[i][j][9]), \
                long1 = float(sta_ev[i][j][10]), lat2 = float(sta_ev[i][j][4]), \
                long2 = float(sta_ev[i][j][5]))
            tt = getTravelTimes(delta=dist, depth=float(sta_ev[i][j][11]), \
                                model=input['model'])
                                
            sta_ev[i][j][8] = sta_ev[i][j][0] + '_' + sta_ev[i][j][1]
            
            for m in range(0, len(tt)):
                if tt[m]['phase_name'] in input['phase']:
                    sta_ev_select.append(sta_ev[i][j])
                    
        #import ipdb; ipdb.set_trace()
        sta_ev_req = list(unique_items(sta_ev_select))
        
        
        if os.path.isfile(os.path.join(address_events[i],\
                            'info', 'yspec.in')):
            os.remove(os.path.join(address_events[i],\
                            'info', 'yspec.in'))
        
        shutil.copy2('./yspec.in', os.path.join(address_events[i],\
                            'info', 'yspec.in'))
        
        if os.path.isfile(os.path.join(address_events[i],\
                            'info', 'sta_yspec')):
            os.remove(os.path.join(address_events[i],\
                            'info', 'sta_yspec'))
        sta_yspec_open = open(os.path.join(address_events[i],\
                            'info', 'sta_yspec'), 'a+') 
        
        for j in range(0, len(sta_ev_req)):
            sta_yspec_open.writelines(sta_ev_req[j][0] + ',' + \
                    sta_ev_req[j][1] + ',' + sta_ev_req[j][2] + ',' + \
                    sta_ev_req[j][3] + ',' + sta_ev_req[j][4] + ',' + \
                    sta_ev_req[j][5] + ',' + sta_ev_req[j][6] + ',' + \
                    sta_ev_req[j][7] + ',' + sta_ev_req[j][8] + ',' + \
                    sta_ev_req[j][9] + ',' + sta_ev_req[j][10] + ',' + \
                    sta_ev_req[j][11] + ',' + sta_ev_req[j][12] + ',\n')
        sta_yspec_open.close()
        
        receivers = []
        receivers.append('\n')
        for j in range(0, len(sta_ev_req)):
            receivers.append( '   ' + \
                            str(round(float(sta_ev_req[j][4]), 2)) + '    ' + \
                            str(round(float(sta_ev_req[j][5]), 2)) + \
                            '\n')

        yspecin_open = open(os.path.join(address_events[i],\
                            'info', 'yspec.in'), 'a+') 
        
        yspecin_file = yspecin_open.readlines()
        
        search = '# source depth (km)'
        for j in range(0, len(yspecin_file)):
            if yspecin_file[j].find(search) != -1:
                yspecin_file[j+1] = '  ' + \
                                str(round(float(sta_ev_req[0][11]), 2)) + '\n'
                break
        
        search = '# source latitude (deg)'
        for j in range(0, len(yspecin_file)):
            if yspecin_file[j].find(search) != -1:
                yspecin_file[j+1] = '  ' + \
                                str(round(float(sta_ev_req[0][9]), 2)) + '\n'
                break
                
        search = '# source longitude (deg)'
        for j in range(0, len(yspecin_file)):
            if yspecin_file[j].find(search) != -1:
                yspecin_file[j+1] = '  ' + \
                                str(round(float(sta_ev_req[0][10]), 2)) + '\n'
                break
        
        search = '# number of receivers'
        for j in range(0, len(yspecin_file)):
            if yspecin_file[j].find(search) != -1:
                yspecin_file[j+1] = '  ' + str(len(receivers)-1) + '\n'
                break
        
        search = '# receiver latitudes and longitudes'
        for j in range(0, len(yspecin_file)):
            if yspecin_file[j].find(search) != -1:
                yspecin_file[j+1:] = receivers
                break
        
        yspecin_open.close()
        os.remove(os.path.join(address_events[i], 'info', 'yspec.in'))
        yspecin_open = open(os.path.join(address_events[i],\
                            'info', 'yspec.in'), 'a+')
        for j in range(0, len(yspecin_file)):
            yspecin_open.write(yspecin_file[j])
        yspecin_open.close()
        
        print '\n***************************************'
        print 'Following Parameters have been changed:\n'
        print 'source depth'
        print 'source latitude'
        print 'source longitude'
        print 'number of receivers'
        print 'receiver latitude and longitude\n'
        print 'Please change the rest yourself!'
        print '***************************************'
Пример #54
0
def single_comparison():
    
    """
    One-by-one comparison of the waveforms in the first path with those in the second path.
    """
    
    client = Client()
    
    global input
    
    # identity of the waveforms (first and second paths) to be compared with each other
    identity_all = input['net'] + '.' + input['sta'] + '.' + \
                    input['loc'] + '.' + input['cha']
    ls_first = glob.glob(os.path.join(input['first_path'], identity_all))
    ls_second = glob.glob(os.path.join(input['second_path'], identity_all))
    
    for i in range(0, len(ls_first)):
        try:
            tr1 = read(ls_first[i])[0]
    
            if input['phase'] != 'N':
                evsta_dist = util.locations2degrees(lat1 = tr1.stats.sac.evla, \
                                        long1 = tr1.stats.sac.evlo, lat2 = tr1.stats.sac.stla, \
                                        long2 = tr1.stats.sac.stlo)
                
                taup_tt = taup.getTravelTimes(delta = evsta_dist, depth = tr1.stats.sac.evdp)
                
                phase_exist = 'N'
                
                for tt_item in taup_tt:
                    if tt_item['phase_name'] == input['phase']:
                        print 'Requested phase:'
                        print input['phase']
                        print '------'
                        print tt_item['phase_name']
                        print 'exists in the waveform!'
                        print '-----------------------'
                        t_phase = tt_item['time']
                        
                        phase_exist = 'Y'
                        break
                        
                if phase_exist != 'Y':
                    continue
            
            # identity of the current waveform
            identity = tr1.stats.network + '.' + tr1.stats.station + '.' + \
                        tr1.stats.location + '.' + tr1.stats.channel
            
            # tr1: first path, tr2: second path, tr3: Raw data
            #tr3 = read(os.path.join(input['first_path'], '..', 'BH_RAW', identity))[0]
            
            if input['resp_paz'] == 'Y':
                response_file = os.path.join(input['first_path'], '..', 'Resp/RESP.' + identity)
                
                # Extract the PAZ info from response file
                paz = readRESP(response_file, unit = input['corr_unit'])
                
                poles = paz['poles']
                zeros = paz['zeros']
                scale_fac = paz['gain']
                sensitivity = paz['sensitivity']
            
                print paz
                
                # Convert Poles and Zeros (PAZ) to frequency response.
                h, f = pazToFreqResp(poles, zeros, scale_fac, \
                                1./tr1.stats.sampling_rate, tr1.stats.npts*2, freq=True)
                # Use the evalresp library to extract 
                # instrument response information from a SEED RESP-file.
                resp = invsim.evalresp(t_samp = 1./tr1.stats.sampling_rate, \
                        nfft = tr1.stats.npts*2, filename = response_file, \
                        date = tr1.stats.starttime, units = input['corr_unit'].upper())
            
            # Keep the current identity in a new variable
            id_name = identity
            
            try:
                tr2 = read(os.path.join(input['second_path'], identity))[0]
            except Exception, error:
                # if it is not possible to read the identity in the second path
                # then change the network part of the identity based on
                # correction unit
                identity = input['corr_unit'] + '.' + tr1.stats.station + '.' + \
                        tr1.stats.location + '.' + tr1.stats.channel
                tr2 = read(os.path.join(input['second_path'], identity))[0]
            
            if input['resample'] != 'N':
                print 'WARNING: you are using resample!!!'
                tr1.resample(input['resample'])
                tr2.resample(input['resample'])
            
            if input['tw'] == 'Y':
                t_cut_1 = tr1.stats.starttime + t_phase - input['preset']
                t_cut_2 = tr1.stats.starttime + t_phase + input['offset']
                tr1.trim(starttime = t_cut_1, endtime = t_cut_2)
                
                t_cut_1 = tr2.stats.starttime + t_phase - input['preset']
                t_cut_2 = tr2.stats.starttime + t_phase + input['offset']
                tr2.trim(starttime = t_cut_1, endtime = t_cut_2)
            
            
            if input['hlfilter'] == 'Y':
                tr1.filter('lowpass', freq=input['hfreq'], corners=2)
                tr2.filter('lowpass', freq=input['hfreq'], corners=2)
                tr1.filter('highpass', freq=input['lfreq'], corners=2)
                tr2.filter('highpass', freq=input['lfreq'], corners=2)
            
            # normalization of all three waveforms to the 
            # max(max(tr1), max(tr2), max(tr3)) to keep the scales
            #maxi = max(abs(tr1.data).max(), abs(tr2.data).max(), abs(tr3.data).max())
            
            #maxi = max(abs(tr1.data).max(), abs(tr2.data).max())
            #tr1_data = tr1.data/abs(maxi)
            #tr2_data = tr2.data/abs(maxi)
            #tr3_data = tr3.data/abs(maxi)
            
            tr1_data = tr1.data/abs(max(tr1.data))
            tr2_data = tr2.data/abs(max(tr2.data))
            
            #tr1_data = tr1.data
            #tr2_data = tr2.data*1e9
            
            print max(tr1.data)
            print max(tr2.data)
            
            # create time arrays for tr1, tr2 and tr3
            time_tr1 = np.arange(0, tr1.stats.npts/tr1.stats.sampling_rate, \
                                                1./tr1.stats.sampling_rate)
            time_tr2 = np.arange(0, tr2.stats.npts/tr2.stats.sampling_rate, \
                                                1./tr2.stats.sampling_rate)
            #time_tr3 = np.arange(0, tr3.stats.npts/tr3.stats.sampling_rate, \
            #                                    1./tr3.stats.sampling_rate)
            
            # label for plotting
            label_tr1 = ls_first[i].split('/')[-2]
            label_tr2 = ls_second[i].split('/')[-2]
            label_tr3 = 'RAW'
        
            if input['resp_paz'] == 'Y':
                # start plotting
                plt.figure()
                plt.subplot2grid((3,4), (0,0), colspan=4, rowspan=2)
                #plt.subplot(211)
            
            plt.plot(time_tr1, tr1_data, color = 'blue', label = label_tr1, lw=3)
            plt.plot(time_tr2, tr2_data, color = 'red', label = label_tr2, lw=3)
            #plt.plot(time_tr3, tr3_data, color = 'black', ls = '--', label = label_tr3)

            plt.xlabel('Time (sec)', fontsize = 'xx-large', weight = 'bold')
            
            if input['corr_unit'] == 'dis':
                ylabel_str = 'Relative Displacement'
            elif input['corr_unit'] == 'vel':
                ylabel_str = 'Relative Vel'
            elif input['corr_unit'] == 'acc':
                ylabel_str = 'Relative Acc'
            
            plt.ylabel(ylabel_str, fontsize = 'xx-large', weight = 'bold')
            
            plt.xticks(fontsize = 'xx-large', weight = 'bold')
            plt.yticks(fontsize = 'xx-large', weight = 'bold')
            
            plt.legend(loc=1,prop={'size':20})
            
            #-------------------Cross Correlation
            # Allow up to 3 seconds of shift (in samples) for the cross correlation.
            
            cc_np = tr1.stats.sampling_rate * 3
            
            np_shift, coeff = cross_correlation.xcorr(tr1, tr2, int(cc_np))
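            # np_shift is the lag (in samples) that maximises the correlation and
            # coeff its value; dividing by the sampling rate gives the shift in seconds.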
            
            t_shift = float(np_shift)/tr1.stats.sampling_rate
            
            print "Cross Correlation:"
            print "Shift:       " + str(t_shift)
            print "Coefficient: " + str(coeff)
            
            plt.title('Single Comparison' + '\n' + str(t_shift) + \
                        ' sec , coeff: ' + str(round(coeff, 5)) + \
                        '\n' + id_name, \
                        fontsize = 'xx-large', weight = 'bold')
            
            if input['resp_paz'] == 'Y':
                # -----------------------
                #plt.subplot(223)
                plt.subplot2grid((3,4), (2,0), colspan=2)
                '''
                plt.plot(np.log10(f), np.log10(abs(resp)/(sensitivity*sensitivity)), \
                                            color = 'blue', label = 'RESP', lw=3)
                plt.plot(np.log10(f), np.log10(abs(h)/sensitivity), \
                                            color = 'red', label = 'PAZ', lw=3)
                '''
                plt.loglog(f, abs(resp)/(sensitivity*sensitivity), \
                                            color = 'blue', label = 'RESP', lw=3)
                plt.loglog(f, abs(h)/sensitivity, \
                                            color = 'red', label = 'PAZ', lw=3)
                
                #for j in [0.008, 0.012, 0.025, 0.5, 1, 2, 3, 4]:
                for j in [0]:
                    plt.axvline(np.log10(j), linestyle = '--')

                #plt.xlabel('Frequency [Hz]\n(power of 10)', fontsize = 'xx-large', weight = 'bold')
                #plt.ylabel('Amplitude\n      (power of 10)', fontsize = 'xx-large', weight = 'bold')
                
                plt.xlabel('Frequency [Hz]', fontsize = 'xx-large', weight = 'bold')
                plt.ylabel('Amplitude', fontsize = 'xx-large', weight = 'bold')
                
                plt.xticks(fontsize = 'xx-large', weight = 'bold')
                
                
                #plt.yticks = MaxNLocator(nbins=4)
                plt.yticks(fontsize = 'xx-large', weight = 'bold')
                plt.legend(loc=2,prop={'size':20})
                
                # -----------------------
                #plt.subplot(224)
                plt.subplot2grid((3,4), (2,2), colspan=2)

                #take negative of imaginary part
                phase_paz = np.unwrap(np.arctan2(h.imag, h.real))
                phase_resp = np.unwrap(np.arctan2(resp.imag, resp.real))
                #plt.plot(np.log10(f), phase_resp, color = 'blue', label = 'RESP', lw=3)
                #plt.plot(np.log10(f), phase_paz, color = 'red', label = 'PAZ', lw=3)
                
                plt.semilogx(f, phase_resp, color = 'blue', label = 'RESP', lw=3)
                plt.semilogx(f, phase_paz, color = 'red', label = 'PAZ', lw=3)
                
                #for j in [0.008, 0.012, 0.025, 0.5, 1, 2, 3, 4]:
                for j in [0.0]:
                    plt.axvline(np.log10(j), linestyle = '--')

                #plt.xlabel('Frequency [Hz]\n(power of 10)', fontsize = 'xx-large', weight = 'bold')
                plt.xlabel('Frequency [Hz]', fontsize = 'xx-large', weight = 'bold')
                plt.ylabel('Phase [radian]', fontsize = 'xx-large', weight = 'bold')
                
                plt.xticks(fontsize = 'xx-large', weight = 'bold')
                plt.yticks(fontsize = 'xx-large', weight = 'bold')
            
                plt.legend(loc=3,prop={'size':20})
                
                # title, centered above both subplots
                # make more room in between subplots for the ylabel of right plot
                plt.subplots_adjust(wspace=0.4, hspace=0.3)
                """
                # -----------------------
                plt.subplot(325)
                
                plt.plot(np.log10(f), np.log10(abs(resp)/(sensitivity*sensitivity)) - \
                                        np.log10(abs(h)/sensitivity), \
                                        color = 'black', label = 'RESP - PAZ')

                for j in [0.008, 0.012, 0.025, 0.5, 1, 2, 3, 4]:
                    plt.axvline(np.log10(j), linestyle = '--')

                plt.xlabel('Frequency [Hz] (power of 10)')
                plt.ylabel('Amplitude (power of 10)')

                plt.legend()
                
                # -----------------------
                plt.subplot(326)
                #take negative of imaginary part
                phase_paz = np.unwrap(np.arctan2(h.imag, h.real))
                phase_resp = np.unwrap(np.arctan2(resp.imag, resp.real))
                plt.plot(np.log10(f), np.log10(phase_resp) - np.log10(phase_paz), \
                                        color = 'black', label = 'RESP - PAZ')

                for j in [0.008, 0.012, 0.025, 0.5, 1, 2, 3, 4]:
                    plt.axvline(np.log10(j), linestyle = '--')

                plt.xlabel('Frequency [Hz] (power of 10)')
                plt.ylabel('Phase [radian] (power of 10)')

                plt.legend()

                # title, centered above both subplots
                # make more room in between subplots for the ylabel of right plot
                plt.subplots_adjust(wspace=0.3)
                """
            plt.show()
                
            
            print str(i+1) + '/' + str(len(ls_first))
            print ls_first[i]
            print '------------------'
            wait = raw_input(id_name)
            print '***************************'
            
        except Exception, error:
            print '##################'
            print error
            print '##################'
Пример #55
0
    except:
        print('Problem with verticals: ' + sta)
        continue
    # We want to get the distance of the event and of the station
    # We also want the back-azimuth
    lat, lon = getlatlon(cursta, eventtime, sp)
    dist = gps2dist_azimuth(float(cmtlat), float(cmtlon), lat, lon)
    bazi = "{0:.1f}".format(dist[2])
    dist = "{0:.1f}".format(0.0089932 * dist[0] / 1000)
    if debug:
        print 'Here is the distance:' + str(dist)
        print 'Here is the depth:' + str(dep)

    # Here is the travel time so we can do the final trim
    # Should this be in a function to avoid it being in the main loop?
    tt = getTravelTimes(delta=float(dist), depth=dep, model='ak135')
    firstarrival = tt[0]['time']
    for ttphase in tt:
        phasename = ttphase['phase_name']
        phasename = phasename[:1]
        if phasename == 'S':
            secondarrival = ttphase['time']
            break
    # Here we do the trim from the phases
    if not parserval.trigger:
        for trace in vertcomps:
            newstime = trace.stats.starttime + firstarrival - bfarrival
            newetime = trace.stats.starttime + secondarrival + afarrival
            trace.trim(starttime=newstime, endtime=newetime)

    if debug:
Пример #56
0
    def update(self, autorange=False, force=False):

        try:
            self._plot_receiver()
            self._plot_event()
        except AttributeError:
            return

        if (not bool(self.ui.auto_update_check_box.checkState())
                and self.ui.finsource_tab.currentIndex() == 1 and not force):
            return

        components = ["z", "n", "e"]
        components_map = {0: ("Z", "N", "E"),
                          1: ("Z", "R", "T")}

        components_choice = int(self.ui.components_combo.currentIndex())

        label_map = {0: {"z": "vertical", "n": "east", "e": "north"},
                     1: {"z": "vertical", "n": "radial", "e": "transverse"}}

        for component in components:
            p = getattr(self.ui, "%s_graph" % component)
            p.setTitle(label_map[components_choice][component].capitalize()
                       + " component")

        if self.ui.finsource_tab.currentIndex() == 0:
            src_latitude = self.source.latitude
            src_longitude = self.source.longitude
            src_depth_in_m = self.source.depth_in_m
        else:
            src_latitude = self.finite_source.hypocenter_latitude
            src_longitude = self.finite_source.hypocenter_longitude
            src_depth_in_m = self.finite_source.hypocenter_depth_in_m

        rec = self.receiver
        try:
            # Grab resampling settings from the UI.
            if bool(self.ui.resample_check_box.checkState()):
                dt = float(self.ui.resample_factor.value())
                dt = self.instaseis_db.dt / dt
            else:
                dt = None
            if self.ui.finsource_tab.currentIndex() == 0:
                st = self.instaseis_db.get_seismograms(
                    source=self.source, receiver=self.receiver, dt=dt,
                    components=components_map[components_choice])
            elif self.ui.finsource_tab.currentIndex() == 1:
                st = self.instaseis_db.get_seismograms_finite_source(
                    sources=self.finite_source, receiver=self.receiver, dt=dt,
                    components=components_map[components_choice])

            # check filter values from the UI
            if bool(self.ui.lowpass_check_box.checkState()):
                try:
                    freq = 1.0 / float(self.ui.lowpass_period.value())
                    st.filter('lowpass', freq=freq, zerophase=True)
                except ZeroDivisionError:
                    # this happens when typing in the lowpass_period box
                    pass

            if bool(self.ui.highpass_check_box.checkState()):
                try:
                    freq = 1.0 / float(self.ui.highpass_period.value())
                    st.filter('highpass', freq=freq, zerophase=True)
                except ZeroDivisionError:
                    # this happens when typing in the highpass_period box
                    pass

        except AttributeError:
            return

        if bool(self.ui.tt_times.checkState()):
            great_circle_distance = locations2degrees(
                src_latitude, src_longitude,
                rec.latitude, rec.longitude)
            self.tts = getTravelTimes(great_circle_distance,
                                      src_depth_in_m / 1000.0, model="ak135")
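            # self.tts holds the ak135 arrivals for this source-receiver distance;
            # they are drawn as vertical phase lines on each component plot below.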

        for ic, component in enumerate(components):
            plot_widget = getattr(self.ui, "%s_graph" % component.lower())
            plot_widget.clear()
            tr = st.select(component=components_map[components_choice][ic])[0]
            times = tr.times()
            plot_widget.plot(times, tr.data, pen="k")

            if bool(self.ui.tt_times.checkState()):
                # iterate over a copy so that removing late arrivals does not
                # skip entries of the list being modified
                for tt in list(self.tts):
                    if tt["time"] >= times[-1]:
                        self.tts.remove(tt)
                        continue
                    if tt["phase_name"][0].lower() == "p":
                        pen = "#008c2866"
                    else:
                        pen = "#95000066"
                    plot_widget.addLine(x=tt["time"], pen=pen, z=-10)

            if autorange:
                plot_widget.autoRange()