Example #1
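An ObsPy STA/LTA coincidence-trigger snippet: it merges, detrends, tapers and bandpass-filters the Stream st, runs a recursive STA/LTA coincidence trigger, then appends one summary line per detection, saves a plot of each triggered window, and collects "-a" attachment arguments, presumably for the mutt mail client. The names PAR, T1, T2, PLOTDIR, num_stations, summary and mutt come from the surrounding script; a hypothetical sketch of them follows the snippet.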
summary.append("#" * 79)

trig = []
mutt = []
if st:
    # preprocessing, backup original data for plotting at end
    st.merge(0)
    st.detrend("linear")
    for tr in st:
        tr.data = tr.data * cosTaper(len(tr), 0.01)
    #st.simulate(paz_remove="self", paz_simulate=cornFreq2Paz(1.0), remove_sensitivity=False)
    st.sort()
    st.filter("bandpass", freqmin=PAR.LOW, freqmax=PAR.HIGH, corners=1, zerophase=True)
    st.trim(T1, T2)
    st_trigger = st.copy()
    st.normalize(global_max=False)
    # do the triggering
    trig = coincidenceTrigger("recstalta", PAR.ON, PAR.OFF, st_trigger,
            thr_coincidence_sum=PAR.MIN_STATIONS,
            max_trigger_length=PAR.MAXLEN, trigger_off_extension=PAR.ALLOWANCE,
            details=True, sta=PAR.STA, lta=PAR.LTA)

    for t in trig:
        info = "%s %ss %s %s" % (t['time'].strftime("%Y-%m-%dT%H:%M:%S"), ("%.1f" % t['duration']).rjust(4), ("%i" % t['cft_peak_wmean']).rjust(3), "-".join(t['stations']))
        summary.append(info)
        tmp = st.slice(t['time'] - 1, t['time'] + t['duration'])
        outfilename = "%s/%s_%.1f_%i_%s-%s_%s.png" % (PLOTDIR, t['time'].strftime("%Y-%m-%dT%H:%M:%S"), t['duration'], t['cft_peak_wmean'], len(t['stations']), num_stations, "-".join(t['stations']))
        tmp.plot(outfile=outfilename)
        mutt += ("-a", outfilename)

summary.append("#" * 79)
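A minimal sketch of the external setup the snippet assumes; the imports target a recent ObsPy, and every parameter value below is invented for illustration:

from argparse import Namespace

from obspy import UTCDateTime
# recent ObsPy spells these coincidence_trigger / cosine_taper; the camelCase
# names used above come from very old releases
from obspy.signal.invsim import cosine_taper as cosTaper
from obspy.signal.trigger import coincidence_trigger as coincidenceTrigger

PAR = Namespace(LOW=1.0, HIGH=10.0,          # bandpass corners (Hz)
                STA=0.5, LTA=10.0,           # STA/LTA window lengths (s)
                ON=3.5, OFF=1.0,             # trigger on/off thresholds
                MIN_STATIONS=3,              # coincidence sum
                MAXLEN=10.0, ALLOWANCE=1.0)  # length cap / off extension (s)
T1 = UTCDateTime("2012-01-01T00:00:00")      # placeholder time window
T2 = T1 + 3600
PLOTDIR = "plots"
num_stations = 5          # total stations contributing to st
summary, mutt = [], []    # report lines / mutt attachment arguments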
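Example #2

A TensorFlow data-preparation script: for every station directory it pairs each vertical-component SAC file with its E/N components, resamples to 100 Hz, trims a window_size-long window at a random offset, builds a three-channel label stream marking P and S pick windows, and writes waveform/label pairs to .tfrecords (optionally dumping MiniSEED copies and plots). FLAGS, DataWriter, write_json, plus the os, glob, re, numpy (np) and obspy (read, Stream) imports are provided by the surrounding project; a hypothetical sketch of the flag boilerplate follows the listing.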
def main(_):
    stream_dirs = os.listdir(FLAGS.stream_dir)

    print "List of streams to anlayze", stream_dirs

    # Create dir to store tfrecords
    if not os.path.exists(FLAGS.output_dir):
        os.makedirs(FLAGS.output_dir)

    # Dictionary of nb of events per tfrecords
    metadata = {}
    output_metadata = os.path.join(FLAGS.output_dir, "metadata.json")

    # Load Catalog

    #evlog = load_catalog(FLAGS.catalog)
    #print ("+ Loading Catalog:",evlog)
    for stream_dir in stream_dirs:

        #cat = evlog[(evlog.stname == stream_dir)]
        #print cat
        # Load stream
        stream_path = os.path.join(FLAGS.stream_dir, stream_dir, "event")
        stream_files = glob.glob(stream_path + '/*HZ.D.SAC')
        #print waveforms[0]
        output_name = stream_dir + ".tfrecords"
        output_path = os.path.join(FLAGS.output_dir, output_name)
        writer = DataWriter(output_path)
        print("+ Creating tfrecords for {} events".format(len(stream_files)))
        for stream_file in stream_files:
            # derive the matching E and N component files from the Z file
            stream_file1 = re.sub('HZ.D.SAC', 'HE.D.SAC', str(stream_file))
            stream_file2 = re.sub('HZ.D.SAC', 'HN.D.SAC', str(stream_file))
            # Load stream
            #print "+ Loading Stream {}".format(stream_path)
            stream = read(stream_file)
            stream += read(stream_file1)
            stream += read(stream_file2)

            #stream_filepath = os.path.join(stream_path, stream_file)
            #stream = read(stream_filepath)
            #print '+ Preprocessing stream',stream
            #stream = preprocess_stream(stream)

            # Filter catalog according to the loaded stream

            start_date = stream[0].stats.starttime
            end_date = stream[-1].stats.endtime
            print("-- Start Date={}, End Date={}".format(start_date, end_date))
            # random offset (in seconds) used to jitter the trimming window
            x = np.random.randint(0, 4)

            print("+ Loading Stream selected\n {}\n ".format(stream))

            # need all three components (Z, E, N)
            if len(stream) < 3:
                continue
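            # Stream.resample works in place and returns the stream, so
            # st_event is a window_size-long copy starting x s after the
            # original trace start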
            st_event = stream.resample(100).trim(start_date + x,
                                                 start_date + x +
                                                 FLAGS.window_size,
                                                 pad=True,
                                                 fill_value=0.0).copy()
            #st_event.resample(100)
            print(st_event)
            n_samples = len(st_event[0].data)
            sample_rate = st_event[0].stats.sampling_rate
            n_pts = sample_rate * FLAGS.window_size + 1
            # offsets (in s) of the presumed picks inside the window: the P
            # arrival sits 5 s after the original trace start and the S
            # arrival 15 s before its end, both shifted by the random x
            cluster_id_p = 5 - x
            cluster_id_s = end_date - start_date - x - 15
            if cluster_id_s >= 30:
                continue
            assert n_pts == n_samples, "n_pts and n_samples are not the same"
            # Write event waveforms and cluster_id in .tfrecords

            # for p picks
            # u=0
            # label = np.zeros((n_samples), dtype=np.float32)
            label_obj = st_event.copy()

            # channel 0 starts as all "noise"; channels 1 and 2 will hold
            # the P and S pick masks
            label_obj[0].data[...] = 1
            label_obj[1].data[...] = 0
            label_obj[2].data[...] = 0
            u1 = cluster_id_p * sample_rate  # mean value mu (P-pick sample)
            lower = int(u1 - sample_rate)
            upper = int(u1 + sample_rate)
            # mark a +/- 1 s window around the P pick on channel 1
            label_obj[1].data[lower:upper] = 1
            # label_obj.data[int(u1 - 0.5 * sample_rate):int(u1 + 0.5 * sample_rate)] = 1
            # y_sig = np.random.normal(u1, sig, n_samples )
            # for s pick
            u2 = cluster_id_s * sample_rate  # mean value mu (S-pick sample)

            lower2, upper2 = int(u2 - sample_rate), int(u2 + sample_rate)
            # NumPy clips out-of-range slice bounds, so an S window that runs
            # past the trace end is truncated to the last sample automatically
            label_obj[2].data[lower2:upper2] = 2
            label_obj.normalize()
            # channel 0 ends up 1 only where neither pick mask is set
            label_obj[0].data = (label_obj[0].data - label_obj[1].data -
                                 label_obj[2].data)
            # label_obj.data[int(u2 - sample_rate):n_samples] = 2
            writer.write(st_event.copy().normalize(), label_obj)
            if FLAGS.save_mseed:
                output_label = "{}_{}.mseed".format(
                    st_event[0].stats.station,
                    str(st_event[0].stats.starttime).replace(':', '_'))

                output_mseed_dir = os.path.join(FLAGS.output_dir, "mseed")
                if not os.path.exists(output_mseed_dir):
                    os.makedirs(output_mseed_dir)
                output_mseed = os.path.join(output_mseed_dir, output_label)
                st_event.write(output_mseed, format="MSEED")

            # Plot events
            if FLAGS.plot:
                #import matplotlib
                #matplotlib.use('Agg')

                # from obspy.core import Stream
                traces = Stream()
                traces += st_event[0].filter('bandpass',
                                             freqmin=0.5,
                                             freqmax=20)
                traces += label_obj
                # print traces
                viz_dir = os.path.join(FLAGS.output_dir, "viz", stream_dir)
                if not os.path.exists(viz_dir):
                    os.makedirs(viz_dir)
                traces.normalize().plot(outfile=os.path.join(
                    viz_dir,
                    # changed 2017/11/25: use max cluster_prob instead of
                    # cluster_id
                    # "event_{}_cluster_{}.png".format(idx, cluster_id)))
                    "event_{}_{}.png".format(
                        st_event[0].stats.station,
                        str(st_event[0].stats.starttime).replace(':', '_'))))

        # Cleanup writer
        print("Number of events written={}".format(writer._written))
        writer.close()
        # Write metadata
        metadata[stream_dir] = writer._written
        write_json(metadata, output_metadata)
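The main(_) signature and FLAGS usage match the TensorFlow 1.x app/flags pattern. A hypothetical sketch of the missing boilerplate; the flag names mirror the attributes used above, and all defaults are invented:

import tensorflow as tf

flags = tf.app.flags
flags.DEFINE_string("stream_dir", None, "root directory of per-station streams")
flags.DEFINE_string("output_dir", "output", "where to write the .tfrecords")
flags.DEFINE_integer("window_size", 30, "event window length in seconds")
flags.DEFINE_boolean("save_mseed", False, "also save each window as MiniSEED")
flags.DEFINE_boolean("plot", False, "save a PNG of each labeled window")
FLAGS = flags.FLAGS

if __name__ == "__main__":
    tf.app.run()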
Example #3
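A plotting method excerpted from an event/origin class (note the self parameter): it fetches a waveform window for every arrival through a seispy Groundhog client, sizing the window from the station-distance range, overlays observed picks and, when a travel-time grid is passed, dashed predicted arrivals, then tiles the traces in a grid next to a map panel. Stream, gps2dist_azimuth, GridSpec, Bbox, ceil, plt (matplotlib.pyplot) and the seispy/sp modules are assumed to be imported at module level.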
def plot(self, filter=None, save=False, show=True, ttgrid=None):
    # excerpted from an event/origin class: "self" supplies lat, lon, depth,
    # time, evid, arrivals and plot_map(); "filter" is an (args, kwargs)
    # pair forwarded to Stream.filter
    st = Stream()
    willy = seispy.burrow.Groundhog()
    distance = sorted([
        gps2dist_azimuth(self.lat, self.lon, arrival.station.lat,
                         arrival.station.lon)[0]
        for arrival in self.arrivals
    ])
    dmin, dmax = min(distance), max(distance)
    startlag = dmin / 6000. - 7
    endlag = dmax / 2000. + 5
    for arrival in self.arrivals:
        st += willy.fetch(arrival.station.name,
                          arrival.channel.code,
                          starttime=self.time + startlag,
                          endtime=self.time + endlag)
    arrivals = sorted(
        self.arrivals,
        key=lambda arrival:
        (arrival.station.network, arrival.station.name, arrival.channel))
    if filter is not None:
        st.filter(*filter[0], **filter[1])
    st.trim(starttime=self.time + startlag + 2.)
    st.normalize()
    MAX_TRACES = 9
    ncol = int(ceil(len(st) / float(MAX_TRACES))) + 1
    nrow = int(ceil(len(st) / float(ncol - 1)))
    gs = GridSpec(nrow, ncol)
    gs.update(hspace=0, wspace=0)
    width = 1600
    height = width / float(ncol)
    fig = st.plot(size=(width, height), handle=True)
    row, col = 0, 0
    for i in range(len(fig.axes)):
        ax = fig.axes[i]
        arrival = arrivals[i]
        # observed arrivals: red for P, green for S, blue otherwise
        color = "r" if arrival.phase == "P"\
            else "g" if arrival.phase == "S" else "b"
        ax.axvline(arrival.time.toordinal() +
                   arrival.time._get_hours_after_midnight() / 24.,
                   color=color,
                   linewidth=2,
                   alpha=0.75)
        if ttgrid is not None:
            r, theta, phi = sp.geometry.geo2sph(self.lat, self.lon,
                                                self.depth)
            try:
                predicted = self.time + ttgrid.get_tt(
                    arrival.station.name, arrival.phase, r, theta, phi)
            except KeyError:
                continue
            # predicted arrivals are drawn dashed in the same color
            ax.axvline(predicted.toordinal() +
                       predicted._get_hours_after_midnight() / 24.,
                       color=color,
                       linewidth=2,
                       linestyle="--",
                       alpha=0.75)
        if row % nrow == 0:
            col += 1
            row = 0
        position = gs[row, col].get_position(fig)
        ax.set_position(position)
        ax.get_yaxis().set_visible(False)
        row += 1
    # re-enable x tick labels on the bottom axis of each column
    for ax in fig.axes[nrow - 1::nrow] + [fig.axes[-1]]:
        ax.set_xticklabels(ax.get_xticklabels(),
                           visible=True,
                           fontsize=10,
                           rotation=-15,
                           horizontalalignment="left")
    gs.update(wspace=0.2)
    postl = gs[0].get_position(fig)
    posbl = gs[ncol * (nrow - 1)].get_position(fig)
    # reserve the first grid column for the map panel
    bbox_map = Bbox(((posbl.x0, posbl.y0), (posbl.x1, postl.y1)))
    ax = fig.add_axes(bbox_map)
    self.plot_map(ax=ax)
    fig.suptitle("%s  (ID #%d)" % (self.time, self.evid))
    if save:
        plt.savefig("%s.png" % save, format="png")
    if show:
        plt.show()
    else:
        plt.close()
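Because the method unpacks its filter argument as st.filter(*filter[0], **filter[1]), callers pass an (args, kwargs) pair. A hypothetical call, where origin is an instance of the owning class and my_ttgrid exposes the get_tt(station, phase, r, theta, phi) lookup used above:

origin.plot(filter=(("bandpass",), {"freqmin": 1.0, "freqmax": 10.0}),
            save="event_1234", show=False, ttgrid=my_ttgrid)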