def main(_):
    """Real-time P/S seismic phase picking with a pre-trained CNN.

    Endless loop: fetch the three components (BHE/BHN/BHZ) of station
    FJ.HAJF from a LISS/redis waveform server, trim them to a common time
    span, slide fixed-length windows over the traces, classify each window
    with a Keras model loaded from FLAGS.para_path, and append P/S picks
    (plus data-transfer error records) to CSV files in FLAGS.output_path.
    Stops on KeyboardInterrupt or when FLAGS.receive_interval < 0.
    """
    #####################
    # Hyperparameters
    min_proba_s = 0.95  # Minimum softmax probability to accept an S pick
    min_proba_p = 0.85  # Minimum softmax probability to accept a P pick
    freq_min = 3.0      # Bandpass corners (Hz); used only when filter_data is True
    freq_max = 20.0
    filter_data = False
    decimate_data = False  # If false, assumes data is already 100 Hz samprate
    n_shift = 10  # Number of samples to shift the sliding window at a time
    n_gpu = 1  # Number of GPUs to use (if any)
    #####################
    batch_size = 1000 * 3  # Windows per prediction batch

    half_dur = 2.00  # Half window length in seconds
    only_dt = 0.01   # Assumed sampling interval (100 Hz)
    n_win = int(half_dur / only_dt)
    n_feat = 2 * n_win  # Samples per sliding window (400)

    if not os.path.exists(FLAGS.output_path):
        os.makedirs(FLAGS.output_path)

    # Reading in liss stream
    wave = rediss.waveform('xx.xx.xxx.xxx')  # should input your server ip here
    bufferLength = wave.GetBufferLen()
    # NOTE(review): the server-reported buffer length above is immediately
    # overridden with a fixed 60 s window — confirm this is intentional.
    bufferLength = 60
    wave.ReadStnPara()

    ##############################
    # First model: architecture from JSON, weights from HDF5
    ##############################
    json_model_psn = os.path.join(FLAGS.para_path, 'model_first_trained.json')
    # FIX: context manager closes the JSON file even if reading fails.
    with open(json_model_psn, 'r') as json_file_psn:
        loaded_psn_model_json = json_file_psn.read()
    model_psn = tf.keras.models.model_from_json(loaded_psn_model_json, custom_objects={'tf': tf})
    print("model_psn:", model_psn)

    # Load trained weights into the reconstructed model.
    try:
        best_model_psn = os.path.join(FLAGS.para_path, 'model_first_trained_best.hdf5')
        model_psn.load_weights(best_model_psn)
    # FIX: narrowed the bare `except:` (it also swallowed KeyboardInterrupt).
    # NOTE(review): on failure the model keeps untrained weights and the loop
    # still runs — consider re-raising instead.
    except Exception as exc:
        print('Failed to load weights:', repr(exc))
        print(model_psn.summary())
    print("Loaded model from disk")

    if n_gpu > 1:
        # NOTE(review): multi_gpu_model is deprecated in modern tf.keras
        # (tf.distribute.MirroredStrategy is the replacement) — verify TF version.
        model_psn = tf.keras.utils.multi_gpu_model(model_psn, gpus=n_gpu)

    # Output CSVs: P picks, S picks, and a data-transfer error log.
    p_picks_catalog = os.path.join(FLAGS.output_path, 'p_picks_by_CNN.csv')
    s_picks_catalog = os.path.join(FLAGS.output_path, 's_picks_by_CNN.csv')
    data_transfer_error = os.path.join(FLAGS.output_path, 'data_transfer_error_log.csv')
    # Write headers once ('w' truncates), then reopen in append mode each loop.
    ofile_p = open(p_picks_catalog, 'w')
    ofile_s = open(s_picks_catalog, 'w')
    ofile_d = open(data_transfer_error, 'w')
    ofile_d.write("%s %s %s %s %s\n" % ("net","sta",  "timestamp",
                                                "starttime", "[errorSecSum, errorSecSum1, errorSecSum2]"))
    ofile_p.write("%s %s P %s\n" % ("net", "sta", "time"))
    ofile_s.write("%s %s S %s\n" % ("net", "sta", "time"))
    ofile_p.close()
    ofile_s.close()
    ofile_d.close()

    def _fetch_channel(chan_code, verbose=False):
        """Fetch one FJ.HAJF channel; return (trace, errorSecSum, startTime)."""
        stn_no, chn_no = wave.GetStnNoChnNo("FJ", "HAJF", chan_code, "00")
        start_t, end_t = wave.GetWaveTime(stn_no, chn_no)
        if verbose:
            print(start_t, end_t)
        d = datetime.utcfromtimestamp(start_t)
        # Truncate to whole seconds before requesting the waveform window.
        start_time = UTCDateTime(d.year, d.month, d.day, d.hour, d.minute, d.second)
        if verbose:
            print('--------------')
            print('startTime:', start_time)
            print(d.year, d.month, d.day, d.hour, d.minute, d.second, start_time.timestamp)
            print('--------------')
        trace, error_sec = wave.GetWaveDataUtcTime(stn_no, chn_no, start_time, bufferLength)
        return trace, error_sec, start_time

    # FIX: pre-defined so the except handlers below never raise NameError when
    # a failure occurs before startTime is assigned on the first iteration.
    startTime = None
    while True:
        if FLAGS.receive_interval < 0:
            print('receive_interval cannot be negative.')
            break
        try:
            ofile_p = open(p_picks_catalog, 'a')
            ofile_s = open(s_picks_catalog, 'a')
            ofile_d = open(data_transfer_error, 'a')

            # Fetch the three components (E prints debug info, N and Z don't,
            # matching the original behavior).
            tr, errorSecSum, startTime = _fetch_channel("BHE", verbose=True)
            tr1, errorSecSum1, startTime1 = _fetch_channel("BHN")
            tr2, errorSecSum2, startTime2 = _fetch_channel("BHZ")

            print("errorSecSum, errorSecSum1, errorSecSum2:", errorSecSum, errorSecSum1, errorSecSum2)

            net = tr.stats.network
            sta = tr.stats.station

            # If any channel reported transfer errors, log it and skip past the
            # bad stretch (+4 s margin) when choosing the common start time.
            if max(errorSecSum, errorSecSum1, errorSecSum2) > 0:
                maxstart = max(startTime + errorSecSum, startTime1 + errorSecSum1, startTime2 + errorSecSum2) + 4
                ofile_d.write("%s %s %s %s  %s %s %s \n" % (net, sta, str(UTCDateTime(maxstart).timestamp),
                                                maxstart.isoformat(), str(errorSecSum), str(errorSecSum1), str(errorSecSum2)))
            else:
                maxstart = max(startTime, startTime1, startTime2)

            # Trim all three traces to their common time span.
            minend = min(tr1.stats.endtime, tr.stats.endtime, tr2.stats.endtime)
            tr.data = tr.data.astype(float)
            tr1.data = tr1.data.astype(float)
            tr2.data = tr2.data.astype(float)
            tr = tr.slice(maxstart, minend)
            tr1 = tr1.slice(maxstart, minend)
            tr2 = tr2.slice(maxstart, minend)

            # Stream order is N, E, Z — it must match the model's channel
            # order used when filling tr_win below.
            st = oc.Stream()
            st += tr1
            st += tr
            st += tr2
            if FLAGS.save_mseed:
                outpath1 = os.path.join(FLAGS.output_path, "mseed")
                if not os.path.exists(outpath1):
                    os.makedirs(outpath1)
                seedname = os.path.join(outpath1, str(tr.stats.starttime).replace(':', '_') + '.mseed')
                st.write(seedname, format="MSEED")

            st.detrend(type='linear')
            if filter_data:
                st.filter(type='bandpass', freqmin=freq_min, freqmax=freq_max)
            if decimate_data:
                st.interpolate(100.0)
            chan = st[0].stats.channel
            sr = st[0].stats.sampling_rate
            dt = st[0].stats.delta

            if FLAGS.V:
                print("Reshaping data matrix for sliding window")
            # Window-center times (seconds from trace start) and sample indices.
            tt = (np.arange(0, st[0].data.size, n_shift) + n_win) * dt
            tt_i = np.arange(0, st[0].data.size, n_shift) + n_feat
            sliding_N = sliding_window(st[0].data, n_feat, stepsize=n_shift)
            sliding_E = sliding_window(st[1].data, n_feat, stepsize=n_shift)
            sliding_Z = sliding_window(st[2].data, n_feat, stepsize=n_shift)
            tr_win = np.zeros((sliding_N.shape[0], n_feat, 3))
            tr_win[:, :, 0] = sliding_N
            tr_win[:, :, 1] = sliding_E
            tr_win[:, :, 2] = sliding_Z
            # Normalize each window by its absolute maximum across all three
            # channels.  NOTE(review): an all-zero window divides by zero here.
            tr_win = tr_win / np.max(np.abs(tr_win), axis=(1, 2))[:, None, None]
            tt = tt[:tr_win.shape[0]]
            tt_i = tt_i[:tr_win.shape[0]]

            # Per-window class probabilities: column 0 = P, 1 = S, 2 = noise.
            if FLAGS.V:
                ts = model_psn.predict(tr_win, verbose=True, batch_size=batch_size)
            else:
                ts = model_psn.predict(tr_win, verbose=False, batch_size=batch_size)

            prob_S = ts[:, 1]
            prob_P = ts[:, 0]
            prob_N = ts[:, 2]

            # P picks: within each trigger interval take the probability peak.
            trigs = trigger_onset(prob_P, min_proba_p, 0.1)
            p_picks = []
            s_picks = []
            for trig in trigs:
                if trig[1] == trig[0]:
                    continue
                pick = np.argmax(ts[trig[0]:trig[1], 0]) + trig[0]
                stamp_pick = st[0].stats.starttime + tt[pick]
                p_picks.append(stamp_pick)
                ofile_p.write("%s %s P %s\n" % (net, sta, stamp_pick.isoformat()))

            # S picks, same procedure on column 1.
            trigs = trigger_onset(prob_S, min_proba_s, 0.1)
            for trig in trigs:
                if trig[1] == trig[0]:
                    continue
                pick = np.argmax(ts[trig[0]:trig[1], 1]) + trig[0]
                stamp_pick = st[0].stats.starttime + tt[pick]
                s_picks.append(stamp_pick)
                ofile_s.write("%s %s S %s\n" % (net, sta, stamp_pick.isoformat()))

            if FLAGS.plot:
                # Three waveform panels plus one probability panel.
                fig = plt.figure(figsize=(8, 12))
                ax = []
                ax.append(fig.add_subplot(4, 1, 1))
                ax.append(fig.add_subplot(4, 1, 2, sharex=ax[0], sharey=ax[0]))
                ax.append(fig.add_subplot(4, 1, 3, sharex=ax[0], sharey=ax[0]))
                ax.append(fig.add_subplot(4, 1, 4, sharex=ax[0]))
                for i in range(3):
                    ax[i].plot(np.arange(st[i].data.size) * dt, st[i].data, c='k',
                               lw=0.5)
                ax[3].plot(tt, ts[:, 0], c='r', lw=0.5)
                ax[3].plot(tt, ts[:, 1], c='b', lw=0.5)
                for p_pick in p_picks:
                    for i in range(3):
                        ax[i].axvline(p_pick - st[0].stats.starttime, c='r', lw=0.5)
                ax[3].axhline(min_proba_p, c='r', linestyle='--', lw=0.5)
                for s_pick in s_picks:
                    for i in range(3):
                        ax[i].axvline(s_pick - st[0].stats.starttime, c='b', lw=0.5)
                ax[3].axhline(min_proba_s, c='b', linestyle='--', lw=0.5)
                plt.tight_layout()
                # NOTE(review): plt.show() blocks until the window is closed and
                # savefig after show() may save an empty canvas on some backends.
                plt.show()
                outpath = os.path.join(FLAGS.output_path, "figs")
                if not os.path.exists(outpath):
                    os.makedirs(outpath)
                filename = os.path.join(outpath, net + sta + '.' + str(st[0].stats.starttime).replace(':', '_') + '.png')
                plt.savefig(filename)
                plt.close()

            ofile_p.close()
            ofile_s.close()
            ofile_d.close()
        except KeyboardInterrupt:
            print('stop receive at: ' + str(startTime))
            break
        # FIX: narrowed bare `except:` and report the error instead of hiding it
        # (a bare except also caught SystemExit/GeneratorExit).
        except Exception as exc:
            print('skip receive at: ' + str(startTime))
            print('reason:', repr(exc))
            ofile_p.close()
            ofile_s.close()
            ofile_d.close()
            continue

        print('Waiting for ' + str(FLAGS.receive_interval) + ' seconds to receive:')
        time.sleep(FLAGS.receive_interval)
# ---- Ejemplo n.º 2 (scrape artifact: what follows is a fragment of a
# ---- separate example; its enclosing function is not included) ---- 0
#    ofile = open(args.O, 'w')
    # NOTE(review): fragment of a larger function — `args`, `nsta`, `fdir`,
    # `filter_data`, `decimate_data`, `freq_min`/`freq_max` are defined outside
    # this view.  The output catalog is opened in append mode.
    ofile = open(args.O, 'a')

    # Process each station; all three component files must exist, otherwise
    # the station is skipped with a message.
    for i in range(nsta):
        fname = fdir[i][0].split("/")
        if not os.path.isfile(fdir[i][0]):
            print("%s doesn't exist, skipping" % fdir[i][0])
            continue
        if not os.path.isfile(fdir[i][1]):
            print("%s doesn't exist, skipping" % fdir[i][1])
            continue
        if not os.path.isfile(fdir[i][2]):
            print("%s doesn't exist, skipping" % fdir[i][2])
            continue
        # Read the three components into one stream and trim all traces to
        # their common (overlapping) time span.
        st = oc.Stream()
        st += oc.read(fdir[i][0])
        st += oc.read(fdir[i][1])
        st += oc.read(fdir[i][2])
        latest_start = np.max([x.stats.starttime for x in st])
        earliest_stop = np.min([x.stats.endtime for x in st])
        st.trim(latest_start, earliest_stop)

        # Preprocess: linear detrend, optional bandpass, optional resample
        # to 100 Hz.
        st.detrend(type='linear')
        if filter_data:
            st.filter(type='bandpass', freqmin=freq_min, freqmax=freq_max)
        if decimate_data:
            st.interpolate(100.0)
        chan = st[0].stats.channel
        sr = st[0].stats.sampling_rate
# ---- Ejemplo n.º 3 (scrape artifact: what follows is a fragment of a
# ---- separate example; its enclosing loop/function is not included) ---- 0
            # NOTE(review): fragment of a larger loop — `sud2sac`, `DMXfile`,
            # `SACdirbase`, `WVMfile`, `demux`, `irig`, and `MSEEDfile` are
            # defined outside this view.
            # Convert the SUDS DMX file to SAC files via an external tool.
            try:
                os.system(sud2sac + ' ' + DMXfile)
            except:
                print('Crashed on converting')

            # sud2sac names its outputs with a 3-character suffix per channel.
            sacfilelist = glob.glob(SACdirbase + '-???')
            if len(sacfilelist) == 0:
                # DMX file is likely a bad file. Check for WVM file and demultiplex it
                if os.path.exists(WVMfile):
                    os.system(demux + ' ' + WVMfile)
                    os.system(sud2sac + ' ' + DMXfile)
                    os.system(irig + ' ' + DMXfile)
            print('Done')

            # Now merge the SAC files into a single valid Miniseed file
            st = op.Stream()
            if len(sacfilelist) > 0:
                for sacfile in sacfilelist:
                    print('Combining ' + sacfile + ' into Miniseed file ' +
                          MSEEDfile)
                    try:
                        tr = op.read(sacfile)
                        st = st + tr
                    except:
                        # Deliberate best-effort: unreadable SAC files are
                        # skipped and the merge continues.
                        pass
                if len(st) == 0:
                    print('No good SAC files')
                    continue
                st.write(MSEEDfile)
                print('Done')
                #os.remove(SACdirbase + '-???')