Example #1
import numpy as np
from obspy import Catalog


def decluster_bh(cat, trig_int=2.0):
    # NOTE: assumes `decluster` and `_total_microsec` (EQcorrscan-style
    # peak-declustering helpers) are in scope in the surrounding module.
    detect_info = []
    all_detections = []
    for ev in cat:
        all_detections.append(ev)
        tpicks = [p.time for p in ev.picks]
        detect_time = min(tpicks)
        detect_val = len(ev.picks)
        detect_info.append((detect_time, detect_val))
    # Now call decluster
    min_det = min(d[0] for d in detect_info)
    detect_vals = np.array([d[1] for d in detect_info], dtype=np.float32)
    detect_times = np.array([
        _total_microsec(d[0].datetime, min_det.datetime) for d in detect_info
    ])
    peaks_out = decluster(peaks=detect_vals,
                          index=detect_times,
                          trig_int=trig_int * 10**6)
    # Need to match both the time and the detection value
    declustered_catalog = Catalog()
    for ind in peaks_out:
        # ind is a (peak_value, peak_time_index) pair returned by decluster
        matching_time_indices = np.where(detect_times == ind[-1])[0]
        matches = matching_time_indices[np.where(
            detect_vals[matching_time_indices] == ind[0])[0][0]]
        declustered_catalog.append(all_detections[matches])

    return declustered_catalog
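
A minimal usage sketch, assuming `cat` is an ObsPy Catalog in which every event carries picks (the pick count serves as the detection value):

declustered = decluster_bh(cat, trig_int=4.0)
print("Kept %d of %d detections" % (len(declustered), len(cat)))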
Example #2
    def run(self):
        """
        Make a webservice request for events using the passed-in options.
        """
        self.setPriority(QtCore.QThread.LowestPriority)
        self.clearFutures()
        self.futures = {}

        catalog = None
        LOGGER.info("Making %d event requests", len(self.request.sub_requests))
        with concurrent.futures.ThreadPoolExecutor(5) as executor:
            for sub_request in self.request.sub_requests:
                # Dictionary lets us look up argument by result later
                self.futures[executor.submit(load_events, self.request.client,
                                             sub_request)] = sub_request
            # Iterate through the Futures as they complete
            for future in concurrent.futures.as_completed(self.futures):
                LOGGER.debug("Events loaded")
                try:
                    if not catalog:
                        catalog = future.result()
                    else:
                        catalog += future.result()
                    self.progress.emit()
                except Exception:
                    # A failed sub-request still counts toward progress
                    self.progress.emit()
        self.futures = {}
        if not catalog:
            catalog = Catalog()
        LOGGER.info("Loader processing")
        catalog = self.request.process_result(catalog)
        LOGGER.info("Loader done")
        self.done.emit(catalog)
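
The dictionary keyed by `Future` is the standard `concurrent.futures` idiom for recovering the originating request once a result completes. A standalone sketch of the same pattern; the `fetch` helper and URLs are hypothetical stand-ins:

import concurrent.futures

def fetch(url):
    # Hypothetical stand-in for load_events
    return len(url)

urls = ["http://a.example", "http://b.example"]
with concurrent.futures.ThreadPoolExecutor(5) as executor:
    futures = {executor.submit(fetch, u): u for u in urls}
    for future in concurrent.futures.as_completed(futures):
        url = futures[future]  # recover the argument from the Future key
        print(url, "->", future.result())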
Example #3
def read_event_list(file_list):
    catalog = Catalog()
    for file in file_list:
        catalog += _read_event(file)

    catalog.events.sort(key=lambda event: event.origins[0].time)

    return catalog
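
A short usage sketch, assuming `_read_event` from Example #6 is importable; the glob pattern is hypothetical:

import glob

catalog = read_event_list(sorted(glob.glob("events/*.xml")))
print(catalog)  # events sorted by first-origin time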
Example #4
    def sendQuake(self):
        """
        Serve the quake as QuakeML.
        """
        splitPath = self.path.split('/')
        quake_id = splitPath[2]  # renamed from `id` to avoid shadowing the builtin
        resource_id = ResourceIdentifier(quake_id)
        catalog = Catalog(
            [ObsPyRequestHandler.serveSeis.dataset['quake']],
            resource_id=resource_id)
        # Serialize to an in-memory buffer so Content-Length can be set
        buf = io.BytesIO()
        catalog.write(buf, format="QUAKEML")
        self.send_response(200)
        self.send_header("Content-Length", buf.getbuffer().nbytes)
        self.send_header("Content-Type", "application/xml")
        self.end_headers()
        self.wfile.write(buf.getbuffer())
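
The in-memory serialization above is a general ObsPy pattern; a standalone sketch with an empty catalog:

import io
from obspy import Catalog

buf = io.BytesIO()
Catalog().write(buf, format="QUAKEML")
xml_bytes = buf.getvalue()  # QuakeML document as bytes, ready to send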
Example #5
def read_event_list(file_list):
    catalog = Catalog()
    try:
        for file in file_list:
            catalog += read_events(file)

    except Exception as err:
        print(err)

    catalog.events.sort(key=lambda event: event.origins[0].time)
    return catalog
Example #6
def _read_event(event_file):
    catalog = Catalog()
    try:
        cat = read_events(event_file)
        # Tag each event with the file it came from
        for event in cat.events:
            event.file_name = event_file

        catalog += cat

    except Exception as err:
        print(err)

    return catalog
Example #7
def load_events(client, parameters):
    """
    Execute one query for event data. This is a standalone function so we can
    run it in a separate thread.
    """
    try:
        LOGGER.info('Loading events: %s',
                    get_service_url(client, 'event', parameters))
        return client.get_events(**parameters)
    except Exception as e:
        # If no results found, the client will raise an exception, we need to trap this
        # TODO: this should be much cleaner with a fix to https://github.com/obspy/obspy/issues/1656
        if str(e).startswith("No data"):
            LOGGER.warning("No events found! Your query may be too narrow.")
            return Catalog()
        else:
            raise
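
A usage sketch, assuming this module's LOGGER and get_service_url helpers are in scope; the query parameters are illustrative only:

from obspy import UTCDateTime
from obspy.clients.fdsn import Client

client = Client("IRIS")
params = dict(starttime=UTCDateTime("2020-01-01"),
              endtime=UTCDateTime("2020-01-02"),
              minmagnitude=6)
catalog = load_events(client, params)
print(len(catalog), "events returned")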
Example #8
from obspy.clients.fdsn import Client
from obspy.core.event.catalog import Catalog
from obspy.core.inventory.network import Network
from obspy.core.inventory.inventory import Inventory
from obspy.core import UTCDateTime
from datetime import timedelta
from mpl_toolkits.basemap import Basemap
import numpy as np
import matplotlib
matplotlib.use('agg')  # headless backend; must be selected before pyplot is imported
import matplotlib.pyplot as plt

iris = Client("IRIS")
t2 = UTCDateTime.now()
t1 = t2 - timedelta(days=30)

cat = Catalog()
cat2 = Catalog()

try:
    cat += iris.get_events(starttime=t1,
                           endtime=t2,
                           latitude=YOUR_LATITUDE,
                           longitude=YOUR_LONGITUDE,
                           maxradius=15)
except Exception:
    pass

try:
    cat2 += iris.get_events(starttime=t1, endtime=t2, minmagnitude=6)
except Exception:
    pass
Example #9
def get_filtered_catalog(catalog, events_iter):
    """
    Given a catalog and an iterator of selected events, return a catalog
    containing only the selected events. Note that only ``events_iter`` is
    consumed; the ``catalog`` argument itself is not read.
    """
    return Catalog(list(events_iter))
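
For example, selecting events by magnitude with a generator expression (the threshold is arbitrary):

big_events = get_filtered_catalog(
    catalog,
    (ev for ev in catalog if ev.magnitudes and ev.magnitudes[0].mag >= 5.0))
print(big_events)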
Example #10
def preprocess():
    """
    
    This script preprocesses the MSEED files in the input directories 
    specified in the input file.
 
    
    """


    # Create output directory, if necessary

    outdir = os.path.join('data','processed')
     
    if rank == 0 and not os.path.exists(outdir):
        os.mkdir(outdir)
    if rank == 0 and cfg.verbose:
        print(cfg.__dict__)
    
    comm.Barrier()

    event_filter = None

    if cfg.gcmt_exclude:

        if rank == 0:
            c = Client()
            cata = c.get_events(starttime=UTCDateTime(cfg.gcmt_begin),
                endtime=UTCDateTime(cfg.gcmt_end),catalog='GCMT',
                minmagnitude=5.6)
    
            event_filter = get_event_filter(cata,cfg.Fs_new[-1],
                t0=UTCDateTime(cfg.gcmt_begin),
                t1=UTCDateTime(cfg.gcmt_end))

        
        # communicate event_filter (would it be better 
        # if every rank sets it up individually?)
        event_filter = comm.bcast(event_filter,root=0)
    
    # Initialised unconditionally: prstr.process() below receives local_cat
    # even when the local-catalog exclusion is disabled.
    local_cat = Catalog()

    if cfg.event_exclude_local_cat:
        
        if rank == 0:
            c = Client()
            local_cat.extend(c.get_events(
                    starttime=UTCDateTime(cfg.event_exclude_local_cat_begin),
                    endtime=UTCDateTime(cfg.event_exclude_local_cat_end),
                    #catalog=catalog,
                    minmagnitude=cfg.event_exclude_local_cat_minmag,
                    latitude=cfg.event_exclude_local_cat_lat,
                    longitude=cfg.event_exclude_local_cat_lon,
                    maxradius=cfg.event_exclude_local_cat_radius))
            print(len(local_cat),"events in local earthquake catalog.")
        # communicate event_filter (would it be better 
        # if every rank sets it up individually?)
        local_cat = comm.bcast(local_cat,root=0)

    # Create own output directory, if necessary
    rankdir = os.path.join(outdir,
        'rank_%g' %rank)
    if not os.path.exists(rankdir):
        os.mkdir(rankdir)

    
    #- Find input files
    
    content = find_files(cfg.input_dirs,
        cfg.input_format)
    if rank==0:
        print(len(content), "files found") 
    #print(content)

    # processing report file
    sys.stdout.flush()
    output_file = os.path.join(rankdir,
        'processing_report_rank%g.txt' %rank)
    
    if os.path.exists(output_file):
        ofid = open(output_file,'a')
        print('UPDATING, Date:',file=ofid)
        print(time.strftime('%Y.%m.%dT%H:%M'),file=ofid)
    else:
        ofid = open(output_file,'w')
        print('PROCESSING, Date:',file=ofid)
        print(time.strftime('%Y.%m.%dT%H:%M'),file=ofid)


    # select input files for this rank    
    content = content[rank::size]
    if cfg.testrun: # Only 3 files randomly selected
        indices = randint(0,len(content),3)
        content = [content[j] for j in indices]

    # Loop over input files
    for filepath in content:
        
        print('-------------------------------------',file=ofid)
        print('Attempting to process:',file=ofid)
        print(os.path.basename(filepath),file=ofid)
        
        try:
            prstr = PrepStream(filepath,ofid)
        except Exception:
            print('** Problem opening file, skipping: ',file=ofid)
            print('** %s' %filepath,file=ofid)
            continue

        if len(prstr.stream) == 0:
            print('** No data in file, skipping: ',file=ofid)
            print('** %s' %filepath,file=ofid)
            continue
        
        try:
            prstr.prepare(cfg)
        except Exception:
            print('** Problems preparing stream: ',file=ofid)
            print('** %s' %filepath,file=ofid)
            continue
            
        try:
            prstr.process(cfg,event_filter,local_cat)
        except Exception:
            print('** Problems processing stream: ',file=ofid)
            print('** %s' %filepath,file=ofid)
            continue

        try:
            prstr.write(rankdir,cfg)
        except Exception:
            print('** Problems writing stream: ',file=ofid)
            print('** %s' %filepath,file=ofid)

        ofid.flush()
        
    ofid.close()

    print("Rank %g has completed processing." 
        %rank,file=None)
    
    
    # Note: os.system() reports failure via its return code, it does not raise
    try:
        os.system('mv '+rankdir+'/* '+outdir)
    except Exception:
        pass

    os.system('rmdir '+rankdir)
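
The `comm`, `rank` and `size` globals above are assumed to come from an mpi4py setup along these lines:

from mpi4py import MPI

comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()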
Example #11
                    cap = 2

                outString += (fragString[:cap].upper() +
                              fragString[cap:]).strip() + " "

    return (outString[:1].upper() + outString[1:]
            ).strip()  # Capitalize first letter and strip trailing space


print('%s%% - Importing IRIS FDSN client...' % (p1))
iris = Client("IRIS")
t2 = UTCDateTime.now()
t2str = t2.strftime('%Y-%m-%d %H:%M UTC')
t1 = t2 - timedelta(days=DURATION)

cat = Catalog()
nrcat = Catalog()
cat2 = Catalog()

####### LOCAL ########

try:
    print('%s%% - Getting local earthquakes within %s degrees from IRIS...' %
          (p2, LOCAL_RADIUS))
    cat += iris.get_events(starttime=t1,
                           endtime=t2,
                           latitude=YOUR_LATITUDE,
                           longitude=YOUR_LONGITUDE,
                           minmagnitude=LOCAL_MAG,
                           maxradius=LOCAL_RADIUS)
    for evt in cat:
Example #12
from datetime import timedelta
from obspy import Catalog, Stream, UTCDateTime
from obspy.clients.fdsn import Client
from obspy.clients.fdsn.mass_downloader import CircularDomain, Restrictions, MassDownloader

#import datetime
#client = Client('IRIS')
#cat += iris.get_events(starttime=t1, endtime=t2, latitude=YOUR_LATITUDE,
#                           longitude=YOUR_LONGITUDE, maxradius=15)
LATITUDE = 35.48648649
# negative longitude indicates western hemisphere
LONGITUDE = -97.51380573

t2 = UTCDateTime.now()
t1 = t2 - timedelta(days=1)

cat = Catalog()
cat2 = Catalog()

try:
    usgs = Client("USGS")
    cat += usgs.get_events(starttime=t1,
                           endtime=t2,
                           latitude=LATITUDE,
                           longitude=LONGITUDE,
                           maxradius=2,
                           minmagnitude=2)
except Exception:
    pass

try:
    usgs = Client("USGS")
Example #13
def main(st, fname, verbose=False):
    # Assumes the module imports numpy as np, pandas as pd, scipy, time, os,
    # the obspy event classes used below, and project helpers such as
    # network_detection, get_phases and decluster_bh, plus the constants
    # DOM_PERIOD and EPS_WINLEN.
    fs = st[0].stats.sampling_rate

    # Detect STA/LTA for all geodes, with minimum number of stations included
    proc1 = time.time()
    detection_list, cft_stream = network_detection(st, cft_return=True)
    proc2 = time.time()
    Logger.info("Network detection search done in %f s." % (proc2 - proc1))
    Logger.info("Number of network detections = %d" % len(detection_list))

    # Get picks and stats, iterating detection by detection, then station by station
    # Buffer window before and after detection
    buffer1 = 3.0  # 0.2
    buffer2 = 10.0

    # Load ERT data
    ert_surveys_file = "survey_times_ERT.csv"
    # pd.datetime was removed from pandas; parse with pd.to_datetime instead
    dateparse = lambda x: pd.to_datetime(x, format='%Y-%m-%d %H:%M:%S')
    ert_surveys = pd.read_csv(ert_surveys_file,
                              parse_dates=["time_local_start"],
                              date_parser=dateparse)
    ert_surveys["time_local_start"] = ert_surveys[
        "time_local_start"].dt.tz_localize("America/Edmonton",
                                           ambiguous="infer")
    ert_surveys["time_utc_start"] = ert_surveys[
        "time_local_start"].dt.tz_convert(None)
    ert_surveys["time_utc_end"] = ert_surveys["time_utc_start"] + pd.Timedelta(
        25, unit="m")
    ert_surveys["time_utc_end"] = pd.to_datetime(ert_surveys["time_utc_end"])
    ert_surveys["time_utc_start"] = pd.to_datetime(
        ert_surveys["time_utc_start"])

    catalog = Catalog()
    # Loop over each STA/LTA detection
    for detection in detection_list:

        # Skip if detection happens during an ERT survey
        tmin = detection["time"].datetime  # public property, not _get_datetime()
        is_ert_on = ert_surveys.loc[
            (ert_surveys['time_utc_start'] <= tmin) &
            (ert_surveys['time_utc_end'] >= tmin)].shape[0] > 0
        if is_ert_on:
            Logger.warning("Skip false detection during ERT survey.")
            continue

        Logger.info("DETECTION TIME: %s\n\t DURATION_SEC: %f" %
                    (detection["time"], detection["duration"]))
        det_start = detection["time"]
        det_end = detection["time"] + detection["duration"]

        # Detection stream
        det_st = st.slice(starttime=det_start - buffer1,
                          endtime=det_end + buffer2)
        det_st.detrend()
        det_st_to_save = det_st.copy()
        t_plt = det_st[0].times("matplotlib")
        t_utc = det_st[0].times("utcdatetime")
        det_cft = cft_stream.slice(starttime=det_start - buffer1,
                                   endtime=det_end + buffer2)

        # Stations in detection stream
        station_list = list(set(detection["stations"]))
        station_list.sort()

        # Check if frequencies within window are anomalous
        highf_ratio_threshold = 0.6
        for station in station_list:
            tmp = det_st.select(station=station).copy()
            nbad = 0
            for tr in tmp:
                ratio = highf_ratio(data=tr.data, sampling_rate=fs)
                if ratio > highf_ratio_threshold:
                    nbad += 1
            if nbad > 0:
                Logger.warning(
                    "Removing station %s because for %d traces, ratio of frequencies above %f is above %f"
                    % (station, nbad, 0.25 * fs, highf_ratio_threshold))
                # tmp holds copies; remove the original traces from det_st
                for tr in det_st.select(station=station):
                    det_st.remove(tr)

        # Stations remaining in the detection stream after the QC above
        station_list = sorted({tr.stats.station for tr in det_st})

        if len(station_list) < 4:
            Logger.warning(
                "Only %d stations left, less than 4, so skipping this detection"
                % len(station_list))
            continue

        # Search window for phase around STA/LTA detection time
        idet_start = (np.abs(t_utc - det_start)).argmin()
        idet_end = (np.abs(t_utc - det_end)).argmin()
        idx_search_max = range(idet_start, idet_end)

        # Analyze stations one by one
        pol_st = Stream()
        event_phases = []
        for ista, station in enumerate(station_list):

            # Select waveform and STA-LTA streams
            sta_st = det_st.select(station=station).copy()
            network = sta_st[0].stats.network
            sta_st.detrend()
            sta_cft = det_cft.select(station=station).copy()
            sta_cft_stack = (sta_cft.select(channel="DPZ")[0].data +
                             sta_cft.select(channel="DPN")[0].data +
                             sta_cft.select(channel="DPE")[0].data) / 3

            # Polarization properties
            tpol, pol_dict, pol_st_sta = modified_polarization_analysis(
                sta_st, dominant_period=DOM_PERIOD, interpolate=True)
            pol_st += pol_st_sta

            # Energy response curve for pick detection
            per = peak_eigenvalue_ratio(pol_dict["eigenvalue1"],
                                        win_len=int(2 * DOM_PERIOD * fs))
            per = eps_smooth(per, w=int(EPS_WINLEN * fs))
            jer = joint_energy_ratio(sta_cft_stack, t_plt, per, tpol)

            # Extract phases
            sta_phases = get_phases(response_curve=jer,
                                    idx_search_max=idx_search_max,
                                    time=t_utc,
                                    pol=pol_dict,
                                    verbose=False)
            if sta_phases:

                # Now do some quality control
                snr_threshold = 2.5
                win_len_s = 0.2
                sta_phases["station"] = station
                sta_phases["network"] = network

                if sta_phases["P"]["arrival_time"]:
                    arr_time = sta_phases["P"]["arrival_time"] - 0.02

                    snr, channel = get_snr_phase(sta_st,
                                                 time=arr_time,
                                                 win_len_s=win_len_s,
                                                 verbose=False,
                                                 tnoise=None)
                    Logger.info("SNR for P pick %s.%s..%s: %f \t at t = %s" %
                                (network, station, channel, snr, arr_time))
                    if snr < snr_threshold:
                        #Logger.info("P pick below SNR threshold of %f" % snr_threshold)
                        sta_phases["P"]["arrival_time"] = None
                    else:
                        sta_phases["P"]["SNR"] = snr
                        sta_phases["P"]["channel"] = channel

                if sta_phases["S"]["arrival_time"]:
                    arr_time = sta_phases["S"]["arrival_time"] - 0.02
                    if sta_phases["P"]["arrival_time"]:
                        tnoise = sta_phases["P"]["arrival_time"] - 0.02
                    else:
                        tnoise = None
                    snr, channel = get_snr_phase(sta_st.select(),
                                                 time=arr_time,
                                                 win_len_s=win_len_s,
                                                 verbose=False,
                                                 tnoise=tnoise)

                    Logger.info("SNR for S pick %s.%s..%s: %f \t at t = %s" %
                                (network, station, channel, snr, arr_time))
                    if snr < snr_threshold:
                        Logger.info("S pick below SNR threshold of %f" %
                                    snr_threshold)
                        sta_phases["S"]["arrival_time"] = None
                    else:
                        sta_phases["S"]["SNR"] = snr
                        sta_phases["S"]["channel"] = channel

                Logger.info("Station %s: t_P = %s\tt_S = %s" %
                            (station, sta_phases["P"]["arrival_time"],
                             sta_phases["S"]["arrival_time"]))
                event_phases.append(sta_phases)
            else:
                Logger.info("No phase found for station %s" % station)
            # End of for loop over stations

        if not event_phases:
            Logger.info("No picks found at all for this detection.")
            continue
        else:
            nump = len([p for p in event_phases if p["P"]["arrival_time"]])
            nums = len([p for p in event_phases if p["S"]["arrival_time"]])
            Logger.info("Number of initial picks before MCCC: P = %d, S = %d" %
                        (nump, nums))
        if nump + nums == 0:
            Logger.info("No picks found at all for this detection.")
            continue
        # if verbose:
        #     plot_phases(event_phases, det_st)
        #     wadati_plot(event_phases, det_st)

        # Align with mccc
        Logger.info("Refining picks with MCCC")
        event_phases = align_mccc(event_phases=event_phases,
                                  stream=det_st,
                                  verbose=False)

        nump = len([p for p in event_phases if p["P"]["arrival_time"]])
        nums = len([p for p in event_phases if p["S"]["arrival_time"]])
        if nump == 0 and nums == 0:
            Logger.warning("No remaining picks after MCCC!")
            continue
        elif nump + nums < 5:
            Logger.info("Less than 5 picks remaining. Skipping event.")
            continue
        if verbose:
            Logger.info("Number of picks after MCCC: P = %d, S = %d" %
                        (nump, nums))
            wadati_plot(event_phases, det_st)
            plot_phases(event_phases, det_st)

        # Update polarization statistics
        Logger.info("Updating polarization attributes")
        phase_len_tol = int(10 * DOM_PERIOD * fs)
        for i, staph in enumerate(event_phases):
            sta_st = det_st.select(station=staph["station"]).copy()
            t = sta_st[0].times("utcdatetime")
            tpol, pol_dict, _ = modified_polarization_analysis(
                sta_st, dominant_period=DOM_PERIOD, interpolate=True)
            tp = staph["P"]["arrival_time"]
            if tp:
                idxP = np.argmin(np.abs(t - tp))
                stats = pol_window_stats(pol_dict,
                                         idxP,
                                         phase_len_tol,
                                         show_stats=False)
                event_phases[i]["P"]["pol_stats"] = stats
            ts = staph["S"]["arrival_time"]
            if ts:
                idxS = np.argmin(np.abs(t - ts))
                stats = pol_window_stats(pol_dict,
                                         idxS,
                                         phase_len_tol,
                                         show_stats=False)
                event_phases[i]["S"]["pol_stats"] = stats

        # Convert to obspy Picks and Event
        event_picks = []
        for i, staph in enumerate(event_phases):
            event_picks += sta_phases_to_pick(staph=staph)
        event = Event(picks=event_picks)

        # Estimate average event distance using available pairs of P and S picks
        r_med = distance_from_tstp(event.picks, min_estim=1)
        if not r_med:  # We cannot estimate r, hence magnitude
            Logger.warning(
                "Couldn't estimate hypocentral distance from ts-tp. No magnitude calculation."
            )
            # Add event to catalog
            if verbose:
                Logger.info(
                    "Adding event to catalog: *******************************************"
                )
                Logger.info(event)
            catalog.events.append(event)
            stfilepath = os.path.join("detections_waveforms",
                                      det_start.strftime("%Y%m%d"))
            if not os.path.exists(stfilepath):
                os.mkdir(stfilepath)
            det_st_to_save.write(os.path.join(
                stfilepath,
                "bhdetect_%s.mseed" % det_start.strftime("%Y%m%d%H%M%S")),
                                 format="MSEED")

            continue

        # Calculate magnitudes
        Logger.info("Computing magnitudes...")
        magtime_contriblist = []
        magspec_contriblist = []
        for ista, station in enumerate(station_list):
            sta_picks = [
                p for p in event.picks if p.waveform_id.station_code == station
            ]
            r = distance_from_tstp(sta_picks, min_estim=2)
            if not r:
                r = r_med
            ts = get_pick(event.picks, station, "S")
            if not ts:  # No ts pick
                Logger.warning("There is no S pick for station %s." % station)
                continue
            sta_st = det_st.select(station=station).copy()
            sta_st.detrend()

            # Estimate coda
            tp = get_pick(event.picks, station, "P")
            if not tp:
                tsig = ts - 0.5
            else:
                tsig = tp - 0.02
            tcoda, s_len, snr = get_coda_duration(sta_st.copy(),
                                                  tsig=tsig,
                                                  ts=ts,
                                                  win_len_s=0.2)
            if not tcoda:
                if verbose:
                    Logger.info(
                        "Couldn't calculate coda duration for station %s skipping..."
                        % station)
                continue

            # Save coda info
            amp = Amplitude(generic_amplitude=tcoda,
                            snr=snr,
                            type="END",
                            category="duration",
                            unit="s",
                            magnitude_hint="Md")
            event.amplitudes.append(amp)

            # Estimate energy flux
            if tp:
                Logger.info("Calculating energy flux fr station %s" % station)
                epsilonS = 0
                for tr in sta_st.copy():
                    tr_cut = tr.trim(starttime=ts, endtime=ts + (ts - tp)).data
                    cumsum_u2 = scipy.integrate.cumtrapz(tr_cut**2,
                                                         dx=tr.stats.delta)
                    epsilonS += cumsum_u2[-1]
                amp = Amplitude(generic_amplitude=epsilonS,
                                snr=snr,
                                type="A",
                                category="integral",
                                unit="other",
                                time_window=TimeWindow(begin=ts - tp,
                                                       end=2 * (ts - tp),
                                                       reference=tp),
                                waveform_id=WaveformStreamID(
                                    network_code=tr.stats.network,
                                    station_code=tr.stats.station))
                event.amplitudes.append(amp)

            # Estimate Mw for each component
            Mw_spec_sta = []
            Mw_time_sta = []
            Q_spec_sta = []
            fc_spec_sta = []
            for tr in sta_st:
                # Cut noise window and S waveform
                noise_len = s_len
                taper_perc = 0.1
                trnoise = tr.copy()
                trnoise.trim(starttime=tsig - (1 + taper_perc) * noise_len,
                             endtime=tsig - taper_perc * noise_len)
                trnoise.taper(type="hann",
                              max_percentage=taper_perc,
                              side="both")
                tr.trim(starttime=ts - taper_perc * s_len,
                        endtime=ts + (1 + taper_perc) * s_len)
                tr.taper(type="hann", max_percentage=taper_perc, side="both")

                # Check SNR
                snr_trace = np.median(tr.slice(starttime=ts, endtime=ts + s_len).data) / \
                            np.median(trnoise.data)

                if snr_trace < 3:
                    Logger.info(
                        "SNR < 3, skipping trace for magnitude calculation.")
                    # Poor SNR, skip trace
                    continue

                # Displacement waveform
                trdisp = tr.copy()
                trdisp.integrate()
                trdisp.detrend()

                # Estimate magnitude: time method
                Mw_time, M0_time, omega0_time = estimate_magnitude_time(
                    trdisp, r, disp=False)
                Mw_time_sta.append(Mw_time)

                # Estimate magnitude: spectral method
                Mw_o, M0_o, omega0_o, fc_o, Q_o = estimate_magnitude_spectral(
                    trdisp, r, omega0_time, trnoise=None, disp=False)
                if not Mw_o:
                    Logger.warning("No magnitude found due to errors.")
                    continue
                elif fc_o < 2 or Q_o > 40 or Q_o < 1:  # reject if Q is outside the plausible range (Qs ~31 for sandstone, ~10 for shale)
                    # Reject spectral estimate
                    Logger.warning(
                        "Rejecting spectral estimate with: fc = %f, Q = %f" %
                        (fc_o, Q_o))
                    continue
                else:
                    Mw_spec_sta.append(Mw_o)
                    Q_spec_sta.append(Q_o)
                    fc_spec_sta.append(fc_o)

            # Now get average for station as a whole
            Logger.info(
                "Found %d estimates of Mw using time method for station %s." %
                (len(Mw_time_sta), station))
            Logger.info(
                "Found %d estimates of Mw using spectral method for station %s."
                % (len(Mw_spec_sta), station))
            if Mw_time_sta:
                smagt = StationMagnitude(
                    mag=np.mean(Mw_time_sta),
                    mag_errors=QuantityError(uncertainty=np.std(Mw_time_sta)),
                    station_magnitude_type="Mw_time",
                    comments=[Comment(text="snr = %f" % snr)])
                event.station_magnitudes.append(smagt)
                contrib = StationMagnitudeContribution(
                    station_magnitude_id=smagt.resource_id, weight=snr)
                magtime_contriblist.append(contrib)
                Logger.info("Magnitude time estimate = %f" %
                            np.mean(Mw_time_sta))

            if Mw_spec_sta:
                smags = StationMagnitude(
                    mag=np.mean(Mw_spec_sta),
                    mag_errors=QuantityError(uncertainty=np.std(Mw_spec_sta)),
                    station_magnitude_type="Mw_spectral",
                    comments=[
                        Comment(text="Q_mean = %f, Q_std = %f" %
                                (np.mean(Q_spec_sta), np.std(Q_spec_sta))),
                        Comment(text="Fc_mean = %f, Fc_std = %f" %
                                (np.mean(fc_spec_sta), np.std(fc_spec_sta))),
                        Comment(text="snr = %f" % snr)
                    ])
                event.station_magnitudes.append(smags)
                contrib = StationMagnitudeContribution(
                    station_magnitude_id=smags.resource_id, weight=snr)
                magspec_contriblist.append(contrib)
                Logger.info("Magnitude spectral estimate = %f" %
                            np.mean(Mw_spec_sta))
                Logger.info("Fc = %f, Q = %f" %
                            (np.mean(fc_spec_sta), np.mean(Q_spec_sta)))

            # End of for loop over stations

        # Get magnitude for event
        if magspec_contriblist:
            Logger.info(
                "Found %d station estimates of Mw using spectral method." %
                len(magspec_contriblist))
            wave_num = 0
            wave_den = 0
            val_list = []
            for m in magspec_contriblist:
                mval = [
                    sm.mag for sm in event.station_magnitudes
                    if sm.resource_id == m.station_magnitude_id
                ][0]
                wave_num += mval * m.weight
                wave_den += m.weight
                val_list.append(mval)
            mag = wave_num / wave_den
            mags = Magnitude(
                mag=mag,
                mag_errors=QuantityError(uncertainty=np.std(val_list)),
                magnitude_type="Mw_spectral",
                station_count=len(magspec_contriblist),
                station_magnitude_contributions=magspec_contriblist)
            event.magnitudes.append(mags)
            Logger.info(
                "Event magnitude estimate using spectral method: Mw = %f" %
                mags.mag)
        if magtime_contriblist:
            Logger.info("Found %d station estimates of Mw using time method." %
                        len(magtime_contriblist))
            wave_num = 0
            wave_den = 0
            val_list = []
            for m in magtime_contriblist:
                mval = [
                    sm.mag for sm in event.station_magnitudes
                    if sm.resource_id == m.station_magnitude_id
                ][0]
                wave_num += mval * m.weight
                wave_den += m.weight
                val_list.append(mval)
            mag = wave_num / wave_den
            magt = Magnitude(
                mag=mag,
                mag_errors=QuantityError(uncertainty=np.std(val_list)),
                magnitude_type="Mw_time",
                station_count=len(magtime_contriblist),
                station_magnitude_contributions=magtime_contriblist)
            event.magnitudes.append(magt)
            Logger.info("Event magnitude estimate using time method: Mw = %f" %
                        magt.mag)

        # Add event to catalog
        if verbose:
            Logger.info(
                "Adding event to catalog: *******************************************"
            )
            Logger.info(event)
        catalog.events.append(event)
        stfilepath = os.path.join("detections_waveforms",
                                  det_start.strftime("%Y%m%d"))
        if not os.path.exists(stfilepath):
            os.mkdir(stfilepath)
        det_st_to_save.write(os.path.join(
            stfilepath,
            "bhdetect_%s.mseed" % det_start.strftime("%Y%m%d%H%M%S")),
                             format="MSEED")

    if len(catalog) > 0:
        # Decluster
        declustered_catalog = decluster_bh(catalog, trig_int=2.0)
        if not os.path.exists(os.path.split(fname)[0]):
            os.mkdir(os.path.split(fname)[0])
        declustered_catalog.write(fname, format="QUAKEML")
Example #14
def processor(sta, start, end, dbscale, filtmin, filtmax, inpath='/var/www/nezsite/nezsite/nezsite/media/seismic', OUTPATH='/var/www/nezsite/nezsite/nezsite/media/shakedown'):
	#global INPATH
	day = start.strftime('%Y.%j')
	yday = (start - timedelta(days=1)).strftime('%Y.%j')
	daystart = UTCDateTime(start.year, start.month, start.day)
	dayend = daystart + timedelta(days=1)
	if dayend > datetime.now():
		now = UTCDateTime.now()
		mins = 0
		hourdelta = timedelta(hours=0)
		if 14 >= now.minute >= 0:
			mins = 15
		elif 29 >= now.minute >= 15:
			mins = 30
		elif 44 >= now.minute >= 30:
			mins = 45
		else:
			mins = 0
			hourdelta = timedelta(hours=1)
		now += hourdelta
		dayend = UTCDateTime(now.year, now.month, now.day, now.hour, mins)
		daystart = dayend - timedelta(days=1)
	avail = day + '.png'
	avail = os.path.join(AVAILPATH, avail)

	if sta:
		stn = sta
		#sta = sta + '.D.'
	else:
		stn = str(STA_DEF[0:-2])
		#sta = STA_DEF
		sta = stn

	stc = stn.split('.')
	net = stc[0]
	sta = stc[1]
	loc = stc[2]
	ch = stc[3]

	fn = '%s.%s.%s.%s.%s' % (sta, net, loc, ch, day)
	yfn = '%s.%s.%s.%s.%s' % (sta, net, loc, ch, yday)

	inpath = os.path.join(inpath, stc[0], stc[1])
	if os.path.isdir(os.path.join(inpath, 'proc')):
		pass
	else:
		os.mkdir(os.path.join(inpath, 'proc'))

	shutil.copy2(os.path.join(inpath, fn), os.path.join(inpath, 'proc'))
	shutil.copy2(os.path.join(inpath, yfn), os.path.join(inpath, 'proc'))
	ypath = inpath
	inpath = os.path.join(inpath, 'proc')


	tz = int(datetime.now(pytz.timezone('America/New_York')).strftime('%z')) // 100
	fmin = 0.1
	fmax = 25
	fminbp = filtmin
	fmaxbp = filtmax
	if 'ORNO' in sta:
		fminbp = 0.03 # 33.3 seconds
		fmaxbp = 0.1  # 10 seconds

	heli = os.path.join(OUTPATH, stn + '.' + day + '-heli.png')
	helibp = os.path.join(OUTPATH, stn + '.' + day + '-heli-band.png')
	dur, spec = '', ''

	st = read().clear()
	yst = st.copy()
	try:
		yst = read(os.path.join(ypath, yfn))
	except Exception:
		print("error reading yesterday's miniSEED file. may be further errors...")

	try:
		st = read(os.path.join(inpath, fn))
		os.remove(os.path.join(inpath, fn))
	except Exception:
		print("error reading today's miniSEED file. may be further errors...")

	net = str(st[0].stats.network)
	sta = str(st[0].stats.station)
	loc = str(st[0].stats.location)
	ch = str(st[0].stats.channel)
	startt = str(st[0].stats.starttime)
	sr = str(st[0].stats.sampling_rate)

	st = yst + st
	#st.merge()
	st = st.slice(starttime=daystart, endtime=dayend)

	sbp = st.copy()
	sbp = sbp.filter('bandpass', freqmin=fminbp, freqmax=fmaxbp, zerophase=True)
	spu = st.slice(starttime=start, endtime=end)
	sps = sbp.slice(starttime=start, endtime=end) # slice for bandpass spectrogram

	cat = Catalog()
	try:
		cat.extend(read_events(pathname_or_url='/var/www/nezsite/nezsite/nezsite/media/seismic/events/evtmajor30days.xml', format='QUAKEML'))
	except Exception:
		pass
	try:
		cat.extend(read_events(pathname_or_url='/var/www/nezsite/nezsite/nezsite/media/seismic/events/evtlocal30days.xml', format='QUAKEML'))
	except Exception:
		pass

	'''
	# get events
	client = Client("USGS")
	cat = Catalog()
	try:
		cat += client.get_events(starttime=daystart, endtime=dayend, latitude=44.036114, longitude=-70.439856, maxradius=10)
	except FDSNException:
		pass
	try:
		cat += client.get_events(starttime=daystart, endtime=dayend, latitude=44.036114, longitude=-70.439856,
									minradius=10, maxradius=15, minmagnitude=2.5)
	except FDSNException:
		pass
	try:
		cat += client.get_events(starttime=daystart, endtime=dayend, minmagnitude=6.5)
	except FDSNException:
		pass
	'''

	title = net + '.' + sta + '.' + loc + '.' + ch + ' - ' + startt + ' - rate: ' + sr

	st.plot(type="dayplot", size=(1600, 1200), title=title + 'Hz - band: 0-25Hz', vertical_scaling_range=2000,
		tick_format='%H:%M', outfile=heli, color=['k', 'r', 'b', 'g'], linewidth=0.3, time_offset=tz, events=cat)
	sbp.plot(type="dayplot", size=(1600, 1200), title=title + 'Hz - band: '+ str(fminbp) + '-' + str(fmaxbp) + 'Hz', vertical_scaling_range=200,
		tick_format='%H:%M', outfile=helibp, color=['k', 'r', 'b', 'g'], linewidth=0.3, time_offset=tz, events=cat)

	#st.plot(type="dayplot", title=net + '.' + sta + '.' + loc + '.' + ch + ' - ' + startt + ' - rate: ' + sr + 'Hz - band: 0-25Hz', vertical_scaling_range=8e3, outfile=heli, color=['k', 'r', 'b', 'g'], time_offset=tz, events={'min_magnitude': 6.5})
	#sbp.plot(type="dayplot", title=net + '.' + sta + '.' + loc + '.' + ch + ' - ' + startt + ' - rate: ' + sr + 'Hz - band: '+ str(fminbp) + '-' + str(fmaxbp) + 'Hz', vertical_scaling_range=7e2, outfile=helibp, color=['k', 'r', 'b', 'g'], time_offset=tz, events={'min_magnitude': 6.5})

	heli = WEBPATH + os.path.split(heli)[1]
	helibp = WEBPATH + os.path.split(helibp)[1]

	if end:
		dur = end - start


	sp = spu.detrend(type='constant')
	ss = sps.detrend(type='constant')

	startt = str(sp[0].stats.starttime)


	## ------------------------- ##
	# make spectrogram figure 1
	fig = plt.figure(figsize=(16,6), dpi=100)
	ax1 = fig.add_axes([0.068, 0.75, 0.85, 0.2]) #[left bottom width height]
	ax2 = fig.add_axes([0.068, 0.1, 0.85, 0.6], sharex=ax1)
	ax3 = fig.add_axes([0.931, 0.1, 0.03, 0.6])

	# labels
	fig.suptitle(net + '.' + sta + '.' + loc + '.' + ch + ' - ' + startt + ' - samplerate: ' + sr + 'Hz - frequency band: 0-25 Hz')
	ax1.set_ylabel('Traces')
	ax2.set_xlabel('Time [s]')
	ax2.set_ylabel('Frequency [Hz]')
	ax3.set_ylabel('Energy density [dimensionless]') # doesn't work

	# make time vector
	t = np.arange(sp[0].stats.npts) / sp[0].stats.sampling_rate

	# plot waveform (top subfigure)
	ax1.plot(t, sp[0].data, 'k', linewidth=0.5)

	# plot spectrogram (bottom subfigure)
	fig = sp[0].spectrogram(show=False, axes=ax2, log=False, dbscale=dbscale, cmap='viridis')
	mappable = ax2.images[0]
	plt.colorbar(mappable=mappable, cax=ax3)

	ax2.set_ylim(fmin, fmax)

	if 'cronplots' in OUTPATH:
		spec = os.path.join(OUTPATH, stn + '.' + start.strftime('%Y.%j') + "-spec.png")
	else:
		spec = os.path.join(OUTPATH, stn + '.' + start.strftime('%Y.%j.%H%M%S-') + str(dur) + "-spec.png")
	plt.savefig(spec)
	spec = WEBPATH + os.path.split(spec)[1]


	## ------------------------- ##
	# make spectrogram figure 2
	sfig2 = plt.figure(figsize=(16,4), dpi=100)
	ax1 = sfig2.add_axes([0.068, 0.600, 0.85, 0.3]) #[left bottom width height]
	ax2 = sfig2.add_axes([0.068, 0.115, 0.85, 0.4], sharex=ax1)
	ax3 = sfig2.add_axes([0.932, 0.115, 0.03, 0.4])

	# labels
	sfig2.suptitle(net + '.' + sta + '.' + loc + '.' + ch + ' - ' + startt + ' - samplerate: ' + sr + 'Hz - bandpass: ' + str(fminbp) + '-' + str(fmaxbp) + ' Hz')
	ax1.set_ylabel('Counts')
	ax2.set_xlabel('Time [s]')
	ax2.set_ylabel('Frequency [Hz]')
	ax3.set_ylabel('Energy density [dimensionless]') # doesn't work

	# make time vector
	t = np.arange(ss[0].stats.npts) / ss[0].stats.sampling_rate

	# plot waveform (top subfigure)
	ax1.plot(t, ss[0].data, 'k', linewidth=0.5)

	# plot spectrogram (bottom subfigure)
	sfig2 = ss[0].spectrogram(show=False, axes=ax2, log=False, dbscale=dbscale, cmap='viridis')
	mappable = ax2.images[0]
	plt.colorbar(mappable=mappable, cax=ax3)

	ax2.set_ylim(fminbp, fmaxbp)


	if 'cronplots' in OUTPATH:
		specbp = os.path.join(OUTPATH, stn + '.' + start.strftime('%Y.%j') + "-spec-band.png")
	else:
		specbp = os.path.join(OUTPATH, stn + '.' + start.strftime('%Y.%j.%H%M%S-') + str(dur) + "-spec-band.png")
	plt.savefig(specbp)
	specbp = WEBPATH + os.path.split(specbp)[1]



	imgpaths = {
		'avail': avail,
		'filtmin': fminbp,
		'filtmax': fmaxbp,
		'heli': heli,
		'helibp': helibp,
		'spec': spec,
		'specbp': specbp,
	}
	return imgpaths
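
A usage sketch under stated assumptions: the NET.STA.LOC.CHA code is hypothetical, and the module globals (AVAILPATH, WEBPATH, STA_DEF) must point at real directories:

from datetime import datetime, timedelta

end = datetime.utcnow()
start = end - timedelta(hours=1)
imgs = processor('AM.R0000.00.EHZ', start, end,
                 dbscale=True, filtmin=0.7, filtmax=2.0)
print(imgs['heli'], imgs['spec'])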