print("etime", etime)

                # cut the 3-component template and save file
                nchannels = len(channels)

                for ichan in range(nchannels):
                    print("ista", ista)
                    # work on a copy so the original stream is left untouched
                    st1 = st.copy()
                    print(st1.select(station=ista, channel=channels[ichan]))
                    st2 = st1.select(station=ista, channel=channels[ichan])

                    if st2:
                        tw = st2[0]

                        # trim to the template window; skip the channel if nothing remains
                        if tw.trim(stime, etime):
                            print(tw)
                            netwk = tw.stats.network
                            ch = tw.stats.channel
                            newfile = (
                                temp_dir
                                + str(iev)
                                + "."
                                + netwk
                                + "."
                                + ista
                                + ".."
Example #2
    def create_SG2K_initiate(self, event, quake_df):

        # Launch the custom station/component selection dialog
        sel_dlg = selectionDialog(parent=self,
                                  sta_list=self.station_list,
                                  chan_list=self.channel_codes)
        if sel_dlg.exec_():
            select_sta, select_comp = sel_dlg.getSelected()

            # specify output directory for miniSEED files
            temp_seed_out = os.path.join(os.path.dirname(self.cat_filename),
                                         event)

            # create directory
            if os.path.exists(temp_seed_out):
                shutil.rmtree(temp_seed_out)
            os.mkdir(temp_seed_out)

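            # database query time: 10 minutes before the catalogue origin time, as a POSIX timestamp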
            query_time = UTCDateTime(quake_df['qtime'] - (10 * 60)).timestamp

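            # waveform window: 5 minutes before to 15 minutes after the origin time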
            trace_starttime = UTCDateTime(quake_df['qtime'] - (5 * 60))
            trace_endtime = UTCDateTime(quake_df['qtime'] + (15 * 60))

            # Create a Stream object to put data into
            # st = Stream()
            # Create a dictionary to put traces into (keys are tr_ids)
            st_dict = defaultdict(list)

            print('---------------------------------------')
            print('Finding Data for Earthquake: ' + event)

            if os.path.splitext(self.db_filename)[1] == ".db":
                # run SQL query
                for matched_entry in self.session.query(Waveforms). \
                        filter(or_(and_(Waveforms.starttime <= query_time, query_time < Waveforms.endtime),
                                   and_(query_time <= Waveforms.starttime, Waveforms.starttime < query_time + 30 * 60)),
                               Waveforms.station.in_(select_sta),
                               Waveforms.component.in_(select_comp)):
                    print(matched_entry.ASDF_tag)

                    # read in the data to obspy
                    temp_st = read(
                        os.path.join(matched_entry.path,
                                     matched_entry.waveform_basename))

                    # modify network header
                    temp_tr = temp_st[0]
                    temp_tr.stats.network = matched_entry.new_network

                    # st.append(temp_tr)
                    st_dict[temp_tr.get_id()].append(temp_tr)

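            # .json index: apply the same overlap/station/component filter to a plain dictionary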
            if os.path.splitext(self.db_filename)[1] == ".json":
                # run python dictionary query
                for key, matched_entry in self.network_dict.items():
                    if ((matched_entry['starttime'] <= query_time < matched_entry['endtime']
                         or query_time <= matched_entry['starttime'] < query_time + (30 * 60))
                            and matched_entry['station'] in select_sta
                            and matched_entry['component'] in select_comp):
                        print(matched_entry['ASDF_tag'])

                        # read in the data to obspy
                        temp_st = read(os.path.join(matched_entry['path'],
                                                    key))

                        # modify network header
                        temp_tr = temp_st[0]
                        temp_tr.stats.network = matched_entry['new_network']

                        # trim trace to start and endtime
                        temp_tr.trim(starttime=trace_starttime,
                                     endtime=trace_endtime)

                        # st.append(temp_tr)
                        st_dict[temp_tr.get_id()].append(temp_tr)

            # free memory
            temp_st = None
            temp_tr = None

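            # proceed only if at least one trace was found for this event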
            if len(st_dict) > 0:

                print('')
                print('Merging Traces from %s Stations....' % len(st_dict))
                # Attempt to merge all traces with matching ID'S (same keys in dict) in place
                # st.merge()

                # iterate over a static list of keys because entries may be deleted below
                for key in list(st_dict.keys()):
                    if len(st_dict[key]) > 1:
                        temp_st = Stream(traces=st_dict[key])
                        # merge in place
                        # print('\tMerging %s in Stream:' % temp_st.count())
                        temp_st.merge()
                        # assign trace back to dictionary key if there is data
                        if temp_st:
                            print("Station {0} has {1} Seconds of data".format(
                                key, temp_st[0].stats.endtime -
                                temp_st[0].stats.starttime))
                            st_dict[key] = temp_st[0]
                        else:
                            # no data for station, delete the key
                            print("No Data for: %s" % key)
                            del st_dict[key]
                            continue
                    elif len(st_dict[key]) == 1:
                        print("Station {0} has {1} Seconds of data".format(
                            key, st_dict[key][0].stats.endtime -
                            st_dict[key][0].stats.starttime))
                        st_dict[key] = st_dict[key][0]
                    elif len(st_dict[key]) == 0:
                        # no data for station delete key
                        print("No Data for: %s" % key)
                        del st_dict[key]

                print(
                    '\nTrimming Traces to 20 mins around earthquake time....')

                # now trim the st object to 5 mins
                # before query time and 15 minutes afterwards

                for key in st_dict.keys():

                    st_dict[key] = st_dict[key].trim(starttime=trace_starttime,
                                                     endtime=trace_endtime,
                                                     pad=True,
                                                     fill_value=0)

                # st.trim(starttime=trace_starttime, endtime=trace_endtime, pad=True, fill_value=0)

                try:
                    # write traces into temporary directory
                    # for tr in st:
                    for key in st_dict.keys():
                        if isinstance(st_dict[key], Stream):
                            # there is a problem with network codes (two stations named the same)
                            # ignore it for now
                            continue
                        st_dict[key].write(
                            os.path.join(temp_seed_out,
                                         st_dict[key].get_id() + ".MSEED"),
                            format="MSEED")
                    print("\nWrote Temporary MiniSEED data to: " +
                          temp_seed_out)
                    print('')
                except Exception as e:
                    print("Something went wrong writing temporary MiniSEED data: %s" % e)

            else:
                print("No Data for Earthquake!")

            # free memory
            st_dict = None

            # Now requesting reference station data from IRIS if desired
            if self.ref_radioButton.isChecked():
                ref_dir = os.path.join(temp_seed_out, 'ref_data')

                # create ref directory
                if os.path.exists(ref_dir):
                    shutil.rmtree(ref_dir)
                os.mkdir(ref_dir)

                # request stations that are close to the selected stations

                # first use the coords lists to get a bounding box for array
                def calc_bounding_box(x, y):
                    min_x, max_x = (min(x), max(x))
                    min_y, max_y = (min(y), max(y))

                    return (min_x, max_x, min_y, max_y)

                bb = calc_bounding_box(self.station_coords[0],
                                       self.station_coords[1])

                # request station metadata around the earthquake time, up to 2 degrees beyond the array bounding box
                print(
                    '\nRequesting Waveform Data from Nearby Permanent Network Stations....'
                )

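                # FDSN web-service client for the IRIS data centre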
                client = Client("IRIS")
                self.ref_inv = client.get_stations(
                    network="AU",
                    starttime=UTCDateTime(quake_df['qtime'] - (5 * 60)),
                    endtime=UTCDateTime(quake_df['qtime'] + (15 * 60)),
                    minlongitude=bb[0] - 2,
                    maxlongitude=bb[1] + 2,
                    minlatitude=bb[2] - 2,
                    maxlatitude=bb[3] + 2,
                    level='channel')

                print(self.ref_inv)

                ref_st = Stream()

                # go through inventory and request timeseries data
                for net in self.ref_inv:
                    for stn in net:
                        try:
                            ref_st += client.get_waveforms(
                                network=net.code,
                                station=stn.code,
                                channel='*',
                                location='*',
                                starttime=UTCDateTime(quake_df['qtime'] -
                                                      (5 * 60)),
                                endtime=UTCDateTime(quake_df['qtime'] +
                                                    (15 * 60)))
                        except FDSNException:
                            print(
                                'No Data for Earthquake from Reference Station: '
                                + stn.code)

                        else:
                            # plot the reference stations
                            js_call = "addRefStation('{station_id}', {latitude}, {longitude});" \
                                .format(station_id=stn.code, latitude=stn.latitude,
                                        longitude=stn.longitude)
                            self.web_view.page().mainFrame().evaluateJavaScript(js_call)

                try:
                    # write ref traces into temporary directory
                    for tr in ref_st:
                        tr.write(os.path.join(ref_dir, tr.id + ".MSEED"),
                                 format="MSEED")
                    print("Wrote Reference MiniSEED data to: " + ref_dir)
                    print('\nEarthquake Data Query Done!!!')
                except Exception as e:
                    print("Something went wrong writing reference data: %s" % e)

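                # save the reference station metadata as StationXML alongside the waveforms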
                self.ref_inv.write(os.path.join(ref_dir, "ref_metadata.xml"),
                                   format="STATIONXML")