    tsum = tsum + time.time() - start
avgBPSosFilter = tsum / niter

# time the envelope calculation
tsum = 0
for i in range(niter):
    trace.set_data(a + 1)
    start = time.time()
    trace.envelope()
    tsum = tsum + time.time() - start
avgEnvelope = tsum / niter

print("ObsPy test...")
from obspy.core.trace import Trace
from obspy.signal.filter import envelope

traceObspy = Trace(data=a)
traceObspy.detrend('demean')

# time ObsPy's demean
tsum = 0
for i in range(niter):
    traceObspy.data = a + 1
    start = time.time()
    traceObspy.detrend('demean')
    tsum = tsum + time.time() - start
    y = traceObspy.data
avgDemeanObspy = tsum / niter
#print("Obspy average time for demean %e (s)"%(avgDemeanObspy))

# time ObsPy's linear detrend
tsum = 0
for i in range(niter):
    traceObspy.data = a + 1
    start = time.time()
    traceObspy.detrend('linear')
    tsum = tsum + time.time() - start
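
# --- Hedged, self-contained sketch of the timing pattern above ---
# Illustrative only: the array `a`, `niter`, and the use of
# time.perf_counter() (instead of time.time()) are assumptions, not part
# of the original benchmark. Trace.detrend('demean') and
# obspy.signal.filter.envelope() are real ObsPy calls.
import time
import numpy as np
from obspy.core.trace import Trace
from obspy.signal.filter import envelope

niter = 100                          # placeholder iteration count
a = np.random.randn(100000)          # placeholder test signal

tr = Trace(data=a)
tsum = 0.0
for _ in range(niter):
    tr.data = a + 1                  # reset so each pass does real work
    start = time.perf_counter()      # perf_counter has better resolution
    tr.detrend('demean')
    tsum += time.perf_counter() - start
print("ObsPy demean: %e s/iter" % (tsum / niter))

start = time.perf_counter()
env = envelope(a)                    # envelope() operates on a plain ndarray
print("ObsPy envelope: %e s" % (time.perf_counter() - start))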
def read_knet(filename):
    """Read Japanese K-NET strong motion file.

    Args:
        filename (str): Path to possible K-NET data file.

    Returns:
        Stream: ObsPy Stream containing three channels of acceleration
            data (cm/s**2).
    """
    if not is_knet(filename):
        raise Exception('%s is not a valid KNET file' % filename)

    # parse the header portion of the file
    with open(filename, 'rt') as f:
        lines = [next(f) for x in range(TEXT_HDR_ROWS)]

    hdr = {}
    coordinates = {}
    standard = {}
    hdr['network'] = 'BO'
    hdr['station'] = lines[5].split()[2]
    standard['station_name'] = ''

    # according to the powers that defined the Network.Station.Channel.Location
    # "standard", Location is a two-character field. Most data providers,
    # including K-NET here, don't provide this. We'll flag it as "--".
    hdr['location'] = '--'

    coordinates['latitude'] = float(lines[6].split()[2])
    coordinates['longitude'] = float(lines[7].split()[2])
    coordinates['elevation'] = float(lines[8].split()[2])

    hdr['sampling_rate'] = float(
        re.search(r'\d+', lines[10].split()[2]).group())
    hdr['delta'] = 1 / hdr['sampling_rate']
    standard['units'] = 'acc'

    dir_string = lines[12].split()[1].strip()
    # K-NET files have directions listed as N-S, E-W, or U-D,
    # whereas in KiK-net those directions are '4', '5', or '6'.
    if dir_string in ['N-S', '4']:
        hdr['channel'] = get_channel_name(hdr['sampling_rate'],
                                          is_acceleration=True,
                                          is_vertical=False,
                                          is_north=True)
    elif dir_string in ['E-W', '5']:
        hdr['channel'] = get_channel_name(hdr['sampling_rate'],
                                          is_acceleration=True,
                                          is_vertical=False,
                                          is_north=False)
    elif dir_string in ['U-D', '6']:
        hdr['channel'] = get_channel_name(hdr['sampling_rate'],
                                          is_acceleration=True,
                                          is_vertical=True,
                                          is_north=False)
    else:
        raise Exception('Could not parse direction %s' %
                        lines[12].split()[1])

    scalestr = lines[13].split()[2]
    parts = scalestr.split('/')
    num = float(parts[0].replace('(gal)', ''))
    den = float(parts[1])
    calib = num / den
    hdr['calib'] = calib

    duration = float(lines[11].split()[2])
    hdr['npts'] = int(duration * hdr['sampling_rate'])

    timestr = ' '.join(lines[9].split()[2:4])
    # the K-NET and KiK-net data loggers add a 15 s time delay;
    # it is removed here
    sttime = datetime.strptime(timestr, TIMEFMT) - timedelta(seconds=15.0)
    # shift the time to UTC (Japanese time is 9 hours ahead)
    sttime = sttime - timedelta(seconds=9 * 3600.)
    hdr['starttime'] = sttime

    # read in the data - there is a max of 8 columns per line;
    # the code below handles the case when the last line has
    # fewer than 8 columns
    if hdr['npts'] % COLS_PER_LINE != 0:
        nrows = int(np.floor(hdr['npts'] / COLS_PER_LINE))
        nrows2 = 1
    else:
        nrows = int(np.ceil(hdr['npts'] / COLS_PER_LINE))
        nrows2 = 0
    data = np.genfromtxt(filename, skip_header=TEXT_HDR_ROWS,
                         max_rows=nrows, filling_values=np.nan)
    data = data.flatten()
    if nrows2:
        skip_header = TEXT_HDR_ROWS + nrows
        data2 = np.genfromtxt(filename, skip_header=skip_header,
                              max_rows=nrows2, filling_values=np.nan)
        data = np.hstack((data, data2))
        nrows += nrows2

    # apply the correction factor we're given in the header
    data *= calib

    # fill out the rest of the standard dictionary
    standard['horizontal_orientation'] = np.nan
    standard['instrument_period'] = np.nan
    standard['instrument_damping'] = np.nan
    standard['process_time'] = ''
    standard['process_level'] = 'V1'
    standard['sensor_serial_number'] = ''
    standard['instrument'] = ''
    standard['comments'] = ''
    standard['structure_type'] = ''
    standard['corner_frequency'] = np.nan
    standard['units'] = 'acc'
    standard['source'] = SRC
    standard['source_format'] = 'knet'
    hdr['coordinates'] = coordinates
    hdr['standard'] = standard

    # create a Trace from the data and metadata
    trace = Trace(data.copy(), Stats(hdr.copy()))
    # to match the max values in the headers,
    # we need to detrend/demean the data (??)
    trace.detrend('linear')
    trace.detrend('demean')

    stream = Stream(trace)
    return stream
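
# --- Minimal sketch of the two-pass genfromtxt read used above ---
# Synthetic stand-in for a K-NET data block: `npts` values written 8 per
# line, so the final line is short. Everything here (npts, the fake file)
# is illustrative; only numpy's skip_header/max_rows parameters are real.
import io
import numpy as np

npts, cols = 19, 8
values = [str(i) for i in range(npts)]
body = "\n".join(" ".join(values[i:i + cols]) for i in range(0, npts, cols))

nrows = npts // cols                           # full rows only
data = np.genfromtxt(io.StringIO(body), max_rows=nrows).flatten()
if npts % cols:
    # second pass picks up the short final row
    data2 = np.genfromtxt(io.StringIO(body), skip_header=nrows, max_rows=1)
    data = np.hstack((data, data2))
assert len(data) == npts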
def detect_stalta(self):
    STA_len = self.params["STA_len"]
    LTA_len = self.params["LTA_len"]
    STALTA_thresh = self.params["STALTA_thresh"]
    det_off_win = self.params["no_det_win"]

    try:
        devices = self.traces.data["device_id"].unique()
    except Exception as exception:
        logging.error(exception)
        devices = []

    for device in devices:
        device_rows = self.traces.data[self.traces.data["device_id"] == device]
        for channel in ["x", "y", "z"]:
            trace = device_rows[channel].to_numpy()
            times = device_rows["cloud_t"]
            sr = device_rows["sr"].iloc[0]

            if len(trace) > int(np.ceil(sr * (STA_len + LTA_len))):
                # build an ObsPy Trace for this channel
                tr = Trace()
                tr.data = trace
                tr.stats.delta = 1 / sr
                tr.stats.channel = channel
                tr.stats.station = device
                # tr.filter("highpass", freq=0.2)
                tr.detrend(type="constant")
                tr_orig = tr.copy()

                # noise level over the first STA window, used to reject
                # overly noisy traces below
                std = np.std(tr_orig.data[:int(STA_len * sr)])

                # replace the data with the recursive STA/LTA
                # characteristic function (window lengths in seconds)
                tr.trigger("recstalta", sta=STA_len, lta=LTA_len)
                (ind, ) = np.where(tr.data > STALTA_thresh)
                if len(ind) > 0:
                    det_time = times.iloc[ind[0]]
                    # skip this detection if another one on the same
                    # device falls within the hold-off window
                    past_detections = self.detections.data[
                        (self.detections.data["device_id"] == device)
                        & (self.detections.data["cloud_t"] - det_time
                           + det_off_win > 0)]
                    if past_detections.shape[0] == 0 and std <= self.params["max_std"]:
                        # build a detection ID from the UTC timestamp
                        timestamp = datetime.utcfromtimestamp(det_time)
                        year = str(timestamp.year - 2000).zfill(2)
                        month = str(timestamp.month).zfill(2)
                        day = str(timestamp.day).zfill(2)
                        hour = str(timestamp.hour).zfill(2)
                        minute = str(timestamp.minute).zfill(2)
                        detection_id = "D_" + year + month + day + hour + minute

                        new_detection = pd.DataFrame(
                            {
                                "detection_id": detection_id,
                                "device_id": device,
                                "cloud_t": det_time,
                                "mag1": None,
                                "mag2": None,
                                "mag3": None,
                                "mag4": None,
                                "mag5": None,
                                "mag6": None,
                                "mag7": None,
                                "mag8": None,
                                "mag9": None,
                                "event_id": None,
                            },
                            index=[0],
                        )

                        # plot all detections and save in obj/detections folder
                        if self.params["plot_detection"]:
                            self.plot_detection(tr_orig, tr, device,
                                                detection_id, std)
                        self.detections.update(new_detection)
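
# --- Hedged sketch of the recursive STA/LTA trigger used above ---
# Trace.trigger("recstalta", sta=..., lta=...) takes window lengths in
# seconds and converts them to samples internally; the functional form
# below does the same thing directly. Sampling rate, window lengths,
# threshold, and the synthetic data are placeholders, not values from
# this codebase. recursive_sta_lta() and trigger_onset() are real
# functions in obspy.signal.trigger.
import numpy as np
from obspy.signal.trigger import recursive_sta_lta, trigger_onset

sr = 100.0                        # placeholder sampling rate (Hz)
sta_len, lta_len = 1.0, 20.0      # placeholder window lengths (s)
thresh = 4.0                      # placeholder trigger threshold

# synthetic trace: noise with a transient at t = 60 s
rng = np.random.default_rng(0)
data = rng.normal(size=int(120 * sr))
data[int(60 * sr):int(62 * sr)] += 10.0

# window lengths must be given in samples here
cft = recursive_sta_lta(data, int(sta_len * sr), int(lta_len * sr))

# an on-threshold and a lower off-threshold, as trigger_onset expects
onsets = trigger_onset(cft, thresh, thresh / 2.0)
if len(onsets):
    print("first trigger at t = %.2f s" % (onsets[0][0] / sr))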