def test_kilometer2degrees(self):
    """
    Simple test of the convenience function.
    """
    # With an Earth radius of 6371 km the conversion is exact.
    degrees = kilometer2degrees(111.19492664455873, radius=6371)
    assert degrees == 1.0
    # A different radius must change the result.  Compare rounded values
    # to sidestep machine-dependent floating-point precision.
    degrees = kilometer2degrees(111.19492664455873, radius=6381)
    assert round(degrees, 5) == round(0.99843284751606332, 5)
def test_kilometer2degrees(self):
    """
    Simple test of the convenience function.
    """
    # Conversion with an Earth radius of 6371 km is exact.
    self.assertEqual(
        kilometer2degrees(111.19492664455873, radius=6371), 1.0)
    # Changing the radius must change the result; compare rounded values
    # to avoid precision differences between machines.
    expected = round(0.99843284751606332, 5)
    actual = round(kilometer2degrees(111.19492664455873, radius=6381), 5)
    self.assertEqual(actual, expected)
def test_lcalda(self):
    """
    Test that distances are set when geographic information is present
    and lcalda is True, and that they're not set when geographic
    information is missing or lcalca is false.
    """
    stla, stlo, evla, evlo = -35.0, 100, 42.5, -37.5
    meters, az, baz = gps2dist_azimuth(evla, evlo, stla, stlo)
    km = meters / 1000.0
    gcarc = kilometer2degrees(km)

    # lcalda True with full geometry: all distance headers are computed.
    sac = SACTrace(lcalda=True, stla=stla, stlo=stlo, evla=evla,
                   evlo=evlo)
    for header, expected in (('az', az), ('baz', baz), ('dist', km),
                             ('gcarc', gcarc)):
        self.assertAlmostEqual(getattr(sac, header), expected, places=4)

    # lcalda False: nothing is computed even with full geometry.
    sac = SACTrace(lcalda=False, stla=stla, stlo=stlo, evla=evla,
                   evlo=evlo)
    for header in ('az', 'baz', 'dist', 'gcarc'):
        self.assertIs(getattr(sac, header), None)

    # lcalda True but incomplete geometry: nothing is computed.
    sac = SACTrace(lcalda=True, stla=stla)
    for header in ('az', 'baz', 'dist', 'gcarc'):
        self.assertIs(getattr(sac, header), None)

    # exception raised when set_distances is forced but not all distances
    # values are set. NOTE: still have a problem when others are "None".
    sac = SACTrace(lcalda=True, stla=stla)
    self.assertRaises(SacHeaderError, sac._set_distances, force=True)
def test_estimate_region_no_magnitude(self):
    # An event without magnitudes falls back to the minimum region size
    # (50 km diameter -> radius of half that, in degrees).
    event = copy.deepcopy(self.event)
    event.magnitudes = []
    region = estimate_region(event)
    origin = self.event.origins[0]
    self.assertEqual(origin.latitude, region["latitude"])
    self.assertEqual(origin.longitude, region["longitude"])
    self.assertEqual(region["maxradius"], kilometer2degrees(50) / 2)
def analyse_earthquake(self, event_obj):
    """
    Extract and plot the waveforms associated with one event.

    Opens the station/component selection dialog, pulls the matching
    waveforms out of the ASDF data set, attaches epicentral distance and
    the first P arrival to each trace, then refreshes the waveform plot.

    :param event_obj: obspy Event to analyse
    """
    # Get event catalogue
    self.event_cat = self.ds.events
    comp_list = ['*Z', '*N', '*E']

    # Launch the custom station/component selection dialog
    sel_dlg = selectionDialog(parent=self, sta_list=self.ds.waveforms.list())
    if not sel_dlg.exec_():
        return
    select_sta, bool_comp = sel_dlg.getSelected()
    query_comp = list(itertools.compress(comp_list, bool_comp))

    # Open up a new stream object
    self.st = Stream()

    # Station codes without the network prefix.  BUGFIX: on Python 3 a
    # bare map() object compares unequal to everything, so the original
    # query never matched -- materialize the list first.
    sta_codes = [el.split('.')[1] for el in select_sta]

    # use the ifilter functionality to extract desired streams to visualize
    for station in self.ds.ifilter(self.ds.q.station == sta_codes,
                                   self.ds.q.channel == query_comp,
                                   self.ds.q.event == event_obj):
        for filtered_id in station.list():
            if filtered_id == 'StationXML':
                continue
            self.st += station[filtered_id]

    # BUGFIX: Stream.__nonzero__ does not exist on Python 3; plain
    # truthiness does the same check portably.
    if not self.st:
        return

    # Get quake origin info
    origin_info = event_obj.preferred_origin() or event_obj.origins[0]
    # Build the travel-time model once instead of once per trace.
    tt_model = TauPyModel(model='iasp91')

    # Iterate through traces
    for tr in self.st:
        # Run Java Script to highlight all selected stations in station view
        js_call = "highlightStation('{station}')".format(
            station=tr.stats.network + '.' + tr.stats.station)
        self.ui.web_view.page().mainFrame().evaluateJavaScript(js_call)

        # Get inventory for trace
        inv = self.ds.waveforms[
            tr.stats.network + '.' + tr.stats.station].StationXML
        sta_coords = inv.get_coordinates(tr.get_id())

        dist, baz, _ = gps2dist_azimuth(sta_coords['latitude'],
                                        sta_coords['longitude'],
                                        origin_info.latitude,
                                        origin_info.longitude)
        dist_deg = kilometer2degrees(dist / 1000.0)
        # BUGFIX: phase_list must be a sequence of phase names --
        # ('P') is just the string 'P', not a tuple.
        arrivals = tt_model.get_travel_times(origin_info.depth / 1000.0,
                                             dist_deg, ['P'])

        # Write info to trace header
        tr.stats.distance = dist
        tr.stats.ptt = arrivals[0]

    # Sort the st by distance from quake
    self.st.sort(keys=['distance'])
    self.update_waveform_plot()
def maps(station_list, origin, strike, dip, rake, plot_file): """ Plotting a map with epicenter and possible stations according to distance and a map with with the beachball """ # sets figure's dimensions _fig_x = 10 _fig_y = 10 fig = plt.figure(figsize=(_fig_x,_fig_y)) # calculating map space _max = max(_station.distance_by_origin for _station in station_list) _max = int(round(_max * 1000 *2)) _size = _max + int(round(_max/7.0)) _diff = kilometer2degrees(round(_size/(2*2.0*1000))) parallels = [round(origin.latitude,2), round((origin.latitude-_diff),2), round((origin.latitude+_diff),2)] meridians = [round(origin.longitude,2), round((origin.longitude-_diff),2), round((origin.longitude+_diff),2)] m = Basemap(projection='laea', lat_0 = origin.latitude, lon_0 = origin.longitude, lat_ts=origin.latitude, resolution = 'i', area_thresh = 0.1, width = _size, height = _size) m.drawparallels(parallels,labels=[1,0,0,0], color='grey', fontsize=10) m.drawmeridians(meridians,labels=[0,0,0,1], color='grey', fontsize=10) m.drawrivers(color='aqua') m.drawcoastlines(color='0.2') m.drawcountries(color='0.4') m.drawmapboundary(fill_color='aqua') m.fillcontinents(color='coral',lake_color='aqua') x,y = m(origin.longitude, origin.latitude) # epicenter m.scatter(x, y, 1, color="#FFFF00", marker="*", zorder=3, linewidths=2, edgecolor="k") # beachball ax = plt.gca() b = beach.beach([strike, dip, rake], xy=(x,y), width=35000, linewidth=1, facecolor='r') b.set_zorder(10) ax.add_collection(b) # stations for station in station_list: x,y = m(station.longitude, station.latitude) m.scatter(x, y, 150, color="#33CC00", marker="^", zorder=3, linewidths=1, edgecolor="k") plt.text(x+1800, y+3000, station.code, family="monospace", fontsize=12) fig.savefig(plot_file)
def test_query_circular(self, bing_ebank):
    """Circular query returns only events within [minradius, maxradius]."""
    latitude, longitude, minradius, maxradius = (40.5, -112.12, 0.035, 0.05)
    # BUGFIX: the min/max keyword arguments were swapped, making the
    # query window empty (min > max) and the assertion vacuous.
    df = bing_ebank.read_index(
        latitude=latitude,
        longitude=longitude,
        minradius=minradius,
        maxradius=maxradius,
    )
    for lat, lon in zip(df["latitude"], df["longitude"]):
        dist, _, _ = gps2dist_azimuth(latitude, longitude, lat, lon)
        assert minradius <= kilometer2degrees(dist / 1000.0) <= maxradius
def Get_location(la_s, lo_s, la_r, lo_r, radius=3389.5, flattening=0):
    """
    Get the epicentral distance (degrees), azimuth and backazimuth
    between a source (la_s, lo_s) and a receiver (la_r, lo_r) on a body
    with the given radius and flattening.
    """
    distance, azimuth, back_azimuth = gps2dist_azimuth(
        lat1=la_s, lon1=lo_s, lat2=la_r, lon2=lo_r,
        a=radius, f=flattening)
    # distance is in the length unit of *radius*; convert to degrees on
    # the same sphere.
    epicentral_distance = kilometer2degrees(distance, radius=radius)
    return epicentral_distance, azimuth, back_azimuth
def get_unkown(self):
    """
    Return the dict of "unknown" source parameters (the ones that will
    be inverted for).  Only use these to create the observed data.
    """
    # Fixed source description.
    parameters = {
        'la_s': 10,
        'lo_s': 90,
        'depth_s': 10000,  # [m]
        'strike': 79,
        'dip': 50,
        'rake': 20,
        'm_tt': 1.81e+22,
        'm_pp': -1.74e+24,
        'm_rr': 1.71e+24,
        'm_tp': -1.230000e+24,
        'm_rt': 1.99e+23,
        'm_rp': -1.05e+23,
        'origin_time': obspy.UTCDateTime(2020, 1, 2, 3, 4, 5),
    }

    prior = self.get_prior()
    # Source-to-receiver geometry on a sphere (f=0) of the prior radius.
    dist, az, baz = gps2dist_azimuth(
        lat1=parameters['la_s'], lon1=parameters['lo_s'],
        lat2=prior['la_r'], lon2=prior['lo_r'],
        a=prior['radius'], f=0)
    parameters['baz'] = baz
    parameters['az'] = az
    parameters['epi'] = kilometer2degrees(dist, radius=prior['radius'])
    return parameters
def filter_catalog(catalog, lon0, lat0, minrad, maxrad):
    """
    Return a new Catalog containing only the events whose first origin
    lies between minrad and maxrad degrees (inclusive) of (lat0, lon0).
    """
    kept = []
    for event in catalog:
        origin = event.origins[0]
        dist_m = gps2dist_azimuth(origin['latitude'], origin['longitude'],
                                  lat0, lon0)[0]
        dist_deg = kilometer2degrees(dist_m / 1000.0)
        if minrad <= dist_deg <= maxrad:
            kept.append(event)
    return Catalog(events=kept)
def read_origin(event_str):
    """
    Read the origin information from the REST file string

    :param event_str: Contents of file as list of str
    :type event_str: list

    :returns: :class:`obspy.core.event.Event`
    """
    event = Event()
    # All origin fields come from whitespace-split columns of the first line.
    head = event_str[0].split()
    # Azimuthal gap is optional: short header lines simply lack column 17.
    try:
        gap = float(head[17])
    except IndexError:
        gap = None
    # NOTE(review): head[17] is used both for azimuthal_gap and for
    # used_phase_count below -- confirm the column indices against the
    # REST format specification.
    origin = Origin(
        time=UTCDateTime(
            year=int(head[0]), julday=int(head[1]), hour=int(head[2]),
            minute=int(head[3])) + float(head[4]),
        latitude=float(head[5]), longitude=float(head[6]),
        depth=float(head[7]) * 1000,  # km -> m
        origin_quality=OriginQuality(
            standard_error=float(head[9]),
            azimuthal_gap=gap,
            used_phase_count=int(head[17])),
        # Horizontal errors are given in km; convert to degrees.
        longitude_errors=QuantityError(
            uncertainty=kilometer2degrees(float(head[12]))),
        latitude_errors=QuantityError(
            uncertainty=kilometer2degrees(float(head[11]))),
        depth_errors=QuantityError(uncertainty=float(head[13]) * 1000),
        method_id=ResourceIdentifier("smi:local/REST"),
        evaluation_mode="automatic")
    event.origins.append(origin)
    # Magnitude column is optional as well.
    try:
        event.magnitudes.append(Magnitude(
            mag=float(head[19]), magnitude_type="M"))
    except IndexError:
        pass
    return event
def estimate_region(event: Event, min_length: float = 50.) -> dict:
    """
    Estimate the region to find templates within given a triggering event.

    Parameters
    ----------
    event
        The event that triggered this function
    min_length
        Minimum length in km for diameter of event circle around the
        triggering event

    Returns
    -------
    Dictionary keyed by "latitude", "longitude" and "maxradius"

    Notes
    -----
    Uses a basic Wells and Coppersmith relation, scaled by 1.25 times.
    """
    from obspy.geodetics import kilometer2degrees
    try:
        origin = event.preferred_origin() or event.origins[0]
    except IndexError:
        Logger.error("Triggering event has no origin, not using.")
        return None

    try:
        magnitude = event.preferred_magnitude() or event.magnitudes[0]
    except IndexError:
        # Typo fix: "length or {0}" -> "length of {0}"
        Logger.warning("Triggering event has no magnitude, using minimum "
                       "length of {0}".format(min_length))
        magnitude = None
    if magnitude:
        # Wells and Coppersmith (1994) rupture-length scaling.
        length = 10 ** ((magnitude.mag - 5.08) / 1.16)
        # Scale up a bit - for Darfield this gave 0.6 deg, but the aftershock
        # region is more like 1.2 deg radius
        length *= 1.25
    else:
        length = min_length
    # Enforce the minimum circle diameter (replaces the redundant
    # if-length-<=-min_length branch).
    length = max(length, min_length)
    # Convert the diameter in km to a radius in degrees.
    maxradius = kilometer2degrees(length) / 2.
    return {
        "latitude": origin.latitude,
        "longitude": origin.longitude,
        "maxradius": maxradius,
    }
def bin_filter(st, bin_lat0, bin_lon0, bin_radius):
    '''
    Removes traces which lie outside of a circular bin.
    bin_radius must be given in degrees.
    '''
    # BUGFIX: iterate over a shallow copy.  Removing from the Stream
    # while iterating it directly skips the trace that follows each
    # removal, leaving some out-of-bin traces in place.
    for tr in list(st):
        dist_m = gps2dist_azimuth(tr.stats.sac['stla'], tr.stats.sac['stlo'],
                                  bin_lat0, bin_lon0)[0]
        dist_deg = kilometer2degrees(dist_m / 1000.0)
        if dist_deg > bin_radius:
            st.remove(tr)
def bin_filter(st, bin_lat0, bin_lon0, bin_radius):
    '''
    Removes traces which lie outside of a circular bin.
    bin_radius must be given in degrees.
    '''
    # BUGFIX: iterate over a shallow copy.  Removing from the Stream
    # while iterating it directly skips the trace that follows each
    # removal, leaving some out-of-bin traces in place.
    for tr in list(st):
        dist_m = gps2dist_azimuth(tr.stats.sac['stla'], tr.stats.sac['stlo'],
                                  bin_lat0, bin_lon0)[0]
        dist_deg = kilometer2degrees(dist_m / 1000.0)
        if dist_deg > bin_radius:
            st.remove(tr)
def test_query_circular(self, bing_ebank):
    """Test circular queries in bank."""
    latitude, longitude, minradius, maxradius = (40.5, -112.12, 0.035, 0.05)
    with suppress_warnings():  # suppress install geographiclib warning
        # BUGFIX: the min/max keyword arguments were swapped, making the
        # query window empty (min > max) and the assertion vacuous.
        df = bing_ebank.read_index(
            latitude=latitude,
            longitude=longitude,
            minradius=minradius,
            maxradius=maxradius,
        )
    for lat, lon in zip(df["latitude"], df["longitude"]):
        dist, _, _ = gps2dist_azimuth(latitude, longitude, lat, lon)
        assert minradius <= kilometer2degrees(dist / 1000.0) <= maxradius
def compute_toa(
        evt: obspy.core.event.Event, slat: float, slon: float, phase: str,
        model: obspy.taup.TauPyModel
) -> Tuple[UTCDateTime, float, float, float, float]:
    """
    Compute time of theoretical arrival for teleseismic events and a
    given teleseismic phase at the provided station.

    :param evt: Event to compute the arrival for.
    :type evt: obspy.core.event.Event
    :param slat: station latitude
    :type slat: float
    :param slon: station longitude
    :type slon: float
    :param phase: The teleseismic phase to consider.
    :type phase: str
    :param model: Taupymodel to use
    :type model: obspy.taup.TauPyModel
    :return: A Tuple holding: [the time of theoretical arrival (UTC),
        the apparent slowness in s/km, the ray parameter in s/deg, the
        back azimuth, the distance between station and event in deg]
    :rtype: Tuple[UTCDateTime, float, float, float, float]
    """
    origin = evt.preferred_origin() or evt.origins[0]
    # Second return value is the azimuth from station to event, which is
    # the station's back azimuth.
    distance, baz, _ = gps2dist_azimuth(slat, slon, origin.latitude,
                                        origin.longitude)
    distance = kilometer2degrees(distance / 1000)

    # compute time of first arrival & ray parameter
    odepth = origin.depth or 10000  # Some events have no depth information

    # Throw out events that should not be used for RFs: too deep for the
    # phase or outside its epicentral-distance window.
    if (constants.maxdepth[phase] and constants.maxdepth[phase] < odepth/1000) \
            or not (
            constants.min_epid[phase] <= distance <= constants.max_epid[phase]):
        raise ValueError(
            f'Distance {distance} deg or origin depth {odepth}m should not be '
            + 'used for RFs')

    arrival = model.get_travel_times(source_depth_in_km=odepth / 1000,
                                     distance_in_degree=distance,
                                     phase_list=[phase])[0]
    rayp_s_deg = arrival.ray_param_sec_degree
    rayp = rayp_s_deg / 111319.9  # apparent slowness
    toa = origin.time + arrival.time
    return toa, rayp, rayp_s_deg, baz, distance
def write_tt_to_file(fname, periods, bazs, dists, tts):
    """
    Append a 'surface_waves' group with back azimuths, distances, periods
    and per-period Rayleigh/Love travel-time tables to an existing HDF5
    file *fname*.
    """
    with File(fname, 'r+') as f:
        # print('Writing to %s' % fname)
        grp = f.create_group('surface_waves')
        grp.create_dataset('backazimuths', data=bazs, dtype='f2')
        # NOTE(review): radius=3389.5e3 (Mars radius in metres) implies
        # *dists* is in metres here, while other call sites in this code
        # base pass the radius in km -- confirm the unit of *dists*.
        dists_deg = kilometer2degrees(dists, radius=3389.5e3)
        grp.create_dataset('distances', data=dists_deg, dtype='f4')
        grp.create_dataset('periods', data=periods, dtype='f2')
        for iperiod, period in enumerate(periods):
            grp_period = grp.create_group('period_%02d' % iperiod)
            grp_period.create_dataset('p_c', data=period)
            grp_period.create_dataset('f_c', data=1./period)
            # One travel-time table per wave type, stored half-precision.
            for type in ['rayleigh', 'love']:
                grp_period.create_dataset('tt_%s' % type,
                                          data=tts[type][iperiod],
                                          chunks=tts[type][iperiod].shape,
                                          dtype='f2')
def set_map_parameters(mag):
    """Set the map parameters based on the event magnitude."""
    # Defaults for the aftershock magnitude scale; the two smallest map
    # classes override them below.
    min_mag, max_mag, aftershock_scale_factor = 3.1, 9.8, 2

    if mag > 9:
        map_size_lat, lat_line_spacing = 10, 5
        scalebar_length_km, max_radius = 500, 1000
    elif mag >= 8.5:
        map_size_lat, lat_line_spacing = 5, 2
        scalebar_length_km, max_radius = 300, 500
    elif mag >= 8:
        map_size_lat, lat_line_spacing = 4, 2
        scalebar_length_km, max_radius = 200, 400
    elif mag >= 7:
        map_size_lat, lat_line_spacing = 3, 2
        scalebar_length_km, max_radius = 100, 300
    elif mag >= 6:
        map_size_lat, lat_line_spacing = 2, 1
        scalebar_length_km, max_radius = 100, 200
        min_mag, max_mag, aftershock_scale_factor = 1.8, 8.5, 1.9
    else:
        map_size_lat, lat_line_spacing = 1.5, 0.5
        scalebar_length_km, max_radius = 50, 100
        min_mag, max_mag, aftershock_scale_factor = 1.3, 8.0, 1.8

    # Convert the search radius from km to degrees.
    max_radius = kilometer2degrees(max_radius)
    return (map_size_lat, lat_line_spacing, scalebar_length_km, max_radius,
            min_mag, max_mag, aftershock_scale_factor)
def set_stats(st, inv, ev):
    """
    Sets needed information for rftn calculation in the stats dictionary
    for each trace in the Stream.  This information consists of
    back_azimuth, distance, origin time, P wave arrival.

    :param st: Obspy Stream object containing one station 3 channels
    :param inv: Obpsy Inventory object containing stations
    :param ev: Obspy event object for the earthquake
    """
    origin = ev.origins[0]

    # Station coordinates from the inventory entry matching this stream.
    sta_inv = inv.select(network=st[0].stats.network,
                         station=st[0].stats.station)
    sta_lat = sta_inv[0][0].latitude
    sta_lon = sta_inv[0][0].longitude

    # Station -> event geometry; second return value is the back azimuth.
    dist_m, baz, _ = gps2dist_azimuth(sta_lat, sta_lon,
                                      origin.latitude, origin.longitude)

    rf = {
        'origin_time': origin.time,
        'ev_lat': origin.latitude,
        'ev_lon': origin.longitude,
        'ev_resource_id': ev.resource_id.id,
        'ev_dep_m': origin.depth,
        'sta_lat': sta_lat,
        'sta_lon': sta_lon,
        'gcarc': kilometer2degrees(dist_m / 1000),
        'baz': baz,
        'method': 'P'
    }
    for tr in st:
        tr.stats.rf = rf
        tr.stats['back_azimuth'] = baz
def read_nlloc_hyp(filename, coordinate_converter=None, picks=None, **kwargs):
    """
    Reads a NonLinLoc Hypocenter-Phase file to a
    :class:`~obspy.core.event.Catalog` object.

    .. note::

        Coordinate conversion from coordinate frame of NonLinLoc model
        files / location run to WGS84 has to be specified explicitly by
        the user if necessary.

    .. note::

        An example can be found on the :mod:`~obspy.io.nlloc` submodule
        front page in the documentation pages.

    :param filename: File or file-like object in text mode.
    :type coordinate_converter: func
    :param coordinate_converter: Function to convert (x, y, z)
        coordinates of NonLinLoc output to geographical coordinates and
        depth in meters (longitude, latitude, depth in kilometers). If
        left ``None``, the geographical coordinates in the "GEOGRAPHIC"
        line of NonLinLoc output are used. The function should accept
        three arguments x, y, z (each of type :class:`numpy.ndarray`)
        and return a tuple of three :class:`numpy.ndarray` (lon, lat,
        depth in kilometers).
    :type picks: list of :class:`~obspy.core.event.Pick`
    :param picks: Original picks used to generate the NonLinLoc
        location. If provided, the output event will include the
        original picks and the arrivals in the output origin will link
        to them correctly (with their ``pick_id`` attribute). If not
        provided, the output event will include (the rather basic) pick
        information that can be reconstructed from the NonLinLoc
        hypocenter-phase file.
    :rtype: :class:`~obspy.core.event.Catalog`
    """
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume its a string.
        # NOTE(review): the bare excepts below swallow *any* error while
        # falling back from path -> bytes -> str input; consider
        # narrowing them.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except:
            try:
                data = filename.decode()
            except:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    lines = data.splitlines()

    # remember picks originally used in location, if provided
    original_picks = picks
    if original_picks is None:
        original_picks = []

    # determine indices of block start/end of the NLLOC output file
    indices_hyp = [None, None]
    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("NLLOC "):
            indices_hyp[0] = i
        elif line.startswith("END_NLLOC"):
            indices_hyp[1] = i
        elif line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i
    if any([i is None for i in indices_hyp]):
        msg = ("NLLOC HYP file seems corrupt,"
               " could not detect 'NLLOC' and 'END_NLLOC' lines.")
        raise RuntimeError(msg)
    # strip any other lines around NLLOC block
    lines = lines[indices_hyp[0]:indices_hyp[1]]

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = ("NLLOC HYP file seems corrupt, 'PHASE' block is corrupt.")
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    # map each remaining line's leading keyword to the rest of the line
    lines = dict([line.split(None, 1) for line in lines])
    line = lines["SIGNATURE"]

    # signature is quoted; last three whitespace-separated tokens are
    # version, date and time
    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    creation_time = UTCDateTime().strptime(date + time, str("%d%b%Y%Hh%Mm%S"))

    if coordinate_converter:
        # maximum likelihood origin location in km info line
        line = lines["HYPOCENTER"]
        x, y, z = coordinate_converter(*map(float, line.split()[1:7:2]))
    else:
        # maximum likelihood origin location lon lat info line
        line = lines["GEOGRAPHIC"]
        x, y, z = map(float, line.split()[8:13:2])

    # maximum likelihood origin time info line
    line = lines["GEOGRAPHIC"]

    year, month, day, hour, minute = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, month, day, hour, minute, seconds)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_xx = float(line.split()[7])
    covariance_yy = float(line.split()[13])
    covariance_zz = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from covariance "
        "matrix as 1D marginal (Lon/Lat errors as great circle degrees) "
        "while OriginUncertainty min/max horizontal errors are calculated "
        "from 2D error ellipsoid and are therefore seemingly higher compared "
        "to 1D errors. Error estimates can be reconstructed from the "
        "following original NonLinLoc error statistics line:\nSTATISTICS " +
        lines["STATISTICS"])

    # goto location quality info line
    line = lines["QML_OriginQuality"].split()

    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # goto location quality info line
    line = lines["QML_OriginUncertainty"]

    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    cat = Catalog(events=[event])
    o = Origin()
    event.origins = [o]
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string))

    cat.creation_info.creation_time = UTCDateTime()
    cat.creation_info.version = "ObsPy %s" % __version__
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version)
    event.creation_info.version = version
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates are
    # large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_xx))
    except ValueError:
        if covariance_xx < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_yy))
    except ValueError:
        if covariance_yy < 0:
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_zz) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    # distances are given in km; QuakeML wants degrees
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        wid = WaveformStreamID(station_code=station)
        date, hourmin, sec = map(str, line[6:9])
        t = UTCDateTime().strptime(date + hourmin, "%Y%m%d%H%M") + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not associate
            # the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick for "
                       "arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    return cat
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats
from obspy.geodetics import kilometer2degrees

# Plot the group velocity (distance / travel time) against distance for
# the 10 s period from a CPS synthetic-data run.
infname = '../cps_data_dir/syndata_dir_all_EX/Tgr_10.0.txt'
# infname = '/media/lili/BCD29BBBD29B787A/cps_data_dir/syndata_dir_all_DD/Tgr_10.0.txt'
# infname = './syndata_dir_000_DD/Tgr_10.0.txt'
inArr = np.loadtxt(infname)
fig1, ax1 = plt.subplots()
# column 2: travel time, column 3: distance (km)
T = inArr[:, 2]
DistArr = inArr[:, 3]
# sort both arrays by distance so the line plot is monotonic
ind = np.argsort(DistArr)
DistArr = DistArr[ind]
T = T[ind]
DeltaArr = kilometer2degrees(DistArr)
VgrArr = DistArr / T
ax1.plot(DistArr, VgrArr, '--o', markersize=10)
plt.ylabel('Vgr (km/s) ', fontsize=40)
plt.xlabel('Distance (km)', fontsize=40)
plt.title('Group Velocity', fontsize=40)
ax1.tick_params(axis='x', labelsize=30)
ax1.tick_params(axis='y', labelsize=30)
plt.ylim([2.94, 3.1])

# Second figure: amplitudes for the same period (scaled to micro-units).
fig2, ax2 = plt.subplots()
infname = '../cps_data_dir/syndata_dir_all_EX/Amp_10.0.txt'
# infname = '/media/lili/BCD29BBBD29B787A/cps_data_dir/syndata_dir_all_DD/Amp_10.0.txt'
inArr2 = np.loadtxt(infname)
AmpArr = inArr2[:, 2] * 1000000.
DistArr = inArr2[:, 3]
def Acces_Blindtest_check():
    """
    Check blind-test waveforms: build the observed windows from a fixed
    MQS blind-test event, generate a synthetic with a deliberately
    shifted epicentral distance (epi - 3 deg), plot both and compute the
    combined body-/surface-wave misfit.  All paths are hard coded.
    """
    BLINDTEST_MSEED = '/home/nienke/Documents/Applied_geophysics/Thesis/anaconda/Database/data_Nienke/M5.0_3914855_deg_2019-09-22.mseed'
    BLINDTEST_XML = BLINDTEST_MSEED.replace(".mseed", ".xml")

    # Initiate Parameters:
    get_parameters = Get_Paramters()
    PRIOR = get_parameters.get_prior()
    VALUES = get_parameters.specifications()
    VALUES['npts'] = 2000
    VALUES[
        'directory'] = '/home/nienke/Documents/Applied_geophysics/Thesis/anaconda/Blindtest/check_waveforms'
    VALUES['blind'] = True

    # st = read(VALUES['directory'] + '/bw.mseed')
    # st_reject = read(VALUES['directory'] + '/bw_reject.mseed')

    # Initiate the databases from instaseis:
    db = instaseis.open_db(PRIOR['VELOC'])
    tr_obs = obspy.read(BLINDTEST_MSEED)
    # tr_obs.plot(outfile=VALUES['directory'] + '/Observed')
    tr_obs.integrate()
    tr_obs.plot(outfile=VALUES['directory'] + '/Observed_integrated')
    source = instaseis.Source.parse(BLINDTEST_XML)
    blindtest = Blindtest()
    events = blindtest.get_events(BLINDTEST_XML)
    # get_parameters.get_prior_blindtest(events[0])
    time, depth, la_s, lo_s = blindtest.get_pref_origin(events[0])

    # Source-receiver geometry on a sphere (f=0) and derived priors.
    dist, az, baz = gps2dist_azimuth(lat1=la_s, lon1=lo_s,
                                     lat2=PRIOR['la_r'],
                                     lon2=PRIOR['lo_r'],
                                     a=PRIOR['radius'], f=0)
    epi = kilometer2degrees(dist, radius=PRIOR['radius'])
    PRIOR['az'] = az
    PRIOR['baz'] = baz
    PRIOR['epi']['range_min'] = epi - 5
    PRIOR['epi']['range_max'] = epi + 5
    PRIOR['epi']['spread'] = 1
    PRIOR['depth']['range_min'] = depth - 10000
    PRIOR['depth']['range_max'] = depth + 10000
    PRIOR['network'] = tr_obs.traces[0].meta.network
    PRIOR['location'] = tr_obs.traces[0].meta.location
    PRIOR['station'] = tr_obs.traces[0].meta.station

    # Observed body-wave windows and noise-based variance estimate.
    est_noise = Create_observed(PRIOR, db)
    create = Source_code(PRIOR['VELOC_taup'])
    traces_obs, p_obs, s_obs, p_time_obs, s_time_obs = create.get_window_obspy(
        tr_obs, epi, depth, time, VALUES['npts'])
    PRIOR['var_est'] = est_noise.get_var_data(p_time_obs, tr_obs)
    obs_time = Create_observed(PRIOR, db)
    time_at_receiver = obs_time.get_receiver_time(epi, depth, time)
    plt.figure()

    # Find the MQS catalog entry whose origin date matches this event.
    catalog_path = '/home/nienke/Documents/Applied_geophysics/Thesis/anaconda/Additional_scripts/MQScatalog_withFrequencies/MQS_absolute_withFrequencyInfo.xml'
    events_catalog = blindtest.get_events(catalog_path)

    for v in events_catalog:
        t, d, lat_ev, lo_ev = blindtest.get_pref_origin(v)
        if time.date == t.date:
            Pick_event = v
            break
    PRIOR['M0'] = blindtest.get_pref_scalarmoment(Pick_event)
    picks_surface = get_phase_picks(Pick_event, pick_type='surface')
    # Observed surface-wave (Rayleigh/Love) envelopes.
    R_env_obs, L_env_obs = blindtest.pick_sw(tr_obs, picks_surface, epi,
                                             PRIOR, VALUES['npts'],
                                             VALUES['directory'],
                                             plot_modus=True)

    start_sample = create_starting_sample()
    # Fixed trial mechanism for the synthetic.
    strike = 243.423396191
    dip = 34.436087773
    rake = 164.912874159

    from Seismogram import Seismogram
    from Misfit import Misfit
    misfit = Misfit(VALUES['directory'])
    seis = Seismogram(PRIOR, db)
    # Deliberately offset the distance by 3 degrees for the check.
    epi = epi - 3
    depth = depth
    # ---------------------------------------------------------------------------------------------------------------- #
    # Shoot a geodesic from the receiver along the back azimuth to get
    # the trial source coordinates at the shifted distance.
    dict = geo.Geodesic(a=PRIOR['radius'], f=0).ArcDirect(
        lat1=PRIOR['la_r'], lon1=PRIOR['lo_r'], azi1=PRIOR['baz'],
        a12=epi, outmask=1929)
    d_syn, traces_syn, sources = seis.get_seis_manual(
        la_s=dict['lat2'], lo_s=dict['lon2'], depth=depth,
        strike=strike, dip=dip, rake=rake, time=time,
        M0=PRIOR['M0'], sdr=VALUES['sdr'])
    R_env_syn, L_env_syn = blindtest.pick_sw(traces_syn, picks_surface, epi,
                                             PRIOR, VALUES['npts'],
                                             VALUES['directory'],
                                             plot_modus=False)
    traces_syn.plot(outfile=VALUES['directory'] + '/syntethic')
    total_syn, p_syn, s_syn, p_time_syn, s_time_syn = create.get_window_obspy(
        traces_syn, epi, depth, time, VALUES['npts'])

    # Five-panel comparison of windowed P (Z, R) and S (Z, R, T) phases:
    # synthetic in red, observed in black.
    ax1 = plt.subplot2grid((5, 1), (0, 0))
    ax1.plot(zero_to_nan(p_syn.traces[0].data), c='r', linewidth=0.3)
    ax1.plot(zero_to_nan(p_obs.traces[0].data), c='k', linestyle=':',
             linewidth=0.3)
    plt.tight_layout()
    ax2 = plt.subplot2grid((5, 1), (1, 0))
    ax2.plot(zero_to_nan(p_syn.traces[1].data), c='r', linewidth=0.3)
    ax2.plot(zero_to_nan(p_obs.traces[1].data), c='k', linestyle=':',
             linewidth=0.3)
    plt.tight_layout()
    ax3 = plt.subplot2grid((5, 1), (2, 0))
    ax3.plot(zero_to_nan(s_syn.traces[0].data), c='r', linewidth=0.3)
    ax3.plot(zero_to_nan(s_obs.traces[0].data), c='k', linewidth=0.3)
    plt.tight_layout()
    ax4 = plt.subplot2grid((5, 1), (3, 0))
    ax4.plot(zero_to_nan(s_syn.traces[1].data), c='r', linewidth=0.3)
    ax4.plot(zero_to_nan(s_obs.traces[1].data), c='k', linewidth=0.3)
    plt.tight_layout()
    ax5 = plt.subplot2grid((5, 1), (4, 0))
    ax5.plot(zero_to_nan(s_syn.traces[2].data), c='r', linewidth=0.3)
    ax5.plot(zero_to_nan(s_obs.traces[2].data), c='k', linewidth=0.3)
    plt.tight_layout()
    plt.savefig(VALUES['directory'] + '/%.2f_%.2f.pdf' % (epi, depth))
    plt.close()

    # time =
    # Three-panel comparison of the full windowed streams (Z, R, T).
    ax1 = plt.subplot2grid((3, 1), (0, 0))
    ax1.plot(zero_to_nan(total_syn.traces[0].data), c='r', linewidth=0.5)
    ax1.plot(zero_to_nan(traces_obs.traces[0].data), c='k', linestyle=':',
             linewidth=0.5)
    ax1.set_title('SYNTHETIC: = epi: %.2f REAL: epi = %.2f (depth fixed' %
                  (epi, epi + 3))
    plt.tight_layout()
    ax2 = plt.subplot2grid((3, 1), (1, 0))
    ax2.plot(zero_to_nan(total_syn.traces[1].data), c='r', linewidth=0.5)
    ax2.plot(zero_to_nan(traces_obs.traces[1].data), c='k', linestyle=':',
             linewidth=0.5)
    plt.tight_layout()
    ax3 = plt.subplot2grid((3, 1), (2, 0))
    ax3.plot(zero_to_nan(total_syn.traces[2].data), c='r', linewidth=0.5)
    ax3.plot(zero_to_nan(traces_obs.traces[2].data), c='k', linestyle=':',
             linewidth=0.5)
    plt.tight_layout()
    plt.savefig(VALUES['directory'] + '/PS_%.2f_%.2f.pdf' % (epi, depth))
    plt.close()

    # Weighted body-wave misfit from cross-correlating the five windows.
    Xi_bw_new, time_shift_new, amplitude = misfit.CC_stream(
        p_obs, p_syn, s_obs, s_syn, p_time_obs, p_time_syn)
    s_z_new = 0.1 * Xi_bw_new[0]
    s_r_new = 0.1 * Xi_bw_new[1]
    s_t_new = 1 * Xi_bw_new[2]
    p_z_new = 5 * Xi_bw_new[3]
    p_r_new = 5 * Xi_bw_new[4]
    bw_new = s_z_new + s_r_new + s_t_new + p_z_new + p_r_new

    # L2 surface-wave misfits for Rayleigh and Love envelopes.
    Xi_R_new = misfit.SW_L2(R_env_obs, R_env_syn, PRIOR['var_est'], amplitude)
    Xi_L_new = misfit.SW_L2(L_env_obs, L_env_syn, PRIOR['var_est'], amplitude)
    R_dict_new = {}
    rw_new = 0
    for j, v in enumerate(Xi_R_new):
        R_dict_new.update({'R_%i_new' % j: v})
        rw_new += v
    L_dict_new = {}
    lw_new = 0
    for j, v in enumerate(Xi_L_new):
        L_dict_new.update({'L_%i_new' % j: v})
        lw_new += v
    Xi_new = bw_new + rw_new + lw_new
    a = 1
def Acces_Blindtest():
    """Run the MCMC blind-test workflow for one pre-defined Mars event.

    Reads a hard-coded miniSEED/StationXML pair, builds the sampler prior
    from the event's preferred origin, windows the observed body waves,
    picks surface waves using the MQS catalog and finally starts the MCMC
    sampler, writing results into the blind-test directory.

    NOTE(review): all paths are hard-coded to one machine; consider lifting
    them into parameters or module-level constants.
    """
    BLINDTEST_MSEED = '/home/nienke/Documents/Applied_geophysics/Thesis/anaconda/Database/data_Nienke/M5.0_3914855_deg_2019-09-22.mseed'
    # The StationXML is expected next to the waveform file with the same stem.
    BLINDTEST_XML = BLINDTEST_MSEED.replace(".mseed", ".xml")

    # Initiate parameters:
    get_parameters = Get_Paramters()
    PRIOR = get_parameters.get_prior()
    VALUES = get_parameters.specifications()
    VALUES['npts'] = 30000
    VALUES['directory'] = '/home/nienke/Documents/Applied_geophysics/Thesis/anaconda/Blindtest'

    # Initiate the databases from instaseis:
    db = instaseis.open_db(PRIOR['VELOC'])
    tr_obs = obspy.read(BLINDTEST_MSEED)
    # Downstream processing expects displacement, so integrate the traces.
    tr_obs.integrate()

    blindtest = Blindtest()
    events = blindtest.get_events(BLINDTEST_XML)
    time, depth, la_s, lo_s = blindtest.get_pref_origin(events[0])

    dist, az, baz = gps2dist_azimuth(lat1=la_s, lon1=lo_s,
                                     lat2=PRIOR['la_r'], lon2=PRIOR['lo_r'],
                                     a=PRIOR['radius'], f=0)
    epi = kilometer2degrees(dist, radius=PRIOR['radius'])
    PRIOR['az'] = az
    PRIOR['baz'] = baz
    # Search +-5 degrees around the catalog epicentral distance and
    # +-10 km around the catalog depth.
    PRIOR['epi']['range_min'] = epi - 5
    PRIOR['epi']['range_max'] = epi + 5
    PRIOR['epi']['spread'] = 1
    PRIOR['depth']['range_min'] = depth - 10000
    PRIOR['depth']['range_max'] = depth + 10000
    PRIOR['network'] = tr_obs.traces[0].meta.network
    PRIOR['location'] = tr_obs.traces[0].meta.location
    PRIOR['station'] = tr_obs.traces[0].meta.station

    est_noise = Create_observed(PRIOR, db)
    create = Source_code(PRIOR['VELOC_taup'])
    traces_obs, p_obs, s_obs, start_time_p, start_time_s = \
        create.get_window_obspy(tr_obs, epi, depth, time, VALUES['npts'])
    PRIOR['var_est'] = est_noise.get_var_data(start_time_p, tr_obs)

    plt.figure()

    catalog_path = '/home/nienke/Documents/Applied_geophysics/Thesis/anaconda/Additional_scripts/MQScatalog_withFrequencies/MQS_absolute_withFrequencyInfo.xml'
    catalog = Blindtest()
    events_catalog = catalog.get_events(catalog_path)

    # Find the catalog event whose preferred origin falls on the same day.
    # BUGFIX: previously Pick_event could stay unbound (NameError a few lines
    # further down) when no catalog event matched; fail early and clearly.
    Pick_event = None
    for v in events_catalog:
        t, d, lat_ev, lo_ev = catalog.get_pref_origin(v)
        if time.date == t.date:
            Pick_event = v
            break
    if Pick_event is None:
        raise ValueError(
            'No MQS catalog event matches the blind-test origin date')

    PRIOR['M0'] = catalog.get_pref_scalarmoment(Pick_event)
    picks_surface = get_phase_picks(Pick_event, pick_type='surface')
    R_env_obs, L_env_obs = blindtest.pick_sw(tr_obs, picks_surface, epi,
                                             PRIOR, 30000,
                                             VALUES['directory'],
                                             plot_modus=False)

    start_sample = create_starting_sample()
    # Draw a random starting focal mechanism from the prior ranges.
    strike = np.random.uniform(PRIOR['strike']['range_min'],
                               PRIOR['strike']['range_max'])
    dip = np.random.uniform(PRIOR['dip']['range_min'],
                            PRIOR['dip']['range_max'])
    rake = np.random.uniform(PRIOR['rake']['range_min'],
                             PRIOR['rake']['range_max'])
    sample_path = start_sample.get_sample_manual(
        epi, depth, strike, dip, rake,
        VALUES['directory'] + '/Blindtest_trialrun_sample.txt')

    mcmc = MCMC_stream(R_env_obs=R_env_obs, L_env_obs=L_env_obs,
                       total_traces_obs=traces_obs, P_traces_obs=p_obs,
                       S_traces_obs=s_obs, PRIOR=PRIOR, db=db,
                       specification_values=VALUES, time_at_receiver=time,
                       start_sample_path=sample_path,
                       picked_events=picks_surface, full_obs_trace=tr_obs,
                       P_start=start_time_p, S_start=start_time_s)
    mcmc.start_MCMC(VALUES['directory'] + '/Blindtest_trialrun.txt')
def build_tt_tables(minlat=None, maxlat=None, minlon=None, maxlon=None,
                    channel_codes=('EH', 'BH', 'HH'), db=None, maxdist=500.,
                    source_depth=5.):
    """Build 1D travel-time lookup tables for the phase associator.

    :param minlat: minimum latitude of the station search region
    :param maxlat: maximum latitude of the station search region
    :param minlon: minimum longitude of the station search region
    :param maxlon: maximum longitude of the station search region
    :param channel_codes: select channels that start with those codes
        (default now a tuple instead of a mutable list; iteration-only use,
        so fully backward compatible)
    :param db: sqlalchemy database URL for the travel-time tables
    :param maxdist: maximum distance in km
    :param source_depth: source depth in km; generally set to the average
        earthquake depth for the region you are working.  For more
        granularity use the 3D associator.
    :returns: the obspy station :class:`Inventory` that was saved
    """
    # Create a connection to an sqlalchemy database and ensure the 1D
    # travel-time tables exist.
    tt_engine = create_engine(db, echo=False)
    tt_stations_1D.BaseTT1D.metadata.create_all(tt_engine)
    TTSession = sessionmaker(bind=tt_engine)
    tt_session = TTSession()
    # Create a client to IRIS FDSN
    fdsnclient = fdsn.Client("IRIS")
    # Create an obspy inventory of stations
    # http://docs.obspy.org/packages/autogen/obspy.clients.fdsn.client.Client.get_stations.html
    inv = fdsnclient.get_stations(minlatitude=minlat, maxlatitude=maxlat,
                                  minlongitude=minlon, maxlongitude=maxlon,
                                  level='channel')
    # Plot our results just for fun.
    # BUGFIX: pass the boolean True, not the string 'True'.
    inv.plot(projection='ortho', color_per_network=True)
    # Now save these stations into the 1D travel-time table database.
    # The associator could be modified to interact with Obspy Inventory
    # objects directly.
    for net in inv:
        network = net.code
        for sta in net:
            # Collect the distinct location codes whose channel code starts
            # with one of the requested prefixes (re.match anchors at the
            # start of the string).
            loccodes = []
            for ch in sta:
                for cc in channel_codes:
                    if re.match(cc, ch.code):
                        if ch.location_code not in loccodes:
                            loccodes.append(ch.location_code)
            for loc in loccodes:
                station = tt_stations_1D.Station1D(sta.code, network, loc,
                                                   sta.latitude,
                                                   sta.longitude,
                                                   sta.elevation)
                # Save the station locations in the database
                tt_session.add(station)
        tt_session.commit()
    # Now we have to build our traveltime lookup tables.
    # We will use IASP91 here but obspy.taup does let you build your own
    # model.
    velmod = taup.TauPyModel(model='iasp91')
    # Define the distances we want to use in our lookup table.
    delta_distance = 1.  # km spacing for tt calculations
    # Probably better to use a progressive-type scheme instead of linear,
    # but this is an example.
    distance_km = np.arange(0, maxdist + delta_distance, delta_distance)
    for d_km in distance_km:
        d_deg = geodetics.kilometer2degrees(d_km)
        p_arrivals = velmod.get_travel_times(source_depth_in_km=source_depth,
                                             distance_in_degree=d_deg,
                                             phase_list=['P', 'p'])
        ptimes = [p.time for p in p_arrivals]
        s_arrivals = velmod.get_travel_times(source_depth_in_km=source_depth,
                                             distance_in_degree=d_deg,
                                             phase_list=['S', 's'])
        stimes = [s.time for s in s_arrivals]
        # BUGFIX: np.min() raises ValueError on an empty list; skip distances
        # where taup returns no direct P or S arrival instead of crashing.
        if not ptimes or not stimes:
            continue
        tt_entry = tt_stations_1D.TTtable1D(d_km, d_deg, np.min(ptimes),
                                            np.min(stimes),
                                            np.min(stimes) - np.min(ptimes))
        tt_session.add(tt_entry)
        # Probably faster to do the commit outside of the loop, but oh well.
        tt_session.commit()
    tt_session.close()
    return inv
def build_tt_tables(minlat=None, maxlat=None, minlon=None, maxlon=None, channel_codes=['EH', 'BH', 'HH'], db=None, maxdist=500., source_depth=5.): """ channel_codes select channels that start with those codes maximum distance is in km source depth is generally set to the average earthquake depth for the region you are working for more granularity use the 3D associator """ # Create a connection to an sqlalchemy database tt_engine = create_engine(db, echo=False) tt_stations_1D.BaseTT1D.metadata.create_all(tt_engine) TTSession = sessionmaker(bind=tt_engine) tt_session = TTSession() # Create a cliet to IRIS FDSN fdsnclient = fdsn.Client("IRIS") # Create an obspy inventory of stations #http://docs.obspy.org/packages/autogen/obspy.clients.fdsn.client.Client.get_stations.html#obspy.clients.fdsn.client.Client.get_stations inv = fdsnclient.get_stations(minlatitude=minlat, maxlatitude=maxlat, minlongitude=minlon, maxlongitude=maxlon, level='channel') # Plot our results just for fun inv.plot(projection='ortho', color_per_network='True') # Now save these station into the 1D travel-time table database # The associator could be modified to interact with Obspy Inventory objects for net in inv: network = net.code for sta in net: loccodes = [] for ch in sta: # print(ch) # print(dir(ch)) for cc in channel_codes: if re.match(cc, ch.code): if not ch.location_code in loccodes: loccodes.append(ch.location_code) for loc in loccodes: station = tt_stations_1D.Station1D(sta.code, network, loc, sta.latitude, sta.longitude, sta.elevation) # Save the station locations in the database tt_session.add(station) tt_session.commit() # Now we have to build our traveltime lookup tables # We will use IASP91 here but obspy.taup does let you build your own model velmod = taup.TauPyModel(model='iasp91') # Define our distances we want to use in our lookup table delta_distance = 1. 
# km for spacing tt calculations # Probably better to use a progressive type scheme instead of linear, but this is an example distance_km = np.arange(0, maxdist + delta_distance, delta_distance) for d_km in distance_km: d_deg = geodetics.kilometer2degrees(d_km) ptimes = [] stimes = [] p_arrivals = velmod.get_travel_times(source_depth_in_km=source_depth, distance_in_degree=d_deg, phase_list=['P', 'p']) for p in p_arrivals: ptimes.append(p.time) s_arrivals = velmod.get_travel_times(source_depth_in_km=source_depth, distance_in_degree=d_deg, phase_list=['S', 's']) for s in s_arrivals: stimes.append(s.time) tt_entry = tt_stations_1D.TTtable1D(d_km, d_deg, np.min(ptimes), np.min(stimes), np.min(stimes) - np.min(ptimes)) tt_session.add(tt_entry) tt_session.commit( ) # Probably faster to do the commit outside of loop but oh well tt_session.close() return inv
def read_nlloc_hyp(filename, coordinate_converter=None, picks=None, **kwargs):
    """
    Reads a NonLinLoc Hypocenter-Phase file to a
    :class:`~obspy.core.event.Catalog` object.

    .. note::

        Coordinate conversion from coordinate frame of NonLinLoc model files /
        location run to WGS84 has to be specified explicitly by the user if
        necessary.

    .. note::

        An example can be found on the :mod:`~obspy.io.nlloc` submodule front
        page in the documentation pages.

    :param filename: File or file-like object in text mode.
    :type coordinate_converter: func
    :param coordinate_converter: Function to convert (x, y, z)
        coordinates of NonLinLoc output to geographical coordinates and depth
        in meters (longitude, latitude, depth in kilometers).
        If left ``None``, NonLinLoc (x, y, z) output is left unchanged (e.g.
        if it is in geographical coordinates already like for NonLinLoc in
        global mode).
        The function should accept three arguments x, y, z (each of type
        :class:`numpy.ndarray`) and return a tuple of three
        :class:`numpy.ndarray` (lon, lat, depth in kilometers).
    :type picks: list of :class:`~obspy.core.event.Pick`
    :param picks: Original picks used to generate the NonLinLoc location.
        If provided, the output event will include the original picks and the
        arrivals in the output origin will link to them correctly (with their
        ``pick_id`` attribute). If not provided, the output event will include
        (the rather basic) pick information that can be reconstructed from the
        NonLinLoc hypocenter-phase file.
    :rtype: :class:`~obspy.core.event.Catalog`
    """
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume its a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        # NOTE(review): bare except clauses swallow *everything* (including
        # KeyboardInterrupt); narrowing to OSError / AttributeError /
        # UnicodeDecodeError would be safer.
        except:
            try:
                data = filename.decode()
            except:
                data = str(filename)
        data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()
    lines = data.splitlines()

    # remember picks originally used in location, if provided
    original_picks = picks
    if original_picks is None:
        original_picks = []

    # determine indices of block start/end of the NLLOC output file
    indices_hyp = [None, None]
    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("NLLOC "):
            indices_hyp[0] = i
        elif line.startswith("END_NLLOC"):
            indices_hyp[1] = i
        elif line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i
    if any([i is None for i in indices_hyp]):
        msg = ("NLLOC HYP file seems corrupt,"
               " could not detect 'NLLOC' and 'END_NLLOC' lines.")
        raise RuntimeError(msg)
    # strip any other lines around NLLOC block
    lines = lines[indices_hyp[0]:indices_hyp[1]]

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = ("NLLOC HYP file seems corrupt, 'PHASE' block is corrupt.")
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    # Each remaining line is "<KEY> <rest>"; keep them in a dict keyed by the
    # first token.
    lines = dict([line.split(None, 1) for line in lines])
    line = lines["SIGNATURE"]

    # SIGNATURE payload is quoted; last three space-separated tokens are
    # version, date and time.
    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    creation_time = UTCDateTime().strptime(date + time, str("%d%b%Y%Hh%Mm%S"))

    # maximum likelihood origin location info line
    line = lines["HYPOCENTER"]

    x, y, z = map(float, line.split()[1:7:2])

    if coordinate_converter:
        x, y, z = coordinate_converter(x, y, z)

    # origin time info line
    line = lines["GEOGRAPHIC"]

    year, month, day, hour, minute = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, month, day, hour, minute, seconds)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_xx = float(line.split()[7])
    covariance_yy = float(line.split()[13])
    covariance_zz = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from covariance "
        "matrix as 1D marginal (Lon/Lat errors as great circle degrees) "
        "while OriginUncertainty min/max horizontal errors are calculated "
        "from 2D error ellipsoid and are therefore seemingly higher compared "
        "to 1D errors. Error estimates can be reconstructed from the "
        "following original NonLinLoc error statistics line:\nSTATISTICS " +
        lines["STATISTICS"])

    # goto location quality info line
    line = lines["QML_OriginQuality"].split()

    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # goto location quality info line
    line = lines["QML_OriginUncertainty"]

    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    cat = Catalog(events=[event])
    o = Origin()
    event.origins = [o]
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string))

    cat.creation_info.creation_time = UTCDateTime()
    cat.creation_info.version = "ObsPy %s" % __version__
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version)
    event.creation_info.version = version
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates are
    # large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_xx))
    except ValueError:
        if covariance_xx < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_yy))
    except ValueError:
        if covariance_yy < 0:
            # NOTE(review): message says "longitude" although this is the
            # latitude branch -- looks like a copy/paste slip in the wording.
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_zz) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        wid = WaveformStreamID(station_code=station)
        date, hourmin, sec = map(str, line[6:9])
        t = UTCDateTime().strptime(date + hourmin, "%Y%m%d%H%M") + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not associate
            # the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick for "
                       "arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    return cat
tr.stats.coordinates = AttribDict({ 'latitude': tr.stats.coordinates['latitude'], 'longitude': tr.stats.coordinates['longitude'], 'elevation': tr.stats.coordinates['elevation'] }) az12, az21, dist = reviewData.pyproj_distaz( tr.stats.coordinates['latitude'], tr.stats.coordinates['longitude'], evlat, evlon, ) arr = model.get_travel_times(source_depth_in_km=dep, distance_in_degree=kilometer2degrees(dist), phase_list=['p', 'P']) tr.stats.rdist = dist tr.stats.ptime = arr[0].time # Sort by distance from fault. st.sort(['rdist']) # Determine picktimes. picktimes = [event_time + tr.stats.ptime for tr in st] # Demean data. st.detrend('demean') # Open interactive data analysis. zp = reviewData.InteractivePlot(st, picktimes=picktimes)
def _read_single_hypocenter(lines, coordinate_converter, original_picks):
    """
    Given a list of lines (starting with a 'NLLOC' line and ending with a
    'END_NLLOC' line), parse them into an Event.
    """
    try:
        # some paranoid checks..
        assert lines[0].startswith("NLLOC ")
        assert lines[-1].startswith("END_NLLOC")
        for line in lines[1:-1]:
            assert not line.startswith("NLLOC ")
            assert not line.startswith("END_NLLOC")
    except Exception:
        msg = ("This should not have happened, please report this as a bug at "
               "https://github.com/obspy/obspy/issues.")
        raise Exception(msg)

    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = ("NLLOC HYP file seems corrupt, 'PHASE' block is corrupt.")
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    # Each remaining line (END_NLLOC dropped via [:-1]) is "<KEY> <rest>";
    # keep them in a dict keyed by the first token.
    lines = dict([line.split(None, 1) for line in lines[:-1]])
    line = lines["SIGNATURE"]

    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    # new NLLoc > 6.0 seems to add prefix 'run:' before date
    if date.startswith('run:'):
        date = date[4:]
    signature = signature.strip()
    creation_time = UTCDateTime.strptime(date + time, str("%d%b%Y%Hh%Mm%S"))

    if coordinate_converter:
        # maximum likelihood origin location in km info line
        line = lines["HYPOCENTER"]
        x, y, z = coordinate_converter(*map(float, line.split()[1:7:2]))
    else:
        # maximum likelihood origin location lon lat info line
        line = lines["GEOGRAPHIC"]
        y, x, z = map(float, line.split()[8:13:2])

    # maximum likelihood origin time info line
    line = lines["GEOGRAPHIC"]

    year, mon, day, hour, min = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, mon, day, hour, min, seconds, strict=False)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_xx = float(line.split()[7])
    covariance_yy = float(line.split()[13])
    covariance_zz = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from covariance "
        "matrix as 1D marginal (Lon/Lat errors as great circle degrees) "
        "while OriginUncertainty min/max horizontal errors are calculated "
        "from 2D error ellipsoid and are therefore seemingly higher compared "
        "to 1D errors. Error estimates can be reconstructed from the "
        "following original NonLinLoc error statistics line:\nSTATISTICS " +
        lines["STATISTICS"])

    # goto location quality info line
    line = lines["QML_OriginQuality"].split()

    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # goto location quality info line
    line = lines["QML_OriginUncertainty"]

    # NOTE(review): if no "COMMENT" line is present, ``comment`` stays
    # unbound and the ``event.comments.append`` below raises NameError --
    # confirm whether COMMENT is guaranteed in the files this parses.
    if "COMMENT" in lines:
        comment = lines["COMMENT"].strip()
        comment = comment.strip('\'"')
        comment = comment.strip()

    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    o = Origin()
    event.origins = [o]
    event.preferred_origin_id = o.resource_id
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string,
                              force_resource_id=False))
    event.comments.append(Comment(text=comment, force_resource_id=False))

    # SIGNATURE field's first item is LOCSIG, which is supposed to be
    # 'Identification of an individual, institiution or other entity'
    # according to
    # http://alomax.free.fr/nlloc/soft6.00/control.html#_NLLoc_locsig_
    # so use it as author in creation info
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version,
                                       author=signature)
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version,
                                   author=signature)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates are
    # large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_xx))
    except ValueError:
        if covariance_xx < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_yy))
    except ValueError:
        if covariance_yy < 0:
            # NOTE(review): message says "longitude" in the latitude branch;
            # looks like a copy/paste slip in the wording.
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_zz) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        # network codes are not used by NonLinLoc, so they can not be known
        # when reading the .hyp file.. to conform with QuakeML standard set an
        # empty network code
        wid = WaveformStreamID(network_code="", station_code=station)
        # have to split this into ints for overflow to work correctly
        date, hourmin, sec = map(str, line[6:9])
        ymd = [int(date[:4]), int(date[4:6]), int(date[6:8])]
        hm = [int(hourmin[:2]), int(hourmin[2:4])]
        t = UTCDateTime(*(ymd + hm), strict=False) + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not associate
            # the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick for "
                       "arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    event.scope_resource_ids()

    return event
def get_inventory(
    client,
    tribe: Union[RealTimeTribe, Tribe],
    triggering_event: Event = None,
    location: dict = None,
    starttime: UTCDateTime = None,
    max_distance: float = 1000.,
    n_stations: int = 10,
    duration: float = 10,
    level: str = "channel",
    channel_list: Union[list, tuple] = ("EH?", "HH?"),
) -> Inventory:
    """
    Get a suitable inventory for a tribe - selects the most used, closest
    stations.

    Parameters
    ----------
    client:
        Obspy client with a get_stations service.
    tribe:
        Tribe or RealTimeTribe of templates to query for stations.
    triggering_event:
        Event with at least an origin to calculate distances from - if not
        specified will use `location`
    location:
        Dictionary with "latitude" and "longitude" keys - only used if
        `triggering event` is not specified.
    starttime:
        Start-time for station search - only used if `triggering_event` is
        not specified.
    max_distance:
        Maximum distance from `triggering_event.preferred_origin` or
        `location` to find stations. Units: km
    n_stations:
        Maximum number of stations to return
    duration:
        Duration stations must be active for. Units: days
    level:
        Level for inventory parsable by `client.get_stations`.
    channel_list
        List of channel-codes to be acquired. If `None` then all channels
        will be searched.

    Returns
    -------
    Inventory of the most used, closest stations.
    """
    inventory = Inventory(networks=[], source=None)

    # Work out the search centre and start time: either from the triggering
    # event's origin, or from the explicit location/starttime arguments.
    if triggering_event is not None:
        try:
            origin = (
                triggering_event.preferred_origin() or
                triggering_event.origins[0])
        except IndexError:
            Logger.error("Triggering event has no origin")
            return inventory
        lat, lon = origin.latitude, origin.longitude
        search_start = origin.time
    else:
        lat, lon = location["latitude"], location["longitude"]
        search_start = starttime

    # Query each requested channel code in turn; stations must be running for
    # the whole `duration` (days) and lie within `max_distance` km.
    search_end = search_start + (duration * 86400)
    search_radius = kilometer2degrees(max_distance)
    for channel_str in channel_list or ["*"]:
        try:
            inventory += client.get_stations(
                startbefore=search_start, endafter=search_end,
                channel=channel_str, latitude=lat, longitude=lon,
                maxradius=search_radius, level=level)
        except FDSNNoDataException:
            continue
    if len(inventory) == 0:
        return inventory

    # Count how often each station is picked across the tribe's templates.
    usage = Counter(
        pick.waveform_id.station_code
        for template in tribe for pick in template.event.picks)

    # Rank the found stations: most-used first, nearest breaks ties.
    ranked = [
        (sta.code,
         locations2degrees(lat1=lat, long1=lon,
                           lat2=sta.latitude, long2=sta.longitude),
         usage[sta.code])
        for net in inventory for sta in net]
    ranked.sort(key=lambda entry: (-entry[2], entry[1]))

    # Keep only the top n_stations stations.
    selected = inventory.select(station=ranked[0][0])
    for entry in ranked[1:n_stations]:
        selected += inventory.select(station=entry[0])
    return selected
def stochastic_simulation(home, project_name, rupture_name, sta, sta_lon, sta_lat, component, model_name, rise_time_depths, moho_depth_in_km, total_duration=100, hf_dt=0.01, stress_parameter=50, kappa=0.04, Qexp=0.6, Pwave=False, Swave=True, high_stress_depth=1e4): ''' Run stochastic HF sims stress parameter is in bars ''' from numpy import genfromtxt, pi, logspace, log10, mean, where, exp, arange, zeros, argmin, rad2deg, arctan2, real from pyproj import Geod from obspy.geodetics import kilometer2degrees from obspy.taup import TauPyModel from mudpy.forward import get_mu, write_fakequakes_hf_waveforms_one_by_one, read_fakequakes_hypo_time from obspy import Stream, Trace from sys import stdout import warnings #print out what's going on: out = '''Running with input parameters: home = %s project_name = %s rupture_name = %s sta = %s sta_lon = %s sta_lat = %s model_name = %s rise_time_depths = %s moho_depth_in_km = %s total_duration = %s hf_dt = %s stress_parameter = %s kappa = %s Qexp = %s component = %s Pwave = %s Swave = %s high_stress_depth = %s ''' % (home, project_name, rupture_name, sta, str(sta_lon), str(sta_lat), model_name, str(rise_time_depths), str(moho_depth_in_km), str(total_duration), str(hf_dt), str(stress_parameter), str(kappa), str(Qexp), str(component), str(Pwave), str(Swave), str(high_stress_depth)) print(out) # rupture=rupture_name.split('.')[0]+'.'+rupture_name.split('.')[1] # log=home+project_name+'/output/waveforms/'+rupture+'/'+sta+'.HN'+component+'.1cpu.log' # logfile=open(log,'w') # logfile.write(out) #print 'stress is '+str(stress_parameter) #I don't condone it but this cleans up the warnings warnings.filterwarnings("ignore") #Load the source fault = genfromtxt(home + project_name + '/output/ruptures/' + rupture_name) #Onset times for each subfault onset_times = fault[:, 12] #load velocity structure structure = genfromtxt(home + project_name + '/structure/' + model_name) #Frequencies vector f = logspace(log10(hf_dt), log10(1 / (2 * hf_dt)) + 
0.01, 50) omega = 2 * pi * f #Output time vector (0 is origin time) t = arange(0, total_duration, hf_dt) #Projection object for distance calculations g = Geod(ellps='WGS84') #Create taup velocity model object, paste on top of iaspei91 #taup_create.build_taup_model(home+project_name+'/structure/bbp_norcal.tvel',output_folder=home+project_name+'/structure/') velmod = TauPyModel(model=home + project_name + '/structure/iquique', verbose=True) #Get epicentral time epicenter, time_epi = read_fakequakes_hypo_time(home, project_name, rupture_name) #Moments slip = (fault[:, 8]**2 + fault[:, 9]**2)**0.5 subfault_M0 = slip * fault[:, 10] * fault[:, 11] * fault[:, 13] subfault_M0 = subfault_M0 * 1e7 #to dyne-cm M0 = subfault_M0.sum() relative_subfault_M0 = subfault_M0 / M0 Mw = (2. / 3) * (log10(M0 * 1e-7) - 9.1) #Corner frequency scaling i = where(slip > 0)[0] #Non-zero faults N = len(i) #number of subfaults dl = mean((fault[:, 10] + fault[:, 11]) / 2) #predominant length scale dl = dl / 1000 # to km #Tau=p perturbation tau_perturb = 0.1 #Deep faults receive a higher stress stress_multiplier = 3 print('... working on ' + component + ' component semistochastic waveform for station ' + sta) #initalize output seismogram tr = Trace() tr.stats.station = sta tr.stats.delta = hf_dt tr.stats.starttime = time_epi #info for sac header (added at the end) az, backaz, dist_m = g.inv(epicenter[0], epicenter[1], sta_lon, sta_lat) dist_in_km = dist_m / 1000. 
hf = zeros(len(t)) # out='''Parameters before we get into subfault calculations: # rupture_name = %s # epicenter = %s # time_epi = %s # M0 = %E # Mw = %10.4f # Num_Subfaults = %i # dl = %.2f # Dist_in_km = %10.4f # '''%(rupture_name,str(epicenter),str(time_epi),M0,Mw,int(N),dl,dist_in_km) # print out # logfile.write(out) #Loop over subfaults # earliestP=1e10 #something outrageously high # earliestP_kfault=1e10 for kfault in range(len(fault)): #Print status to screen if kfault % 150 == 0: if kfault == 0: stdout.write(' [') stdout.flush() stdout.write('.') stdout.flush() if kfault == len(fault) - 1: stdout.write(']\n') stdout.flush() #Include only subfaults with non-zero slip if subfault_M0[kfault] > 0: #Get subfault to station distance lon_source = fault[kfault, 1] lat_source = fault[kfault, 2] azimuth, baz, dist = g.inv(lon_source, lat_source, sta_lon, sta_lat) dist_in_degs = kilometer2degrees(dist / 1000.) #Source depth? z_source = fault[kfault, 3] #No change stress = stress_parameter #Is subfault in an SMGA? #radius_in_km=15.0 #smga_center_lon=-69.709200 #smga_center_lat=-19.683600 #in_smga=is_subfault_in_smga(lon_source,lat_source,smga_center_lon,smga_center_lat,radius_in_km) # ###Apply multiplier? #if in_smga==True: # stress=stress_parameter*stress_multiplier # print "%.4f,%.4f is in SMGA, stress is %d" % (lon_source,lat_source,stress) #else: # stress=stress_parameter #Apply multiplier? #if slip[kfault]>7.5: # stress=stress_parameter*stress_multiplier ##elif lon_source>-72.057 and lon_source<-71.2 and lat_source>-30.28: ## stress=stress_parameter*stress_multiplier #else: # stress=stress_parameter #Apply multiplier? 
#if z_source>high_stress_depth: # stress=stress_parameter*stress_multiplier #else: # stress=stress_parameter # Frankel 95 scaling of corner frequency #verified this looks the same in GP # Right now this applies the same factor to all faults fc_scale = (M0) / (N * stress * dl**3 * 1e21) #Frankel scaling small_event_M0 = stress * dl**3 * 1e21 #Get rho, alpha, beta at subfault depth zs = fault[kfault, 3] mu, alpha, beta = get_mu(structure, zs, return_speeds=True) rho = mu / beta**2 #Get radiation scale factor Spartition = 1 / 2**0.5 if component == 'N': component_angle = 0 elif component == 'E': component_angle = 90 rho = rho / 1000 #to g/cm**3 beta = (beta / 1000) * 1e5 #to cm/s alpha = (alpha / 1000) * 1e5 #Verified this produces same value as in GP CS = (2 * Spartition) / (4 * pi * (rho) * (beta**3)) CP = 2 / (4 * pi * (rho) * (alpha**3)) #Get local subfault rupture speed beta = beta / 100 #to m/s vr = get_local_rupture_speed(zs, beta, rise_time_depths) vr = vr / 1000 #to km/s dip_factor = get_dip_factor(fault[kfault, 5], fault[kfault, 8], fault[kfault, 9]) #Subfault corner frequency c0 = 2.0 #GP2015 value fc_subfault = (c0 * vr) / (dip_factor * pi * dl) #get subfault source spectrum #S=((relative_subfault_M0[kfault]*M0/N)*f**2)/(1+fc_scale*(f/fc_subfault)**2) S = small_event_M0 * (omega**2 / (1 + (f / fc_subfault)**2)) frankel_conv_operator = fc_scale * ( (fc_subfault**2 + f**2) / (fc_subfault**2 + fc_scale * f**2)) S = S * frankel_conv_operator #get high frequency decay P = exp(-pi * kappa * f) # if kfault==0: # out='''Parameters within subfault calculations: # kfault_lon = %10.4f # kfault_lat = %10.4f # CS = %s # CP = %s # S[0] = %s # frankel_conv_operator[0] = %s # '''%(fault[kfault,1],fault[kfault,2],str(CS),str(CP),str(S[0]),str(frankel_conv_operator[0])) # print out # logfile.write(out) #Get other geometric parameters necessar for radiation pattern strike = fault[kfault, 4] dip = fault[kfault, 5] ss = fault[kfault, 8] ds = fault[kfault, 9] rake = 
rad2deg(arctan2(ds, ss)) #Get ray paths for all direct P arrivals Ppaths = velmod.get_ray_paths(zs, dist_in_degs, phase_list=['P', 'p']) #Get ray paths for all direct S arrivals try: Spaths = velmod.get_ray_paths(zs, dist_in_degs, phase_list=['S', 's']) except: Spaths = velmod.get_ray_paths(zs + tau_perturb, dist_in_degs, phase_list=['S', 's']) #sometimes there's no S, weird I know. Check twice. if len(Spaths) == 0: Spaths = velmod.get_ray_paths(zs + tau_perturb, dist_in_degs, phase_list=['S', 's']) if len(Spaths) == 0: Spaths = velmod.get_ray_paths(zs + 5 * tau_perturb, dist_in_degs, phase_list=['S', 's']) if len(Spaths) == 0: Spaths = velmod.get_ray_paths(zs - 5 * tau_perturb, dist_in_degs, phase_list=['S', 's']) if len(Spaths) == 0: Spaths = velmod.get_ray_paths(zs + 5 * tau_perturb, dist_in_degs, phase_list=['S', 's']) if len(Spaths) == 0: Spaths = velmod.get_ray_paths(zs - 10 * tau_perturb, dist_in_degs, phase_list=['S', 's']) if len(Spaths) == 0: Spaths = velmod.get_ray_paths(zs + 10 * tau_perturb, dist_in_degs, phase_list=['S', 's']) if len(Spaths) == 0: Spaths = velmod.get_ray_paths(zs - 50 * tau_perturb, dist_in_degs, phase_list=['S', 's']) if len(Spaths) == 0: Spaths = velmod.get_ray_paths(zs + 50 * tau_perturb, dist_in_degs, phase_list=['S', 's']) if len(Spaths) == 0: Spaths = velmod.get_ray_paths(zs - 75 * tau_perturb, dist_in_degs, phase_list=['S', 's']) if len(Spaths) == 0: Spaths = velmod.get_ray_paths(zs + 75 * tau_perturb, dist_in_degs, phase_list=['S', 's']) if len(Spaths) == 0: print( 'ERROR: I give up, no direct S in spite of multiple attempts at subfault ' + str(kfault)) #Get direct s path and moho reflection mohoS = None directS = Spaths[0] directP = Ppaths[0] #print len(Spaths) if len(Spaths) == 1: #only direct S pass else: #turn_depth=zeros(len(Spaths)-1) #turning depth of other non-direct rays #for k in range(1,len(Spaths)): # turn_depth[k-1]=Spaths[k].path['depth'].max() ##If there's a ray that turns within 2km of Moho, callt hat guy the 
Moho reflection #deltaz=abs(turn_depth-moho_depth_in_km) #i=argmin(deltaz) #if deltaz[i]<2: #Yes, this is a moho reflection # mohoS=Spaths[i+1] #else: # mohoS=None mohoS = Spaths[-1] ####### Build Direct P ray ###### if Pwave == True: take_off_angle_P = directP.takeoff_angle #Get attenuation due to geometrical spreading (from the path length) path_length_P = get_path_length(directP, zs, dist_in_degs) path_length_P = path_length_P * 100 #to cm #Get effect of intrinsic attenuation for that ray (path integrated) Q_P = get_attenuation(f, structure, directP, Qexp, Qtype='P') #get quarter wavelength amplificationf actors # pass rho in kg/m^3 (this units nightmare is what I get for following Graves' code) I_P = get_amplification_factors(f, structure, zs, alpha, rho * 1000) #Build the entire path term G_P = (I_P * Q_P) / path_length_P #Get conically averaged radiation pattern terms RP = conically_avg_P_radiation_pattern(strike, dip, rake, azimuth, take_off_angle_P) RP = abs(RP) #Get partition of Pwave into Z and N,E components incidence_angle = directP.incident_angle Npartition, Epartition, Zpartition = get_P_wave_partition( incidence_angle, azimuth) if component == 'Z': Ppartition = Zpartition elif component == 'N': Ppartition = Npartition else: Ppartition = Epartition #And finally multiply everything together to get the subfault amplitude spectrum AP = CP * S * G_P * P * RP * Ppartition #Generate windowed time series duration = 1. / fc_subfault + 0.09 * (dist / 1000) w = windowed_gaussian(duration, hf_dt, window_type='saragoni_hart') #Go to frequency domain, apply amplitude spectrum and ifft for final time series hf_seis_P = apply_spectrum(w, AP, f, hf_dt) #What time after OT should this time series start at? 
time_insert = directP.path['time'][-1] + onset_times[kfault] # if directP.time+onset_times[kfault] < earliestP: # earliestP=directP.time+onset_times[kfault] # earliestP_kfault=kfault i = argmin(abs(t - time_insert)) j = i + len(hf_seis_P) #Check seismogram doesn't go past last sample if i < len( hf ) - 1: #if i (the beginning of the seimogram) is less than the length if j > len( hf ): #seismogram goes past total_duration length, trim it len_paste = len(hf) - i j = len(hf) #Add seismogram hf[i:j] = hf[i:j] + real(hf_seis_P[0:len_paste]) else: #Lengths are fine hf[i:j] = hf[i:j] + real(hf_seis_P) else: #Seismogram starts after end of available space pass ####### Build Direct S ray ###### if Swave == True: take_off_angle_S = directS.takeoff_angle #Get attenuation due to geometrical spreading (from the path length) path_length_S = get_path_length(directS, zs, dist_in_degs) path_length_S = path_length_S * 100 #to cm #Get effect of intrinsic aptimeenuation for that ray (path integrated) Q_S = get_attenuation(f, structure, directS, Qexp) #get quarter wavelength amplificationf actors # pass rho in kg/m^3 (this units nightmare is what I get for following Graves' code) I_S = get_amplification_factors(f, structure, zs, beta, rho * 1000) #Build the entire path term G_S = (I_S * Q_S) / path_length_S #Get conically averaged radiation pattern terms if component == 'Z': RP_vert = conically_avg_vert_radiation_pattern( strike, dip, rake, azimuth, take_off_angle_S) #And finally multiply everything together to get the subfault amplitude spectrum AS = CS * S * G_S * P * RP_vert else: RP = conically_avg_radiation_pattern( strike, dip, rake, azimuth, take_off_angle_S, component_angle) RP = abs(RP) #And finally multiply everything together to get the subfault amplitude spectrum AS = CS * S * G_S * P * RP #Generate windowed time series duration = 1. 
/ fc_subfault + 0.063 * (dist / 1000) w = windowed_gaussian(duration, hf_dt, window_type='saragoni_hart') #w=windowed_gaussian(3*duration,hf_dt,window_type='cua',ptime=Ppaths[0].path['time'][-1],stime=Spaths[0].path['time'][-1]) #Go to frequency domain, apply amplitude spectrum and ifft for final time series hf_seis_S = apply_spectrum(w, AS, f, hf_dt) #What time after OT should this time series start at? time_insert = directS.path['time'][-1] + onset_times[kfault] #print 'ts = '+str(time_insert)+' , Td = '+str(duration) #time_insert=Ppaths[0].path['time'][-1] i = argmin(abs(t - time_insert)) j = i + len(hf_seis_S) #Check seismogram doesn't go past last sample if i < len( hf ) - 1: #if i (the beginning of the seimogram) is less than the length if j > len( hf ): #seismogram goes past total_duration length, trim it len_paste = len(hf) - i j = len(hf) #Add seismogram hf[i:j] = hf[i:j] + real(hf_seis_S[0:len_paste]) else: #Lengths are fine hf[i:j] = hf[i:j] + real(hf_seis_S) else: #Beginning of seismogram is past end of available space pass ####### Build Moho reflected S ray ###### # if mohoS==None: # pass # else: # if kfault%100==0: # print '... ... 
building Moho reflected S wave' # take_off_angle_mS=mohoS.takeoff_angle # # #Get attenuation due to geometrical spreading (from the path length) # path_length_mS=get_path_length(mohoS,zs,dist_in_degs) # path_length_mS=path_length_mS*100 #to cm # # #Get effect of intrinsic aptimeenuation for that ray (path integrated) # Q_mS=get_attenuation(f,structure,mohoS,Qexp) # # #Build the entire path term # G_mS=(I*Q_mS)/path_length_mS # # #Get conically averaged radiation pattern terms # if component=='Z': # RP_vert=conically_avg_vert_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_mS) # #And finally multiply everything together to get the subfault amplitude spectrum # A=C*S*G_mS*P*RP_vert # else: # RP=conically_avg_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_mS,component_angle) # RP=abs(RP) # #And finally multiply everything together to get the subfault amplitude spectrum # A=C*S*G_mS*P*RP # # #Generate windowed time series # duration=1./fc_subfault+0.063*(dist/1000) # w=windowed_gaussian(duration,hf_dt,window_type='saragoni_hart') # #w=windowed_gaussian(3*duration,hf_dt,window_type='cua',ptime=Ppaths[0].path['time'][-1],stime=Spaths[0].path['time'][-1]) # # #Go to frequency domain, apply amplitude spectrum and ifft for final time series # hf_seis=apply_spectrum(w,A,f,hf_dt) # # #What time after OT should this time series start at? 
# time_insert=mohoS.path['time'][-1]+onset_times[kfault] # #print 'ts = '+str(time_insert)+' , Td = '+str(duration) # #time_insert=Ppaths[0].path['time'][-1] # i=argmin(abs(t-time_insert)) # j=i+len(hf_seis) # # #Add seismogram # hf[i:j]=hf[i:j]+hf_seis # # #Done, reset # mohoS=None # if kfault==0: # out=''' More: # fc_scale = %10.4f # subfaultM0 = %E # mu = %E # CS = %E # CP = %E # vr = %10.4f # dip_factor = %10.4f # fc_subfault = %10.4f # directS = %s # directP = %s # '''%(fc_scale,subfault_M0[kfault],mu,CS,CP,vr,dip_factor,fc_subfault,str(directS.time),str(directP.time)) # print out # logfile.write(out) # logfile.close() #Done tr.data = hf / 100 #convert to m/s**2 #Add station location, event location, and first P-wave arrival time to SAC header tr.stats.update({ 'sac': { 'stlo': sta_lon, 'stla': sta_lat, 'evlo': epicenter[0], 'evla': epicenter[1], 'evdp': epicenter[2], 'dist': dist_in_km, 'az': az, 'baz': backaz, 'mag': Mw } }) #,'idep':"ACC (m/s^2)" not sure why idep won't work #Return trace for writing to file # print "Earliest P wave Comes at " + str(earliestP) + "after OT, from location " + str(fault[earliestP_kfault,1]) + ", " + str(fault[earliestP_kfault,2]) + ", " +str(fault[earliestP_kfault,3]) return tr
# Quick-look plotting script: reads CPS synthetic group-velocity (Tgr) and
# amplitude (Amp) tables and plots group velocity vs. distance.
import matplotlib.pyplot as plt
from scipy import stats
from obspy.geodetics import kilometer2degrees

# NOTE(review): `np` is used below but numpy is not imported in this chunk --
# presumably `import numpy as np` appears earlier in the file; confirm.
infname = '../cps_data_dir/syndata_dir_all_EX/Tgr_10.0.txt'
# infname = '/media/lili/BCD29BBBD29B787A/cps_data_dir/syndata_dir_all_DD/Tgr_10.0.txt'
# infname = './syndata_dir_000_DD/Tgr_10.0.txt'
inArr=np.loadtxt(infname)
fig1, ax1 = plt.subplots()
# Column 2 is treated as travel time T and column 3 as distance in km
# (Vgr is computed below as distance/T and labelled km/s).
T=inArr[:,2]
DistArr=inArr[:,3]
# Sort every array by distance so the line plot is monotonic in x.
ind=np.argsort(DistArr)
DistArr=DistArr[ind]
T=T[ind]
# Epicentral distance in degrees -- computed but not used further below.
DeltaArr=kilometer2degrees(DistArr)
# Group velocity (km/s) = distance / travel time.
VgrArr=DistArr/T
ax1.plot(DistArr, VgrArr,'--o' , markersize=10);
plt.ylabel('Vgr (km/s) ', fontsize=40);
plt.xlabel('Distance (km)', fontsize=40);
plt.title('Group Velocity', fontsize=40);
ax1.tick_params(axis='x', labelsize=30)
ax1.tick_params(axis='y', labelsize=30)
plt.ylim([2.94, 3.1])
# Second figure: amplitude vs. distance from the Amp table.
fig2, ax2 = plt.subplots()
infname = '../cps_data_dir/syndata_dir_all_EX/Amp_10.0.txt'
# infname = '/media/lili/BCD29BBBD29B787A/cps_data_dir/syndata_dir_all_DD/Amp_10.0.txt'
inArr2=np.loadtxt(infname)
# Scale amplitude by 1e6 -- unit conversion; TODO confirm target units.
AmpArr=inArr2[:,2]*1000000.
DistArr=inArr2[:,3]
def readpicks(sfile):
    """
    Read all pick information from the s-file to an obspy.event.Catalog type.

    .. note:: This was changed for version 0.1.0 from using the inbuilt \
    PICK class.

    :type sfile: str
    :param sfile: Path to sfile
    :return: obspy.core.event.Event

    .. warning:: Currently finalweight is unsupported, nor is velocity, \
    or angle of incidence. This is because obspy.event stores slowness \
    in s/deg and takeoff angle, which would require computation from the \
    values stored in seisan. Multiple weights are also not supported in \
    Obspy.event.

    .. rubric:: Example

    >>> event = readpicks('eqcorrscan/tests/test_data/REA/TEST_/' +
    ...                   '01-0411-15L.S201309')
    >>> print(event.origins[0].time)
    2013-09-01T04:11:15.700000Z
    >>> print(event.picks[0].time)
    2013-09-01T04:11:17.240000Z
    """
    from obspy.core.event import Pick, WaveformStreamID, Arrival, Amplitude
    # Get wavefile name for use in resource_ids
    wav_names = readwavename(sfile)
    # First we need to read the header to get the timing info
    new_event = readheader(sfile)
    evtime = new_event.origins[0].time
    f = open(sfile, 'r')
    pickline = []
    # Set a default, ignored later unless overwritten
    SNR = 999
    # `headerend` is used as a "seen the type-7 header yet?" sentinel via
    # locals(); delete any stale binding before the scan loop below.
    if 'headerend' in locals():
        del headerend
    # First pass over the file: remember the type-7 column-header line and
    # collect every subsequent 79/80-character phase line (type 4 / blank).
    for lineno, line in enumerate(f):
        if 'headerend' in locals():
            if len(line.rstrip('\n').rstrip('\r')) in [80, 79] and \
               (line[79] == ' ' or line[79] == '4' or line[79] == '\n'):
                pickline += [line]
        elif line[79] == '7':
            # NOTE(review): `header` stays unbound if the file has no type-7
            # line, which would raise NameError further down -- confirm all
            # inputs are well-formed s-files.
            header = line
            headerend = lineno
    amplitude_index = 0
    # Second pass: decode each fixed-column phase line into obspy objects.
    for pick_index, line in enumerate(pickline):
        if line[18:28].strip() == '':  # If line is empty miss it
            continue
        station = line[1:6].strip()
        channel = line[6:8].strip()
        network = 'NA'  # No network information provided in Sfile.
        weight = line[14]
        # '_' in the weight column marks a long (7-char) phase name with no
        # polarity field; otherwise phase is 4 chars and polarity is col 16.
        if weight == '_':
            phase = line[10:17]
            weight = 0
            polarity = ''
        else:
            phase = line[10:14].strip()
            polarity = line[16]
            if weight == ' ':
                weight = 0
        # Map seisan polarity codes onto obspy's vocabulary.
        if polarity == '':
            polarity = "undecidable"
        elif polarity == 'C':
            polarity = "positive"
        elif polarity == 'D':
            polarity = 'negative'
        else:
            polarity = "undecidable"
        # Hour 24 wraps onto the next day relative to the event origin time.
        if int(line[18:20]) == 24:
            pickhr = 0
            pickday = evtime + 86400
        else:
            pickhr = int(line[18:20])
            pickday = evtime
        try:
            time = UTCDateTime(pickday.year, pickday.month, pickday.day,
                               pickhr, int(line[20:22]),
                               int(line[23:28].split('.')[0]),
                               int(line[23:28].split('.')[1]) * 10000)
        except ValueError:
            # Seconds of exactly 60 raise ValueError; rebuild at 0 seconds
            # and shift by one minute instead.
            time = UTCDateTime(evtime.year, evtime.month, evtime.day,
                               pickhr, int(line[20:22]), 0, 0)
            time += 60  # Add 60 seconds on to the time, this copes with s-file
            # preference to write seconds in 1-60 rather than 0-59 which
            # datetime objects accept
        # Fixed-column numeric fields; the _*_conv helpers return 999/999.0
        # style sentinels for blanks (inferred from the sentinel checks below).
        coda = _int_conv(line[28:33])
        amplitude = _float_conv(line[33:40])
        peri = _float_conv(line[41:45])
        azimuth = _float_conv(line[46:51])
        velocity = _float_conv(line[52:56])
        # Columns 57-60 hold either angle-of-incidence or SNR depending on
        # the type-7 header. NOTE(review): AIN is read but never used, and
        # SNR persists across picks once set -- confirm this is intended.
        if header[57:60] == 'AIN':
            AIN = _float_conv(line[57:60])
        elif header[57:60] == 'SNR':
            SNR = _float_conv(line[57:60])
        azimuthres = _int_conv(line[60:63])
        timeres = _float_conv(line[63:68])
        finalweight = _int_conv(line[68:70])
        # Epicentral distance arrives in km; obspy wants degrees.
        distance = kilometer2degrees(_float_conv(line[70:75]))
        CAZ = _int_conv(line[76:79])
        # Create a new obspy.event.Pick class for this pick
        _waveform_id = WaveformStreamID(station_code=station,
                                        channel_code=channel,
                                        network_code=network)
        new_event.picks.append(
            Pick(waveform_id=_waveform_id, phase_hint=phase,
                 polarity=polarity, time=time))
        if line[9] == 'I':
            new_event.picks[pick_index].onset = 'impulsive'
        elif line[9] == 'E':
            new_event.picks[pick_index].onset = 'emergent'
        if line[15] == 'A':
            new_event.picks[pick_index].evaluation_mode = 'automatic'
        else:
            new_event.picks[pick_index].evaluation_mode = 'manual'
        # Note these two are not always filled - velocity conversion not yet
        # implimented, needs to be converted from km/s to s/deg
        # if not velocity == 999.0:
        #     new_event.picks[pick_index].horizontal_slowness = 1.0 / velocity
        if not azimuth == 999:
            new_event.picks[pick_index].backazimuth = azimuth
        del _waveform_id
        # Create new obspy.event.Amplitude class which references above Pick
        # only if there is an amplitude picked.
        if not amplitude == 999.0:
            new_event.amplitudes.append(
                Amplitude(generic_amplitude=amplitude, period=peri,
                          pick_id=new_event.picks[pick_index].resource_id,
                          waveform_id=new_event.picks[pick_index].waveform_id))
            if new_event.picks[pick_index].phase_hint == 'IAML':
                # Amplitude for local magnitude
                new_event.amplitudes[amplitude_index].type = 'AML'
                # Set to be evaluating a point in the trace
                new_event.amplitudes[amplitude_index].category = 'point'
                # Default AML unit in seisan is nm (Page 139 of seisan
                # documentation, version 10.0)
                new_event.amplitudes[amplitude_index].generic_amplitude /=\
                    10**9
                new_event.amplitudes[amplitude_index].unit = 'm'
                new_event.amplitudes[amplitude_index].magnitude_hint = 'ML'
            else:
                # Generic amplitude type
                new_event.amplitudes[amplitude_index].type = 'A'
            if not SNR == 999.0:
                new_event.amplitudes[amplitude_index].snr = SNR
            amplitude_index += 1
        elif not coda == 999:
            # Create an amplitude instance for code duration also
            new_event.amplitudes.append(
                Amplitude(generic_amplitude=coda,
                          pick_id=new_event.picks[pick_index].resource_id,
                          waveform_id=new_event.picks[pick_index].waveform_id))
            # Amplitude for coda magnitude
            new_event.amplitudes[amplitude_index].type = 'END'
            # Set to be evaluating a point in the trace
            new_event.amplitudes[amplitude_index].category = 'duration'
            new_event.amplitudes[amplitude_index].unit = 's'
            new_event.amplitudes[amplitude_index].magnitude_hint = 'Mc'
            if SNR and not SNR == 999.0:
                new_event.amplitudes[amplitude_index].snr = SNR
            amplitude_index += 1
        # Create new obspy.event.Arrival class referencing above Pick
        new_event.origins[0].arrivals.append(
            Arrival(phase=new_event.picks[pick_index].phase_hint,
                    pick_id=new_event.picks[pick_index].resource_id))
        if weight != 999:
            new_event.origins[0].arrivals[pick_index].time_weight =\
                weight
        if azimuthres != 999:
            new_event.origins[0].arrivals[pick_index].backazimuth_residual =\
                azimuthres
        if timeres != 999:
            new_event.origins[0].arrivals[pick_index].time_residual =\
                timeres
        if distance != 999:
            new_event.origins[0].arrivals[pick_index].distance =\
                distance
        if CAZ != 999:
            new_event.origins[0].arrivals[pick_index].azimuth =\
                CAZ
    f.close()
    # Write event to catalog object for ease of .write() method
    return new_event
def Get_location(la_s, lo_s, la_r, lo_r, radius=3389.5, flattening=0):
    """
    Compute epicentral distance and azimuths between a source and a receiver.

    :param la_s: source latitude in degrees
    :param lo_s: source longitude in degrees
    :param la_r: receiver latitude in degrees
    :param lo_r: receiver longitude in degrees
    :param radius: body radius passed as the ellipsoid semi-major axis
        (default 3389.5, presumably the Mars radius in km -- the distance
        returned by gps2dist_azimuth is then in the same km units)
    :param flattening: ellipsoid flattening (default 0, i.e. a sphere)
    :return: tuple of (epicentral distance in degrees, azimuth, back-azimuth)
    """
    distance_km, azimuth, back_azimuth = gps2dist_azimuth(
        lat1=la_s,
        lon1=lo_s,
        lat2=la_r,
        lon2=lo_r,
        a=radius,
        f=flattening,
    )
    # Convert the great-circle distance to degrees on a body of this radius.
    epicentral_deg = kilometer2degrees(distance_km, radius=radius)
    return epicentral_deg, azimuth, back_azimuth
def stochastic_simulation(home,project_name,rupture_name,sta,sta_lon,sta_lat,component,model_name, rise_time_depths,moho_depth_in_km,total_duration=100,hf_dt=0.01,stress_parameter=50, kappa=0.04,Qexp=0.6,Pwave=False,high_stress_depth=1e4): ''' Run stochastic HF sims stress parameter is in bars ''' from numpy import genfromtxt,pi,logspace,log10,mean,where,exp,arange,zeros,argmin,rad2deg,arctan2,real from pyproj import Geod from obspy.geodetics import kilometer2degrees from obspy.taup import TauPyModel from mudpy.forward import get_mu, write_fakequakes_hf_waveforms_one_by_one,read_fakequakes_hypo_time from obspy import Stream,Trace from sys import stdout import warnings #print out what's going on: out='''Running with input parameters: home = %s project_name = %s rupture_name = %s sta = %s sta_lon = %s sta_lat = %s model_name = %s rise_time_depths = %s moho_depth_in_km = %s total_duration = %s hf_dt = %s stress_parameter = %s kappa = %s Qexp = %s component = %s Pwave = %s high_stress_depth = %s '''%(home,project_name,rupture_name,sta,str(sta_lon),str(sta_lat),model_name,str(rise_time_depths), str(moho_depth_in_km),str(total_duration),str(hf_dt),str(stress_parameter), str(kappa),str(Qexp),str(component),str(Pwave),str(high_stress_depth)) print out # rupture=rupture_name.split('.')[0]+'.'+rupture_name.split('.')[1] # log=home+project_name+'/output/waveforms/'+rupture+'/'+sta+'.HN'+component+'.1cpu.log' # logfile=open(log,'w') # logfile.write(out) #print 'stress is '+str(stress_parameter) #I don't condone it but this cleans up the warnings warnings.filterwarnings("ignore") #Load the source fault=genfromtxt(home+project_name+'/output/ruptures/'+rupture_name) #Onset times for each subfault onset_times=fault[:,12] #load velocity structure structure=genfromtxt(home+project_name+'/structure/'+model_name) #Frequencies vector f=logspace(log10(hf_dt),log10(1/(2*hf_dt))+0.01,50) omega=2*pi*f #Output time vector (0 is origin time) t=arange(0,total_duration,hf_dt) #Projection 
object for distance calculations g=Geod(ellps='WGS84') #Create taup velocity model object, paste on top of iaspei91 #taup_create.build_taup_model(home+project_name+'/structure/bbp_norcal.tvel',output_folder=home+project_name+'/structure/') velmod=TauPyModel(model=home+project_name+'/structure/maule',verbose=True) #Get epicentral time epicenter,time_epi=read_fakequakes_hypo_time(home,project_name,rupture_name) #Moments slip=(fault[:,8]**2+fault[:,9]**2)**0.5 subfault_M0=slip*fault[:,10]*fault[:,11]*fault[:,13] subfault_M0=subfault_M0*1e7 #to dyne-cm M0=subfault_M0.sum() relative_subfault_M0=subfault_M0/M0 Mw=(2./3)*(log10(M0*1e-7)-9.1) #Corner frequency scaling i=where(slip>0)[0] #Non-zero faults N=len(i) #number of subfaults dl=mean((fault[:,10]+fault[:,11])/2) #predominant length scale dl=dl/1000 # to km #Tau=p perturbation tau_perturb=0.1 #Deep faults receive a higher stress stress_multiplier=3 print '... working on '+component+' component semistochastic waveform for station '+sta #initalize output seismogram tr=Trace() tr.stats.station=sta tr.stats.delta=hf_dt tr.stats.starttime=time_epi #info for sac header (added at the end) az,backaz,dist_m=g.inv(epicenter[0],epicenter[1],sta_lon,sta_lat) dist_in_km=dist_m/1000. 
hf=zeros(len(t)) # out='''Parameters before we get into subfault calculations: # rupture_name = %s # epicenter = %s # time_epi = %s # M0 = %E # Mw = %10.4f # Num_Subfaults = %i # dl = %.2f # Dist_in_km = %10.4f # '''%(rupture_name,str(epicenter),str(time_epi),M0,Mw,int(N),dl,dist_in_km) # print out # logfile.write(out) #Loop over subfaults # earliestP=1e10 #something outrageously high # earliestP_kfault=1e10 for kfault in range(len(fault)): #Print status to screen if kfault % 150 == 0: if kfault==0: stdout.write(' [') stdout.flush() stdout.write('.') stdout.flush() if kfault==len(fault)-1: stdout.write(']\n') stdout.flush() #Include only subfaults with non-zero slip if subfault_M0[kfault]>0: #Get subfault to station distance lon_source=fault[kfault,1] lat_source=fault[kfault,2] azimuth,baz,dist=g.inv(lon_source,lat_source,sta_lon,sta_lat) dist_in_degs=kilometer2degrees(dist/1000.) #Source depth? z_source=fault[kfault,3] #No change stress=stress_parameter #Is subfault in an SMGA? #radius_in_km=15.0 #smga_center_lon=-69.709200 #smga_center_lat=-19.683600 #in_smga=is_subfault_in_smga(lon_source,lat_source,smga_center_lon,smga_center_lat,radius_in_km) # ###Apply multiplier? #if in_smga==True: # stress=stress_parameter*stress_multiplier # print "%.4f,%.4f is in SMGA, stress is %d" % (lon_source,lat_source,stress) #else: # stress=stress_parameter #Apply multiplier? #if slip[kfault]>7.5: # stress=stress_parameter*stress_multiplier ##elif lon_source>-72.057 and lon_source<-71.2 and lat_source>-30.28: ## stress=stress_parameter*stress_multiplier #else: # stress=stress_parameter #Apply multiplier? 
#if z_source>high_stress_depth: # stress=stress_parameter*stress_multiplier #else: # stress=stress_parameter # Frankel 95 scaling of corner frequency #verified this looks the same in GP # Right now this applies the same factor to all faults fc_scale=(M0)/(N*stress*dl**3*1e21) #Frankel scaling small_event_M0 = stress*dl**3*1e21 #Get rho, alpha, beta at subfault depth zs=fault[kfault,3] mu,alpha,beta=get_mu(structure,zs,return_speeds=True) rho=mu/beta**2 #Get radiation scale factor Spartition=1/2**0.5 if component=='N' : component_angle=0 elif component=='E': component_angle=90 rho=rho/1000 #to g/cm**3 beta=(beta/1000)*1e5 #to cm/s alpha=(alpha/1000)*1e5 #Verified this produces same value as in GP CS=(2*Spartition)/(4*pi*(rho)*(beta**3)) CP=2/(4*pi*(rho)*(alpha**3)) #Get local subfault rupture speed beta=beta/100 #to m/s vr=get_local_rupture_speed(zs,beta,rise_time_depths) vr=vr/1000 #to km/s dip_factor=get_dip_factor(fault[kfault,5],fault[kfault,8],fault[kfault,9]) #Subfault corner frequency c0=2.0 #GP2015 value fc_subfault=(c0*vr)/(dip_factor*pi*dl) #get subfault source spectrum #S=((relative_subfault_M0[kfault]*M0/N)*f**2)/(1+fc_scale*(f/fc_subfault)**2) S=small_event_M0*(omega**2/(1+(f/fc_subfault)**2)) frankel_conv_operator= fc_scale*((fc_subfault**2+f**2)/(fc_subfault**2+fc_scale*f**2)) S=S*frankel_conv_operator #get high frequency decay P=exp(-pi*kappa*f) #get quarter wavelength amplificationf actors # pass rho in kg/m^3 (this units nightmare is what I get for following Graves' code) I=get_amplification_factors(f,structure,zs,beta,rho*1000) # if kfault==0: # out='''Parameters within subfault calculations: # kfault_lon = %10.4f # kfault_lat = %10.4f # CS = %s # CP = %s # S[0] = %s # frankel_conv_operator[0] = %s # '''%(fault[kfault,1],fault[kfault,2],str(CS),str(CP),str(S[0]),str(frankel_conv_operator[0])) # print out # logfile.write(out) #Get other geometric parameters necessar for radiation pattern strike=fault[kfault,4] dip=fault[kfault,5] ss=fault[kfault,8] 
ds=fault[kfault,9] rake=rad2deg(arctan2(ds,ss)) #Get ray paths for all direct P arrivals Ppaths=velmod.get_ray_paths(zs,dist_in_degs,phase_list=['P','p']) #Get ray paths for all direct S arrivals try: Spaths=velmod.get_ray_paths(zs,dist_in_degs,phase_list=['S','s']) except: Spaths=velmod.get_ray_paths(zs+tau_perturb,dist_in_degs,phase_list=['S','s']) #sometimes there's no S, weird I know. Check twice. if len(Spaths)==0: Spaths=velmod.get_ray_paths(zs+tau_perturb,dist_in_degs,phase_list=['S','s']) if len(Spaths)==0: Spaths=velmod.get_ray_paths(zs+5*tau_perturb,dist_in_degs,phase_list=['S','s']) if len(Spaths)==0: Spaths=velmod.get_ray_paths(zs-5*tau_perturb,dist_in_degs,phase_list=['S','s']) if len(Spaths)==0: Spaths=velmod.get_ray_paths(zs+5*tau_perturb,dist_in_degs,phase_list=['S','s']) if len(Spaths)==0: Spaths=velmod.get_ray_paths(zs-10*tau_perturb,dist_in_degs,phase_list=['S','s']) if len(Spaths)==0: Spaths=velmod.get_ray_paths(zs+10*tau_perturb,dist_in_degs,phase_list=['S','s']) if len(Spaths)==0: Spaths=velmod.get_ray_paths(zs-50*tau_perturb,dist_in_degs,phase_list=['S','s']) if len(Spaths)==0: Spaths=velmod.get_ray_paths(zs+50*tau_perturb,dist_in_degs,phase_list=['S','s']) if len(Spaths)==0: Spaths=velmod.get_ray_paths(zs-75*tau_perturb,dist_in_degs,phase_list=['S','s']) if len(Spaths)==0: Spaths=velmod.get_ray_paths(zs+75*tau_perturb,dist_in_degs,phase_list=['S','s']) if len(Spaths)==0: print 'ERROR: I give up, no direct S in spite of multiple attempts at subfault '+str(kfault) #Get direct s path and moho reflection mohoS=None directS=Spaths[0] directP=Ppaths[0] #print len(Spaths) if len(Spaths)==1: #only direct S pass else: #turn_depth=zeros(len(Spaths)-1) #turning depth of other non-direct rays #for k in range(1,len(Spaths)): # turn_depth[k-1]=Spaths[k].path['depth'].max() ##If there's a ray that turns within 2km of Moho, callt hat guy the Moho reflection #deltaz=abs(turn_depth-moho_depth_in_km) #i=argmin(deltaz) #if deltaz[i]<2: #Yes, this is a moho 
reflection # mohoS=Spaths[i+1] #else: # mohoS=None mohoS=Spaths[-1] ####### Build Direct P ray ###### if Pwave==True: take_off_angle_P=directP.takeoff_angle #Get attenuation due to geometrical spreading (from the path length) path_length_P=get_path_length(directP,zs,dist_in_degs) path_length_P=path_length_P*100 #to cm #Get effect of intrinsic attenuation for that ray (path integrated) Q_P=get_attenuation(f,structure,directS,Qexp,Qtype='P') #Build the entire path term G_P=(I*Q_P)/path_length_P #Get conically averaged radiation pattern terms RP=conically_avg_P_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_P) RP=abs(RP) #Get partition of Pwave into Z and N,E components incidence_angle=directP.incident_angle Npartition,Epartition,Zpartition=get_P_wave_partition(incidence_angle,azimuth) if component=='Z': Ppartition=Zpartition elif component=='N': Ppartition=Npartition else: Ppartition=Epartition #And finally multiply everything together to get the subfault amplitude spectrum AP=CP*S*G_P*P*RP*Ppartition #Generate windowed time series duration=1./fc_subfault+0.09*(dist/1000) w=windowed_gaussian(duration,hf_dt,window_type='saragoni_hart') #Go to frequency domain, apply amplitude spectrum and ifft for final time series hf_seis_P=apply_spectrum(w,AP,f,hf_dt) #What time after OT should this time series start at? 
time_insert=directP.path['time'][-1]+onset_times[kfault] # if directP.time+onset_times[kfault] < earliestP: # earliestP=directP.time+onset_times[kfault] # earliestP_kfault=kfault i=argmin(abs(t-time_insert)) j=i+len(hf_seis_P) #Check seismogram doesn't go past last sample if i<len(hf)-1: #if i (the beginning of the seimogram) is less than the length if j>len(hf): #seismogram goes past total_duration length, trim it len_paste=len(hf)-i j=len(hf) #Add seismogram hf[i:j]=hf[i:j]+real(hf_seis_P[0:len_paste]) else: #Lengths are fine hf[i:j]=hf[i:j]+real(hf_seis_P) else: #Seismogram starts after end of available space pass ####### Build Direct S ray ###### take_off_angle_S=directS.takeoff_angle #Get attenuation due to geometrical spreading (from the path length) path_length_S=get_path_length(directS,zs,dist_in_degs) path_length_S=path_length_S*100 #to cm #Get effect of intrinsic aptimeenuation for that ray (path integrated) Q_S=get_attenuation(f,structure,directS,Qexp) #Build the entire path term G_S=(I*Q_S)/path_length_S #Get conically averaged radiation pattern terms if component=='Z': RP_vert=conically_avg_vert_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_S) #And finally multiply everything together to get the subfault amplitude spectrum AS=CS*S*G_S*P*RP_vert else: RP=conically_avg_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_S,component_angle) RP=abs(RP) #And finally multiply everything together to get the subfault amplitude spectrum AS=CS*S*G_S*P*RP #Generate windowed time series duration=1./fc_subfault+0.063*(dist/1000) w=windowed_gaussian(duration,hf_dt,window_type='saragoni_hart') #w=windowed_gaussian(3*duration,hf_dt,window_type='cua',ptime=Ppaths[0].path['time'][-1],stime=Spaths[0].path['time'][-1]) #Go to frequency domain, apply amplitude spectrum and ifft for final time series hf_seis_S=apply_spectrum(w,AS,f,hf_dt) #What time after OT should this time series start at? 
time_insert=directS.path['time'][-1]+onset_times[kfault] #print 'ts = '+str(time_insert)+' , Td = '+str(duration) #time_insert=Ppaths[0].path['time'][-1] i=argmin(abs(t-time_insert)) j=i+len(hf_seis_S) #Check seismogram doesn't go past last sample if i<len(hf)-1: #if i (the beginning of the seimogram) is less than the length if j>len(hf): #seismogram goes past total_duration length, trim it len_paste=len(hf)-i j=len(hf) #Add seismogram hf[i:j]=hf[i:j]+real(hf_seis_S[0:len_paste]) else: #Lengths are fine hf[i:j]=hf[i:j]+real(hf_seis_S) else: #Beginning of seismogram is past end of available space pass ####### Build Moho reflected S ray ###### # if mohoS==None: # pass # else: # if kfault%100==0: # print '... ... building Moho reflected S wave' # take_off_angle_mS=mohoS.takeoff_angle # # #Get attenuation due to geometrical spreading (from the path length) # path_length_mS=get_path_length(mohoS,zs,dist_in_degs) # path_length_mS=path_length_mS*100 #to cm # # #Get effect of intrinsic aptimeenuation for that ray (path integrated) # Q_mS=get_attenuation(f,structure,mohoS,Qexp) # # #Build the entire path term # G_mS=(I*Q_mS)/path_length_mS # # #Get conically averaged radiation pattern terms # if component=='Z': # RP_vert=conically_avg_vert_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_mS) # #And finally multiply everything together to get the subfault amplitude spectrum # A=C*S*G_mS*P*RP_vert # else: # RP=conically_avg_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_mS,component_angle) # RP=abs(RP) # #And finally multiply everything together to get the subfault amplitude spectrum # A=C*S*G_mS*P*RP # # #Generate windowed time series # duration=1./fc_subfault+0.063*(dist/1000) # w=windowed_gaussian(duration,hf_dt,window_type='saragoni_hart') # #w=windowed_gaussian(3*duration,hf_dt,window_type='cua',ptime=Ppaths[0].path['time'][-1],stime=Spaths[0].path['time'][-1]) # # #Go to frequency domain, apply amplitude spectrum and ifft for final time series # 
hf_seis=apply_spectrum(w,A,f,hf_dt) # # #What time after OT should this time series start at? # time_insert=mohoS.path['time'][-1]+onset_times[kfault] # #print 'ts = '+str(time_insert)+' , Td = '+str(duration) # #time_insert=Ppaths[0].path['time'][-1] # i=argmin(abs(t-time_insert)) # j=i+len(hf_seis) # # #Add seismogram # hf[i:j]=hf[i:j]+hf_seis # # #Done, reset # mohoS=None # if kfault==0: # out=''' More: # fc_scale = %10.4f # subfaultM0 = %E # mu = %E # CS = %E # CP = %E # vr = %10.4f # dip_factor = %10.4f # fc_subfault = %10.4f # directS = %s # directP = %s # '''%(fc_scale,subfault_M0[kfault],mu,CS,CP,vr,dip_factor,fc_subfault,str(directS.time),str(directP.time)) # print out # logfile.write(out) # logfile.close() #Done tr.data=hf/100 #convert to m/s**2 #Add station location, event location, and first P-wave arrival time to SAC header tr.stats.update({'sac':{'stlo':sta_lon,'stla':sta_lat,'evlo':epicenter[0],'evla':epicenter[1],'evdp':epicenter[2],'dist':dist_in_km,'az':az,'baz':backaz,'mag':Mw}}) #,'idep':"ACC (m/s^2)" not sure why idep won't work #Return trace for writing to file # print "Earliest P wave Comes at " + str(earliestP) + "after OT, from location " + str(fault[earliestP_kfault,1]) + ", " + str(fault[earliestP_kfault,2]) + ", " +str(fault[earliestP_kfault,3]) return tr
def _insert_subfault_seismogram(hf,t,seis,time_insert):
    '''
    Paste one subfault time series into the running high-frequency stack.

    hf : running output array, modified in place and returned
    t : output time vector (seconds after origin time)
    seis : subfault seismogram to add
    time_insert : time (seconds after origin) at which seis should start

    A contribution that starts past the end of the output window is dropped;
    one that runs past the end is trimmed. This mirrors the guarded insertion
    used by run_parallel_hfsims so late arrivals can no longer raise a numpy
    shape-mismatch ValueError.
    '''
    from numpy import argmin,real

    i=argmin(abs(t-time_insert))
    if i>=len(hf)-1:
        #Seismogram would start after the end of the available window
        return hf
    j=i+len(seis)
    if j>len(hf):
        #Seismogram goes past total_duration length, trim it
        seis=seis[0:len(hf)-i]
        j=len(hf)
    hf[i:j]=hf[i:j]+real(seis)
    return hf


def stochastic_simulation(home,project_name,rupture_name,GF_list,time_epi,model_name,
        rise_time_depths,moho_depth_in_km,total_duration=100,hf_dt=0.01,stress_parameter=50,
        kappa=0.04,Qexp=0.6,component='N',Pwave=False):
    '''
    Run semistochastic high-frequency simulations for every station in GF_list.

    Parameters
    ----------
    home, project_name : str
        Root folder and project folder; all inputs/outputs live under home+project_name.
    rupture_name : str
        Rupture file in <project>/output/ruptures/ (fakequakes format).
    GF_list : str
        Station file in <project>/data/station_info/ (name, lon, lat columns).
    time_epi :
        Event origin time, used as the trace start time.
    model_name : str
        1D velocity model file in <project>/structure/.
    rise_time_depths : sequence of two floats
        Depth range (km) over which local rupture speed is tapered.
    moho_depth_in_km : float
        Currently unused -- the Moho-reflected-S branch is disabled below.
    total_duration : float
        Output seismogram length in seconds.
    hf_dt : float
        Sample interval in seconds.
    stress_parameter : float
        Stress parameter in bars.
    kappa : float
        Site kappa in the high-frequency decay term exp(-pi*kappa*f).
    Qexp : float
        Frequency exponent of the Q model.
    component : str
        'N', 'E' or 'Z'.
    Pwave : bool
        If True the direct-P contribution is added as well as direct S.

    Returns
    -------
    obspy.Stream with one acceleration trace (m/s**2) per station.
    '''
    from numpy import genfromtxt,pi,logspace,log10,mean,where,exp,arange,zeros,rad2deg,arctan2
    from pyproj import Geod
    from obspy.geodetics import kilometer2degrees
    from obspy.taup import taup_create,TauPyModel
    from mudpy.forward import get_mu
    from obspy import Stream,Trace

    #Initialize output object
    st=Stream()

    #Load the source
    fault=genfromtxt(home+project_name+'/output/ruptures/'+rupture_name)

    #Onset times for each subfault
    onset_times=fault[:,12]

    #Load stations
    sta=genfromtxt(home+project_name+'/data/station_info/'+GF_list,usecols=[0],dtype='S')
    lonlat=genfromtxt(home+project_name+'/data/station_info/'+GF_list,usecols=[1,2])

    #Load velocity structure
    structure=genfromtxt(home+project_name+'/structure/'+model_name)

    #Frequencies vector
    f=logspace(log10(hf_dt),log10(1/(2*hf_dt))+0.01,50)
    omega=2*pi*f

    #Output time vector (0 is origin time)
    t=arange(0,total_duration,hf_dt)

    #Projection object for distance calculations
    g=Geod(ellps='WGS84')

    #Create taup velocity model object, paste on top of iaspei91
    #taup_create.build_taup_model(home+project_name+'/structure/bbp_norcal.tvel',output_folder=home+project_name+'/structure/')
    velmod=TauPyModel(model=home+project_name+'/structure/bbp_norcal',verbose=True)

    #Moments
    slip=(fault[:,8]**2+fault[:,9]**2)**0.5
    subfault_M0=slip*fault[:,10]*fault[:,11]*fault[:,13]
    subfault_M0=subfault_M0*1e7 #to dyne-cm
    M0=subfault_M0.sum()
    relative_subfault_M0=subfault_M0/M0

    #Corner frequency scaling
    i=where(slip>0)[0] #Non-zero faults
    N=len(i) #number of subfaults
    dl=mean((fault[:,10]+fault[:,11])/2) #predominant length scale
    dl=dl/1000 # to km

    #Frankel 95 scaling of corner frequency (verified this looks the same in GP).
    #Right now this applies the same factor to all faults; move inside the loop
    #with the right dl?
    fc_scale=(M0)/(N*stress_parameter*dl**3*1e21) #Frankel scaling

    #Move this inside loop?
    small_event_M0 = stress_parameter*dl**3*1e21

    #Tau=p perturbation used to retry ray tracing when taup fails at the exact depth
    tau_perturb=0.1

    #Loop over stations
    for ksta in range(len(lonlat)):

        #Station names are read with dtype='S' (bytes); decode for printing and metadata
        station_name=sta[ksta].decode() if isinstance(sta[ksta],bytes) else str(sta[ksta])

        print('... working on '+component+' component semistochastic waveform for station '+station_name)

        #Initialize output seismogram
        tr=Trace()
        tr.stats.station=station_name
        tr.stats.delta=hf_dt
        tr.stats.starttime=time_epi
        hf=zeros(len(t))

        #Loop over subfaults
        for kfault in range(len(fault)):

            #Include only subfaults with non-zero slip
            if subfault_M0[kfault]>0:

                #Get subfault to station distance
                lon_source=fault[kfault,1]
                lat_source=fault[kfault,2]
                azimuth,baz,dist=g.inv(lon_source,lat_source,lonlat[ksta,0],lonlat[ksta,1])
                dist_in_degs=kilometer2degrees(dist/1000.)

                #Get rho, alpha, beta at subfault depth
                zs=fault[kfault,3]
                mu,alpha,beta=get_mu(structure,zs,return_speeds=True)
                rho=mu/beta**2

                #Get radiation scale factor
                Spartition=1/2**0.5
                if component=='N' :
                    component_angle=0
                elif component=='E':
                    component_angle=90

                rho=rho/1000 #to g/cm**3
                beta=(beta/1000)*1e5 #to cm/s
                alpha=(alpha/1000)*1e5

                #Verified this produces same value as in GP
                CS=(2*Spartition)/(4*pi*(rho)*(beta**3))
                CP=2/(4*pi*(rho)*(alpha**3))

                #Get local subfault rupture speed
                beta=beta/100 #to m/s
                vr=get_local_rupture_speed(zs,beta,rise_time_depths)
                vr=vr/1000 #to km/s
                dip_factor=get_dip_factor(fault[kfault,5],fault[kfault,8],fault[kfault,9])

                #Subfault corner frequency
                c0=2.0 #GP2015 value
                fc_subfault=(c0*vr)/(dip_factor*pi*dl)

                #Get subfault source spectrum
                #S=((relative_subfault_M0[kfault]*M0/N)*f**2)/(1+fc_scale*(f/fc_subfault)**2)
                S=small_event_M0*(omega**2/(1+(f/fc_subfault)**2))
                frankel_conv_operator= fc_scale*((fc_subfault**2+f**2)/(fc_subfault**2+fc_scale*f**2))
                S=S*frankel_conv_operator

                #Get high frequency decay
                P=exp(-pi*kappa*f)

                #Get quarter wavelength amplification factors.
                #Pass rho in kg/m^3 (this units nightmare is what I get for following Graves' code)
                I=get_amplification_factors(f,structure,zs,beta,rho*1000)

                #Get other geometric parameters necessary for radiation pattern
                strike=fault[kfault,4]
                dip=fault[kfault,5]
                ss=fault[kfault,8]
                ds=fault[kfault,9]
                rake=rad2deg(arctan2(ds,ss))

                #Get ray paths for all direct P arrivals
                Ppaths=velmod.get_ray_paths(zs,dist_in_degs,phase_list=['P','p'])

                #Get ray paths for all direct S arrivals; retry with a slightly
                #perturbed source depth if taup chokes at the exact depth
                try:
                    Spaths=velmod.get_ray_paths(zs,dist_in_degs,phase_list=['S','s'])
                except Exception:
                    Spaths=velmod.get_ray_paths(zs+tau_perturb,dist_in_degs,phase_list=['S','s'])

                #Get direct S path and moho reflection
                mohoS=None
                directS=Spaths[0]
                directP=Ppaths[0]

                if len(Spaths)==1: #only direct S
                    pass
                else:
                    #NOTE: mohoS is currently unused -- the Moho-reflected-S
                    #branch further down is disabled (see note there)
                    mohoS=Spaths[-1]

                ####### Build Direct P ray ######
                if Pwave==True:
                    take_off_angle_P=directP.takeoff_angle

                    #Get attenuation due to geometrical spreading (from the path length)
                    path_length_P=get_path_length(directP,zs,dist_in_degs)
                    path_length_P=path_length_P*100 #to cm

                    #Get effect of intrinsic attenuation for that ray (path integrated).
                    #NOTE(review): integrates Q along the S ray (directS), not directP --
                    #run_parallel_hfsims does the same and comments that the P-path Q
                    #caused problems; presumably intentional, TODO confirm
                    Q_P=get_attenuation(f,structure,directS,Qexp,Qtype='P')

                    #Build the entire path term
                    G_P=(I*Q_P)/path_length_P

                    #Get conically averaged radiation pattern terms
                    RP=conically_avg_P_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_P)
                    RP=abs(RP)

                    #Get partition of P wave into Z and N,E components
                    incidence_angle=directP.incident_angle
                    Npartition,Epartition,Zpartition=get_P_wave_partition(incidence_angle,azimuth)
                    if component=='Z':
                        Ppartition=Zpartition
                    elif component=='N':
                        Ppartition=Npartition
                    else:
                        Ppartition=Epartition

                    #And finally multiply everything together to get the subfault amplitude spectrum
                    AP=CP*S*G_P*P*RP*Ppartition

                    #Generate windowed time series
                    duration=1./fc_subfault+0.09*(dist/1000)
                    w=windowed_gaussian(duration,hf_dt,window_type='saragoni_hart')

                    #Go to frequency domain, apply amplitude spectrum and ifft for final time series
                    hf_seis_P=apply_spectrum(w,AP,f,hf_dt)

                    #Paste into the stack at the P arrival time; guarded so a late
                    #arrival can no longer raise a shape-mismatch ValueError
                    hf=_insert_subfault_seismogram(hf,t,hf_seis_P,directP.path['time'][-1]+onset_times[kfault])

                ####### Build Direct S ray ######
                take_off_angle_S=directS.takeoff_angle

                #Get attenuation due to geometrical spreading (from the path length)
                path_length_S=get_path_length(directS,zs,dist_in_degs)
                path_length_S=path_length_S*100 #to cm

                #Get effect of intrinsic attenuation for that ray (path integrated)
                Q_S=get_attenuation(f,structure,directS,Qexp)

                #Build the entire path term
                G_S=(I*Q_S)/path_length_S

                #Get conically averaged radiation pattern terms
                if component=='Z':
                    RP_vert=conically_avg_vert_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_S)
                    #And finally multiply everything together to get the subfault amplitude spectrum
                    AS=CS*S*G_S*P*RP_vert
                else:
                    RP=conically_avg_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_S,component_angle)
                    RP=abs(RP)
                    #And finally multiply everything together to get the subfault amplitude spectrum
                    AS=CS*S*G_S*P*RP

                #Generate windowed time series
                duration=1./fc_subfault+0.063*(dist/1000)
                w=windowed_gaussian(duration,hf_dt,window_type='saragoni_hart')

                #Go to frequency domain, apply amplitude spectrum and ifft for final time series
                hf_seis_S=apply_spectrum(w,AS,f,hf_dt)

                #Paste into the stack at the S arrival time (guarded, see helper)
                hf=_insert_subfault_seismogram(hf,t,hf_seis_S,directS.path['time'][-1]+onset_times[kfault])

                ####### Build Moho reflected S ray ######
                #(disabled; the commented-out Moho-reflected-S branch was removed,
                #see version control history for the original scaffolding)

        #Done, add to trace and stream
        tr.data=hf/100 #convert to m/s**2
        st+=tr

    return st
def run_parallel_hfsims(home,project_name,rupture_name,N,M0,sta,sta_lon,sta_lat,component,model_name,
        rise_time_depths0,rise_time_depths1,moho_depth_in_km,total_duration,
        hf_dt,stress_parameter,kappa,Qexp,Pwave,Swave,high_stress_depth,
        Qmethod,scattering,Qc_exp,baseline_Qc,rank,size):
    '''
    MPI worker: run one stochastic high-frequency simulation for a single
    station/component and write the result to a SAC file tagged by MPI rank.

    Run stochastic HF sims; stress parameter is in bars.

    Parameters (all passed as strings/scalars from the MPI driver):
    home, project_name -- project root; inputs/outputs live under home+project_name
    rupture_name -- rupture file name; this rank reads 'mpi_rupt.<rank>.<rupture_name>'
    N -- number of subfaults used in the Frankel corner-frequency scaling
    M0 -- total moment (dyne-cm here; printed as N-m via M0/1e7)
    sta, sta_lon, sta_lat -- station name and coordinates
    component -- 'N', 'E' or 'Z'
    model_name -- 1D velocity model file; also names the .npz taup model
    rise_time_depths0/1 -- depth range (km) for rupture-speed taper
    moho_depth_in_km -- NOTE(review): unused; Moho depth is taken from velmod instead
    total_duration, hf_dt -- output length (s) and sample interval (s)
    stress_parameter -- stress parameter in bars
    kappa, Qexp -- site kappa and Q frequency exponent
    Pwave, Swave -- booleans, include direct P / direct S contributions
    high_stress_depth -- only referenced by commented-out code
    Qmethod -- 'no_moho' | 'shallowest' | 'fastest' | 'direct': S-ray selection rule
    scattering, Qc_exp, baseline_Qc -- scattering-Q parameters for get_attenuation
    rank, size -- MPI rank (used in output file name) and pool size
                  (NOTE(review): size is unused)

    Side effects: writes <project>/output/waveforms/<rupture>/<sta>.HN<comp>.<rank>.sac
    '''
    from numpy import genfromtxt,pi,logspace,log10,mean,where,exp,arange,zeros,argmin,rad2deg,arctan2,real,savetxt,c_
    from pyproj import Geod
    from obspy.geodetics import kilometer2degrees
    from obspy.taup import TauPyModel
    from mudpy.forward import get_mu, read_fakequakes_hypo_time
    from mudpy import hfsims
    from obspy import Stream,Trace
    from sys import stdout
    from os import path,makedirs
    from mudpy.hfsims import is_subfault_in_smga
    import warnings

    #rank arrives as a string from the MPI launcher
    rank=int(rank)

    #Echo the full parameter set once (rank 0, N component only)
    if rank==0 and component=='N':
        #print out what's going on:
        out='''Running with input parameters:
        home = %s
        project_name = %s
        rupture_name = %s
        N = %s
        M0 (N-m) = %s
        sta = %s
        sta_lon = %s
        sta_lat = %s
        model_name = %s
        rise_time_depths = %s
        moho_depth_in_km = %s
        total_duration = %s
        hf_dt = %s
        stress_parameter = %s
        kappa = %s
        Qexp = %s
        component = %s
        Pwave = %s
        Swave = %s
        high_stress_depth = %s
        Qmethod = %s
        scattering = %s
        Qc_exp = %s
        baseline_Qc = %s
        '''%(home,project_name,rupture_name,str(N),str(M0/1e7),sta,str(sta_lon),str(sta_lat),model_name,str([rise_time_depths0,rise_time_depths1]),
        str(moho_depth_in_km),str(total_duration),str(hf_dt),str(stress_parameter),str(kappa),str(Qexp),str(component),str(Pwave),str(Swave),
        str(high_stress_depth),str(Qmethod),str(scattering),str(Qc_exp),str(baseline_Qc))
        print(out)
    if rank==0:
        out='''
        Rupture_Name = %s
        Station = %s
        Component (N,E,Z) = %s
        Sample rate = %sHz
        Duration = %ss
        '''%(rupture_name,sta,component,str(1/hf_dt),str(total_duration))
        print(out)

    #I don't condone it but this cleans up the warnings
    warnings.filterwarnings("ignore")

    #Fix input formats:
    rise_time_depths=[rise_time_depths0,rise_time_depths1]

    #Load the source -- each rank reads its own pre-split rupture file
    mpi_rupt=home+project_name+'/output/ruptures/mpi_rupt.'+str(rank)+'.'+rupture_name
    fault=genfromtxt(mpi_rupt)

    #Onset times for each subfault
    onset_times=fault[:,12]

    #load velocity structure
    structure=genfromtxt(home+project_name+'/structure/'+model_name)

    #Frequencies vector
    f=logspace(log10(1/total_duration),log10(1/(2*hf_dt))+0.01,100)
    omega=2*pi*f

    #Output time vector (0 is origin time)
    t=arange(0,total_duration,hf_dt)

    #Projection object for distance calculations
    g=Geod(ellps='WGS84')

    #Create taup velocity model object, paste on top of iaspei91
    # velmod=TauPyModel(model=home+project_name+'/structure/iquique',verbose=True)
    velmod = TauPyModel(model=home+project_name+'/structure/'+model_name.split('.')[0]+'.npz')

    #Get epicentral time
    epicenter,time_epi=read_fakequakes_hypo_time(home,project_name,rupture_name)

    #Moments
    slip=(fault[:,8]**2+fault[:,9]**2)**0.5
    subfault_M0=slip*fault[:,10]*fault[:,11]*fault[:,13]
    subfault_M0=subfault_M0*1e7 #to dyne-cm
    relative_subfault_M0=subfault_M0/M0
    Mw=(2./3)*(log10(M0*1e-7)-9.1)

    #Corner frequency scaling
    i=where(slip>0)[0] #Non-zero faults
    dl=mean((fault[:,10]+fault[:,11])/2) #predominant length scale
    dl=dl/1000 # to km

    #Tau=p perturbation -- depth nudge used to retry taup when it finds no S ray
    tau_perturb=0.1

    #Deep faults receive a higher stress (multiplier currently a no-op, see
    #commented-out SMGA/depth blocks below)
    stress_multiplier=1

    #initalize output seismogram
    tr=Trace()
    tr.stats.station=sta
    tr.stats.delta=hf_dt
    tr.stats.starttime=time_epi

    #info for sac header (added at the end)
    az,backaz,dist_m=g.inv(epicenter[0],epicenter[1],sta_lon,sta_lat)
    dist_in_km=dist_m/1000.

    hf=zeros(len(t))

    #Loop over subfaults
    for kfault in range(len(fault)):
        if rank==0:
            #Print status to screen -- one dot per 25 subfaults
            if kfault % 25 == 0:
                if kfault==0:
                    stdout.write(' [.')
                    stdout.flush()
                stdout.write('.')
                stdout.flush()
            if kfault==len(fault)-1:
                stdout.write('.]\n')
                stdout.flush()

        #Include only subfaults with non-zero slip
        if subfault_M0[kfault]>0:

            #Get subfault to station distance
            lon_source=fault[kfault,1]
            lat_source=fault[kfault,2]
            azimuth,baz,dist=g.inv(lon_source,lat_source,sta_lon,sta_lat)
            dist_in_degs=kilometer2degrees(dist/1000.)

            #Source depth?
            z_source=fault[kfault,3]

            #No change
            stress=stress_parameter

            #(commented-out SMGA / slip / depth stress-multiplier experiments
            # removed; see version control history)

            # Frankel 95 scaling of corner frequency (verified this looks the
            # same in GP). Right now this applies the same factor to all faults
            fc_scale=(M0)/(N*stress*dl**3*1e21) #Frankel scaling
            small_event_M0 = stress*dl**3*1e21

            #Get rho, alpha, beta at subfault depth
            zs=fault[kfault,3]
            mu,alpha,beta=get_mu(structure,zs,return_speeds=True)
            rho=mu/beta**2

            #Get radiation scale factor
            Spartition=1/2**0.5
            if component=='N' :
                component_angle=0
            elif component=='E':
                component_angle=90

            rho=rho/1000 #to g/cm**3
            beta=(beta/1000)*1e5 #to cm/s
            alpha=(alpha/1000)*1e5

            #Verified this produces same value as in GP
            CS=(2*Spartition)/(4*pi*(rho)*(beta**3))
            CP=2/(4*pi*(rho)*(alpha**3))

            #Get local subfault rupture speed
            beta=beta/100 #to m/s
            vr=hfsims.get_local_rupture_speed(zs,beta,rise_time_depths)
            vr=vr/1000 #to km/s
            dip_factor=hfsims.get_dip_factor(fault[kfault,5],fault[kfault,8],fault[kfault,9])

            #Subfault corner frequency
            c0=2.0 #GP2015 value
            fc_subfault=(c0*vr)/(dip_factor*pi*dl)

            #get subfault source spectrum
            #S=((relative_subfault_M0[kfault]*M0/N)*f**2)/(1+fc_scale*(f/fc_subfault)**2)
            S=small_event_M0*(omega**2/(1+(f/fc_subfault)**2))
            frankel_conv_operator= fc_scale*((fc_subfault**2+f**2)/(fc_subfault**2+fc_scale*f**2))
            S=S*frankel_conv_operator

            #get high frequency decay
            P=exp(-pi*kappa*f)

            #Get other geometric parameters necessary for radiation pattern
            strike=fault[kfault,4]
            dip=fault[kfault,5]
            ss=fault[kfault,8]
            ds=fault[kfault,9]
            rake=rad2deg(arctan2(ds,ss))

            #Get ray paths for all direct P arrivals
            Ppaths=velmod.get_ray_paths(zs,dist_in_degs,phase_list=['P','p'])

            #Get ray paths for all direct S arrivals
            try:
                Spaths=velmod.get_ray_paths(zs,dist_in_degs,phase_list=['S','s'])
            except:
                Spaths=velmod.get_ray_paths(zs+tau_perturb,dist_in_degs,phase_list=['S','s'])

            #sometimes there's no S, weird I know. Check twice.
            #NOTE(review): the +5*tau_perturb case appears twice in this ladder;
            #presumably one of them was meant to be a different offset -- TODO confirm
            if len(Spaths)==0:
                Spaths=velmod.get_ray_paths(zs+tau_perturb,dist_in_degs,phase_list=['S','s'])
            if len(Spaths)==0:
                Spaths=velmod.get_ray_paths(zs+5*tau_perturb,dist_in_degs,phase_list=['S','s'])
            if len(Spaths)==0:
                Spaths=velmod.get_ray_paths(zs-5*tau_perturb,dist_in_degs,phase_list=['S','s'])
            if len(Spaths)==0:
                Spaths=velmod.get_ray_paths(zs+5*tau_perturb,dist_in_degs,phase_list=['S','s'])
            if len(Spaths)==0:
                Spaths=velmod.get_ray_paths(zs-10*tau_perturb,dist_in_degs,phase_list=['S','s'])
            if len(Spaths)==0:
                Spaths=velmod.get_ray_paths(zs+10*tau_perturb,dist_in_degs,phase_list=['S','s'])
            if len(Spaths)==0:
                Spaths=velmod.get_ray_paths(zs-50*tau_perturb,dist_in_degs,phase_list=['S','s'])
            if len(Spaths)==0:
                Spaths=velmod.get_ray_paths(zs+50*tau_perturb,dist_in_degs,phase_list=['S','s'])
            if len(Spaths)==0:
                Spaths=velmod.get_ray_paths(zs-75*tau_perturb,dist_in_degs,phase_list=['S','s'])
            if len(Spaths)==0:
                Spaths=velmod.get_ray_paths(zs+75*tau_perturb,dist_in_degs,phase_list=['S','s'])
            if len(Spaths)==0:
                print('ERROR: I give up, no direct S in spite of multiple attempts at subfault '+str(kfault))

            #Which ray should I keep?
            #This is the fastest arriving P
            directP=Ppaths[0]
            #Get moho depth from velmod
            moho_depth = velmod.model.moho_depth

            # In this method here are the rules:
            #For S do not allow Moho turning rays, keep the fastest non Moho turning ray. If
            #only Moho rays are available, then keep the one that turns the shallowest.
            #NOTE(review): if Qmethod is none of the values below, directS is left
            #unbound and the S-wave block will raise NameError -- TODO confirm the
            #caller validates Qmethod
            if Qmethod == 'no_moho':
                #get turning depths and arrival times of S rays
                turning_depths = zeros(len(Spaths))
                S_ray_times = zeros(len(Spaths))
                for kray in range(len(Spaths)):
                    turning_depths[kray] = Spaths[kray].path['depth'].max()
                    S_ray_times[kray] = Spaths[kray].path['time'].max()
                #Keep only rays that turn above Moho
                i=where(turning_depths < moho_depth)[0]
                if len(i) == 0: #all rays turn below Moho, keep shallowest turning
                    i_min_depth = argmin(turning_depths)
                    directS = Spaths[i_min_depth]
                else:
                    #Keep fastest arriving ray that turns above Moho
                    Spaths = [Spaths[j] for j in i] #Rays turning above Moho
                    S_ray_times = S_ray_times[i]
                    i_min_time = argmin(S_ray_times)
                    directS = Spaths[i_min_time]
            elif Qmethod =='shallowest':
                #get turning depths and arrival times of S rays
                turning_depths = zeros(len(Spaths))
                for kray in range(len(Spaths)):
                    turning_depths[kray] = Spaths[kray].path['depth'].max()
                i_min_depth = argmin(turning_depths)
                directS = Spaths[i_min_depth]
            elif Qmethod == 'fastest' or Qmethod=='direct':
                #Pick first arriving S wave
                directS = Spaths[0]

            #directS=Spaths[0] #this is the old way, kept fastest S
            mohoS=None
            #(commented-out Moho-reflection ray selection removed; see history)

            ####### Build Direct P ray ######
            if Pwave==True:
                take_off_angle_P=directP.takeoff_angle

                #NOTE(review): the P amplitude below deliberately reuses the
                #S-ray path length and attenuation (the original Q_P version
                #"causes problems and I don't know why" per the author) -- the
                #quarter-wavelength site term is also bypassed (G uses 1*Q_S).
                #Get attenuation due to geometrical spreading (from the path length)
                path_length_S=hfsims.get_path_length(directS,zs,dist_in_degs)
                path_length_S=path_length_S*100 #to cm

                #Get effect of intrinsic attenuation for that ray (path integrated)
                Q_S=hfsims.get_attenuation(f,structure,directS,Qexp)

                #get quarter wavelength amplification factors
                # pass rho in kg/m^3 (this units nightmare is what I get for following Graves' code)
                I_S=hfsims.get_amplification_factors(f,structure,zs,beta,rho*1000)

                #Build the entire path term
                # G_S=(I_S*Q_S)/path_length_S
                G_S=(1*Q_S)/path_length_S

                #Get conically averaged radiation pattern terms
                RP=hfsims.conically_avg_P_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_P)
                RP=abs(RP)

                #Get partition of Pwave into Z and N,E components
                incidence_angle=directP.incident_angle
                Npartition,Epartition,Zpartition=hfsims.get_P_wave_partition(incidence_angle,azimuth)
                if component=='Z':
                    Ppartition=Zpartition
                elif component=='N':
                    Ppartition=Npartition
                else:
                    Ppartition=Epartition

                #And finally multiply everything together to get the subfault amplitude spectrum
                AP=CP*S*G_S*P*RP*Ppartition

                #Generate windowed time series
                duration=1./fc_subfault+0.09*(dist/1000)
                w=hfsims.windowed_gaussian(duration,hf_dt,window_type='saragoni_hart')

                #Go to frequency domain, apply amplitude spectrum and ifft for final time series
                hf_seis_P=hfsims.apply_spectrum(w,AP,f,hf_dt)

                #What time after OT should this time series start at?
                time_insert=directP.path['time'][-1]+onset_times[kfault]
                i=argmin(abs(t-time_insert))
                j=i+len(hf_seis_P)

                #Check seismogram doesn't go past last sample
                if i<len(hf)-1: #if i (the beginning of the seimogram) is less than the length
                    if j>len(hf): #seismogram goes past total_duration length, trim it
                        len_paste=len(hf)-i
                        j=len(hf)
                        #Add seismogram
                        hf[i:j]=hf[i:j]+real(hf_seis_P[0:len_paste])
                    else: #Lengths are fine
                        hf[i:j]=hf[i:j]+real(hf_seis_P)
                else: #Seismogram starts after end of available space
                    pass

            ####### Build Direct S ray ######
            if Swave==True:
                take_off_angle_S=directS.takeoff_angle

                #Get attenuation due to geometrical spreading (from the path length)
                path_length_S=hfsims.get_path_length(directS,zs,dist_in_degs)
                path_length_S=path_length_S*100 #to cm

                #Get effect of intrinsic attenuation for that ray (path integrated)
                if Qmethod == 'direct':#No ray tracing use bulk attenuation along path
                    Q_S = hfsims.get_attenuation_linear(f,structure,zs,dist,Qexp,Qtype='S')
                else:
                    #Use ray tracing
                    Q_S=hfsims.get_attenuation(f,structure,directS,Qexp,scattering=scattering,
                        Qc_exp=Qc_exp,baseline_Qc=baseline_Qc)

                #get quarter wavelength amplification factors
                # pass rho in kg/m^3 (this units nightmare is what I get for following Graves' code)
                I_S=hfsims.get_amplification_factors(f,structure,zs,beta,rho*1000)

                #Build the entire path term
                G_S=(I_S*Q_S)/path_length_S
                # G_S=(1*Q_S)/path_length_S

                #Get conically averaged radiation pattern terms
                if component=='Z':
                    RP_vert=hfsims.conically_avg_vert_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_S)
                    #And finally multiply everything together to get the subfault amplitude spectrum
                    AS=CS*S*G_S*P*RP_vert
                else:
                    RP=hfsims.conically_avg_radiation_pattern(strike,dip,rake,azimuth,take_off_angle_S,component_angle)
                    RP=abs(RP)
                    #And finally multiply everything together to get the subfault amplitude spectrum
                    AS=CS*S*G_S*P*RP

                #Generate windowed time series
                duration=1./fc_subfault+0.063*(dist/1000)
                w=hfsims.windowed_gaussian(duration,hf_dt,window_type='saragoni_hart')

                #Go to frequency domain, apply amplitude spectrum and ifft for final time series
                hf_seis_S=hfsims.apply_spectrum(w,AS,f,hf_dt)

                #What time after OT should this time series start at?
                time_insert=directS.path['time'][-1]+onset_times[kfault]
                i=argmin(abs(t-time_insert))
                j=i+len(hf_seis_S)

                #Check seismogram doesn't go past last sample
                if i<len(hf)-1: #if i (the beginning of the seimogram) is less than the length
                    if j>len(hf): #seismogram goes past total_duration length, trim it
                        len_paste=len(hf)-i
                        j=len(hf)
                        #Add seismogram
                        hf[i:j]=hf[i:j]+real(hf_seis_S[0:len_paste])
                    else: #Lengths are fine
                        hf[i:j]=hf[i:j]+real(hf_seis_S)
                else: #Beginning of seismogram is past end of available space
                    pass

            ####### Build Moho reflected S ray ######
            #(disabled; commented-out scaffolding removed, see version control history)

    #Done
    tr.data=hf/100 #convert to m/s**2
    #Add station location, event location, and first P-wave arrival time to SAC header
    tr.stats.update({'sac':{'stlo':sta_lon,'stla':sta_lat,'evlo':epicenter[0],'evla':epicenter[1],'evdp':epicenter[2],'dist':dist_in_km,'az':az,'baz':backaz,'mag':Mw}}) #,'idep':"ACC (m/s^2)" not sure why idep won't work

    #Write out to file
    #old
    #rupture=rupture_name.split('.')[0]+'.'+rupture_name.split('.')[1]
    #new
    rupture=rupture_name.rsplit('.',1)[0]
    if not path.exists(home+project_name+'/output/waveforms/'+rupture+'/'):
        makedirs(home+project_name+'/output/waveforms/'+rupture+'/')
    #zero-pad the rank in the file name so files sort correctly
    if rank < 10:
        tr.write(home+project_name+'/output/waveforms/'+rupture+'/'+sta+'.HN'+component+'.00'+str(rank)+'.sac',format='SAC')
    elif rank < 100:
        tr.write(home+project_name+'/output/waveforms/'+rupture+'/'+sta+'.HN'+component+'.0'+str(rank)+'.sac',format='SAC')
    else:
        tr.write(home+project_name+'/output/waveforms/'+rupture+'/'+sta+'.HN'+component+'.'+str(rank)+'.sac',format='SAC')
#sta='LUTZ' #lonsta=-121.8652 #latsta= 37.2869 #sta='MILP' #lonsta=-121.8340 #latsta=37.4491 path = '/Users/dmelgar/FakeQuakes/M6_validation_pwave/output/waveforms/M6.000000/' #taup zs = 8.0 g = Geod(ellps='WGS84') azimuth, baz, dist = g.inv(-121.753508, 37.332028, lonsta, latsta) dist_in_degs = kilometer2degrees(dist / 1000.) velmod = TauPyModel( '/Users/dmelgar/FakeQuakes/M6_validation_pwave/structure/bbp_norcal.npz') Ppaths = velmod.get_ray_paths(zs, dist_in_degs, phase_list=['P', 'p']) p = Ppaths[0].time Spaths = velmod.get_ray_paths(zs, dist_in_degs, phase_list=['S', 's']) s = Spaths[0].time nlf = read(path + sta + '.LYN.sac') elf = read(path + sta + '.LYE.sac') zlf = read(path + sta + '.LYZ.sac') nbb = read(path + sta + '.bb.HNN.sac') ebb = read(path + sta + '.bb.HNE.sac') zbb = read(path + sta + '.bb.HNZ.sac')