def _parseRecordAH(self, line, event):
    """
    Parses the 'additional hypocenter' record AH
    """
    date = line[2:10]
    time = line[11:20]
    # unused: hypocenter_quality = line[20]
    latitude = self._float(line[21:27])
    lat_type = line[27]
    longitude = self._float(line[29:36])
    lon_type = line[36]
    # unused: preliminary_flag = line[37]
    depth = self._float(line[38:43])
    # unused: depth_quality = line[43]
    standard_dev = self._floatUnused(line[44:48])
    station_number = self._intUnused(line[48:51])
    phase_number = self._intUnused(line[51:55])
    source_code = line[56:60].strip()

    evid = event.resource_id.id.split('/')[-1]
    origin = Origin()
    res_id = '/'.join((res_id_prefix, 'origin', evid, source_code.lower()))
    origin.resource_id = ResourceIdentifier(id=res_id)
    origin.creation_info = CreationInfo(agency_id=source_code)
    origin.time = UTCDateTime(date + time)
    origin.latitude = latitude * self._coordinateSign(lat_type)
    origin.longitude = longitude * self._coordinateSign(lon_type)
    origin.depth = depth * 1000
    origin.depth_type = 'from location'
    origin.quality = OriginQuality()
    origin.quality.standard_error = standard_dev
    origin.quality.used_station_count = station_number
    origin.quality.used_phase_count = phase_number
    origin.type = 'hypocenter'
    event.origins.append(origin)
def _block2event(block, seed_map, id_default, ph2comp, eventid_map):
    """
    Read HypoDD event block
    """
    lines = block.strip().splitlines()
    yr, mo, dy, hr, mn, sc, la, lo, dp, mg, eh, ez, rms, id_ = \
        lines[0].split()
    if eventid_map is not None and id_ in eventid_map:
        id_ = eventid_map[id_]
    time = UTCDateTime(int(yr), int(mo), int(dy), int(hr), int(mn),
                       float(sc), strict=False)
    laterr = None if float(eh) == 0 else float(eh) / DEG2KM
    lonerr = (None if laterr is None or float(la) > 89 else
              laterr / cos(deg2rad(float(la))))
    ez = None if float(ez) == 0 else float(ez) * 1000
    rms = None if float(rms) == 0 else float(rms)
    picks = []
    arrivals = []
    for line in lines[1:]:
        sta, reltime, weight, phase = line.split()
        comp = ph2comp.get(phase, '')
        wid = seed_map.get(sta, id_default)
        _waveform_id = WaveformStreamID(seed_string=wid.format(sta, comp))
        pick = Pick(waveform_id=_waveform_id, phase_hint=phase,
                    time=time + float(reltime))
        arrival = Arrival(phase=phase, pick_id=pick.resource_id,
                          time_weight=float(weight))
        picks.append(pick)
        arrivals.append(arrival)
    qu = OriginQuality(associated_phase_count=len(picks),
                       standard_error=rms)
    origin = Origin(arrivals=arrivals,
                    resource_id="smi:local/origin/" + id_,
                    quality=qu,
                    latitude=float(la),
                    longitude=float(lo),
                    depth=1000 * float(dp),
                    latitude_errors=laterr,
                    longitude_errors=lonerr,
                    depth_errors=ez,
                    time=time)
    if mg.lower() == 'nan':
        magnitudes = []
        preferred_magnitude_id = None
    else:
        magnitude = Magnitude(mag=mg,
                              resource_id="smi:local/magnitude/" + id_)
        magnitudes = [magnitude]
        preferred_magnitude_id = magnitude.resource_id
    event = Event(resource_id="smi:local/event/" + id_,
                  picks=picks,
                  origins=[origin],
                  magnitudes=magnitudes,
                  preferred_origin_id=origin.resource_id,
                  preferred_magnitude_id=preferred_magnitude_id)
    return event
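# Hedged usage sketch for _block2event: a minimal event block with the
# layout the function above expects (header line of 14 whitespace-separated
# fields, then one "station reltime weight phase" line per pick). All values
# and the SEED template/phase map are made up, and the module-level names
# the function relies on (DEG2KM, cos, deg2rad, the obspy.core.event
# classes) are assumed to be in scope.
_example_block = """
1999 01 02 07 12 34.56 47.1234 8.5678 10.50 2.1 0.5 1.2 0.15 12345
STA01 1.23 1.0 P
STA01 2.34 0.8 S
"""
_example_event = _block2event(_example_block, seed_map={},
                              id_default='XX.{}..HH{}',
                              ph2comp={'P': 'Z', 'S': 'N'},
                              eventid_map=None)
print(_example_event.origins[0].time, len(_example_event.picks))  # -> ..., 2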
def _parse_record_hy(self, line):
    """
    Parses the 'hypocenter' record HY
    """
    date = line[2:10]
    time = line[11:20]
    # unused: location_quality = line[20]
    latitude = self._float(line[21:27])
    lat_type = line[27]
    longitude = self._float(line[29:36])
    lon_type = line[36]
    depth = self._float(line[38:43])
    # unused: depth_quality = line[43]
    standard_dev = self._float(line[44:48])
    station_number = self._int(line[48:51])
    # unused: version_flag = line[51]
    fe_region_number = line[52:55]
    fe_region_name = self._decode_fe_region_number(fe_region_number)
    source_code = line[55:60].strip()

    event = Event()
    # FIXME: a smarter way to define evid?
    evid = date + time
    res_id = '/'.join((res_id_prefix, 'event', evid))
    event.resource_id = ResourceIdentifier(id=res_id)
    description = EventDescription(
        type='region name',
        text=fe_region_name)
    event.event_descriptions.append(description)
    description = EventDescription(
        type='Flinn-Engdahl region',
        text=fe_region_number)
    event.event_descriptions.append(description)
    origin = Origin()
    res_id = '/'.join((res_id_prefix, 'origin', evid))
    origin.resource_id = ResourceIdentifier(id=res_id)
    origin.creation_info = CreationInfo()
    if source_code:
        origin.creation_info.agency_id = source_code
    else:
        origin.creation_info.agency_id = 'USGS-NEIC'
    res_id = '/'.join((res_id_prefix, 'earthmodel/ak135'))
    origin.earth_model_id = ResourceIdentifier(id=res_id)
    origin.time = UTCDateTime(date + time)
    origin.latitude = latitude * self._coordinate_sign(lat_type)
    origin.longitude = longitude * self._coordinate_sign(lon_type)
    origin.depth = depth * 1000
    origin.depth_type = 'from location'
    origin.quality = OriginQuality()
    origin.quality.associated_station_count = station_number
    origin.quality.standard_error = standard_dev
    # associated_phase_count can be incremented in records 'P ' and 'S '
    origin.quality.associated_phase_count = 0
    # depth_phase_count can be incremented in record 'S '
    origin.quality.depth_phase_count = 0
    # Origin's attribute is 'type' (cf. the other record parsers)
    origin.type = 'hypocenter'
    origin.region = fe_region_name
    event.origins.append(origin)
    return event
def ORNL_events_to_cat(ornl_file):
    """Make Catalog from ORNL locations"""
    cat = Catalog()
    loc_df = pd.read_csv(ornl_file, infer_datetime_format=True)
    loc_df = loc_df.set_index('event_datetime')
    eid = 0
    for dt, row in loc_df.iterrows():
        ot = UTCDateTime(dt)
        hmc_east = row['x(m)']
        hmc_north = row['y(m)']
        hmc_elev = row['z(m)']
        errX = row['error_x (m)']
        errY = row['error_y (m)']
        errZ = row['error_z (m)']
        rms = row['rms (millisecond)']
        converter = SURF_converter()
        lon, lat, elev = converter.to_lonlat((hmc_east, hmc_north,
                                              hmc_elev))
        o = Origin(time=ot, latitude=lat, longitude=lon, depth=130 - elev)
        o.origin_uncertainty = OriginUncertainty()
        o.quality = OriginQuality()
        ou = o.origin_uncertainty
        oq = o.quality
        ou.max_horizontal_uncertainty = np.max([errX, errY])
        ou.min_horizontal_uncertainty = np.min([errX, errY])
        o.depth_errors.uncertainty = errZ
        # rms column is in milliseconds; standard_error is in seconds
        oq.standard_error = rms / 1e3
        extra = AttribDict({
            'hmc_east': {
                'value': hmc_east,
                'namespace': 'smi:local/hmc'
            },
            'hmc_north': {
                'value': hmc_north,
                'namespace': 'smi:local/hmc'
            },
            'hmc_elev': {
                'value': hmc_elev,
                'namespace': 'smi:local/hmc'
            },
            'hmc_eid': {
                'value': eid,
                'namespace': 'smi:local/hmc'
            }
        })
        o.extra = extra
        rid = ResourceIdentifier(id=ot.strftime('%Y%m%d%H%M%S%f'))
        # Dummy magnitude of 1. for all events until further notice
        mag = Magnitude(mag=1., mag_errors=QuantityError(uncertainty=1.))
        ev = Event(origins=[o], magnitudes=[mag], resource_id=rid)
        ev.preferred_origin_id = o.resource_id.id
        cat.events.append(ev)
        eid += 1
    return cat
def _block2event(block, seed_map, id_default, ph2comp):
    """
    Read HypoDD event block
    """
    lines = block.strip().splitlines()
    yr, mo, dy, hr, mn, sc, la, lo, dp, mg, eh, ez, rms, id_ = \
        lines[0].split()
    time = UTCDateTime(int(yr), int(mo), int(dy), int(hr), int(mn),
                       float(sc), strict=False)
    picks = []
    arrivals = []
    for line in lines[1:]:
        sta, reltime, weight, phase = line.split()
        comp = ph2comp.get(phase, '')
        wid = seed_map.get(sta, id_default)
        _waveform_id = WaveformStreamID(seed_string=wid.format(sta, comp))
        pick = Pick(waveform_id=_waveform_id, phase_hint=phase,
                    time=time + float(reltime))
        arrival = Arrival(phase=phase, pick_id=pick.resource_id,
                          time_weight=float(weight))
        picks.append(pick)
        arrivals.append(arrival)
    qu = None if rms == '0.0' else OriginQuality(standard_error=float(rms))
    origin = Origin(arrivals=arrivals,
                    resource_id="smi:local/origin/" + id_,
                    quality=qu,
                    latitude=float(la),
                    longitude=float(lo),
                    depth=1000 * float(dp),
                    time=time)
    if mg.lower() == 'nan':
        magnitudes = []
        preferred_magnitude_id = None
    else:
        magnitude = Magnitude(mag=mg,
                              resource_id="smi:local/magnitude/" + id_)
        magnitudes = [magnitude]
        preferred_magnitude_id = magnitude.resource_id
    event = Event(resource_id="smi:local/event/" + id_,
                  picks=picks,
                  origins=[origin],
                  magnitudes=magnitudes,
                  preferred_origin_id=origin.resource_id,
                  preferred_magnitude_id=preferred_magnitude_id)
    return event
def read_header_line(string_line):
    new_event = Event()
    line = string_line

    param_event = line.split()[1:]

    ### check if line has the required number of arguments
    if len(param_event) != 14:
        return new_event

    ### Get parameters
    year, month, day = [int(x) for x in param_event[0:3]]
    hour, minu = [int(x) for x in param_event[3:5]]
    sec = float(param_event[5])
    if sec >= 60:
        sec = 59.999
    lat, lon, z = [float(x) for x in param_event[6:9]]
    mag = float(param_event[9])
    errh, errz, rms = [float(x) for x in param_event[10:13]]

    _time = UTCDateTime(year, month, day, hour, minu, sec)
    _origin_quality = OriginQuality(standard_error=rms)
    # change what's next to handle origins with no error estimates
    origin = Origin(time=_time,
                    longitude=lon,
                    latitude=lat,
                    depth=z,
                    longitude_errors=QuantityError(uncertainty=errh),
                    latitude_errors=QuantityError(uncertainty=errh),
                    depth_errors=QuantityError(uncertainty=errz),
                    quality=_origin_quality)
    magnitude = Magnitude(mag=mag, origin_id=origin.resource_id)

    ### Return
    new_event.origins.append(origin)
    new_event.magnitudes.append(magnitude)
    new_event.preferred_origin_id = origin.resource_id
    new_event.preferred_magnitude_id = magnitude.resource_id
    return new_event
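# Hedged usage sketch for read_header_line: the leading token (here "#") is
# discarded by split()[1:], leaving the 14 fields the function expects. All
# values below are made up; the obspy.core.event classes are assumed to be
# imported in the surrounding module.
_example_header = ("# 2016 01 02 03 04 5.60 45.000 11.000 5.20 1.80 "
                   "0.30 0.50 0.12 1001")
_example_ev = read_header_line(_example_header)
print(_example_ev.origins[0].time, _example_ev.magnitudes[0].mag)  # -> ..., 1.8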
def _phase_to_event(event_text):
    """
    Function to convert the text for one event in hypoDD phase format to \
    event object.

    :type event_text: dict
    :param event_text: dict of two elements, header and picks, header is a \
        str, picks is a list of str.

    :returns: obspy.core.event.Event
    """
    from obspy.core.event import Event, Origin, Magnitude
    from obspy.core.event import Pick, WaveformStreamID, Arrival, \
        OriginQuality
    from obspy import UTCDateTime
    ph_event = Event()
    # Extract info from header line
    # YR, MO, DY, HR, MN, SC, LAT, LON, DEP, MAG, EH, EZ, RMS, ID
    header = event_text['header'].split()
    ph_event.origins.append(Origin())
    ph_event.origins[0].time = \
        UTCDateTime(year=int(header[1]), month=int(header[2]),
                    day=int(header[3]), hour=int(header[4]),
                    minute=int(header[5]),
                    second=int(header[6].split('.')[0]),
                    microsecond=int(
                        float(('0.' + header[6].split('.')[1])) * 1000000))
    ph_event.origins[0].latitude = float(header[7])
    ph_event.origins[0].longitude = float(header[8])
    ph_event.origins[0].depth = float(header[9]) * 1000
    ph_event.origins[0].quality = OriginQuality(
        standard_error=float(header[13]))
    ph_event.magnitudes.append(Magnitude())
    ph_event.magnitudes[0].mag = float(header[10])
    ph_event.magnitudes[0].magnitude_type = 'M'
    # Extract arrival info from picks!
    for i, pick_line in enumerate(event_text['picks']):
        pick = pick_line.split()
        _waveform_id = WaveformStreamID(station_code=pick[0])
        pick_time = ph_event.origins[0].time + float(pick[1])
        ph_event.picks.append(
            Pick(waveform_id=_waveform_id, phase_hint=pick[3],
                 time=pick_time))
        # Arrival.phase expects the phase name, not the Pick object
        ph_event.origins[0].arrivals.append(
            Arrival(phase=ph_event.picks[i].phase_hint,
                    pick_id=ph_event.picks[i].resource_id))
        ph_event.origins[0].arrivals[i].time_weight = float(pick[2])
    return ph_event
def read_origin(event_str):
    """
    Read the origin information from the REST file string

    :param event_str: Contents of file as list of str
    :type event_str: list

    :returns: :class:`obspy.core.event.Event`
    """
    event = Event()

    head = event_str[0].split()
    try:
        gap = float(head[17])
    except IndexError:
        gap = None
    origin = Origin(
        time=UTCDateTime(
            year=int(head[0]), julday=int(head[1]), hour=int(head[2]),
            minute=int(head[3])) + float(head[4]),
        latitude=float(head[5]), longitude=float(head[6]),
        depth=float(head[7]) * 1000,
        quality=OriginQuality(
            standard_error=float(head[9]),
            azimuthal_gap=gap,
            used_phase_count=int(head[17])),
        longitude_errors=QuantityError(
            uncertainty=kilometer2degrees(float(head[12]))),
        latitude_errors=QuantityError(
            uncertainty=kilometer2degrees(float(head[11]))),
        depth_errors=QuantityError(uncertainty=float(head[13]) * 1000),
        method_id=ResourceIdentifier("smi:local/REST"),
        evaluation_mode="automatic")
    event.origins.append(origin)
    try:
        event.magnitudes.append(Magnitude(
            mag=float(head[19]), magnitude_type="M"))
    except IndexError:
        pass
    return event
def read_nlloc_hyp(filename, coordinate_converter=None, picks=None, **kwargs):
    """
    Reads a NonLinLoc Hypocenter-Phase file to a
    :class:`~obspy.core.event.Catalog` object.

    .. note::

        Coordinate conversion from coordinate frame of NonLinLoc model files /
        location run to WGS84 has to be specified explicitly by the user if
        necessary.

    .. note::

        An example can be found on the :mod:`~obspy.io.nlloc` submodule front
        page in the documentation pages.

    :param filename: File or file-like object in text mode.
    :type coordinate_converter: func
    :param coordinate_converter: Function to convert (x, y, z) coordinates of
        NonLinLoc output to geographical coordinates (longitude, latitude,
        depth in kilometers). If left ``None``, NonLinLoc (x, y, z) output is
        left unchanged (e.g. if it is in geographical coordinates already
        like for NonLinLoc in global mode). The function should accept three
        arguments x, y, z (each of type :class:`numpy.ndarray`) and return a
        tuple of three :class:`numpy.ndarray` (lon, lat, depth in
        kilometers).
    :type picks: list of :class:`~obspy.core.event.Pick`
    :param picks: Original picks used to generate the NonLinLoc location.
        If provided, the output event will include the original picks and
        the arrivals in the output origin will link to them correctly (with
        their ``pick_id`` attribute). If not provided, the output event will
        include (the rather basic) pick information that can be
        reconstructed from the NonLinLoc hypocenter-phase file.
    :rtype: :class:`~obspy.core.event.Catalog`
    """
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    lines = data.splitlines()

    # remember picks originally used in location, if provided
    original_picks = picks
    if original_picks is None:
        original_picks = []

    # determine indices of block start/end of the NLLOC output file
    indices_hyp = [None, None]
    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("NLLOC "):
            indices_hyp[0] = i
        elif line.startswith("END_NLLOC"):
            indices_hyp[1] = i
        elif line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i
    if any([i is None for i in indices_hyp]):
        msg = ("NLLOC HYP file seems corrupt,"
               " could not detect 'NLLOC' and 'END_NLLOC' lines.")
        raise RuntimeError(msg)
    # strip any other lines around NLLOC block
    lines = lines[indices_hyp[0]:indices_hyp[1]]

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = ("NLLOC HYP file seems corrupt, 'PHASE' block is corrupt.")
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    lines = dict([line.split(None, 1) for line in lines])
    line = lines["SIGNATURE"]

    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    creation_time = UTCDateTime.strptime(date + time, str("%d%b%Y%Hh%Mm%S"))

    # maximum likelihood origin location info line
    line = lines["HYPOCENTER"]

    x, y, z = map(float, line.split()[1:7:2])

    if coordinate_converter:
        x, y, z = coordinate_converter(x, y, z)

    # origin time info line
    line = lines["GEOGRAPHIC"]

    year, month, day, hour, minute = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, month, day, hour, minute, seconds)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_xx = float(line.split()[7])
    covariance_yy = float(line.split()[13])
    covariance_zz = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from "
        "covariance matrix as 1D marginal (Lon/Lat errors as great circle "
        "degrees) while OriginUncertainty min/max horizontal errors are "
        "calculated from 2D error ellipsoid and are therefore seemingly "
        "higher compared to 1D errors. Error estimates can be reconstructed "
        "from the following original NonLinLoc error statistics line:"
        "\nSTATISTICS " + lines["STATISTICS"])

    # goto location quality info line
    line = lines["QML_OriginQuality"].split()

    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # goto location quality info line
    line = lines["QML_OriginUncertainty"]

    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    cat = Catalog(events=[event])
    o = Origin()
    event.origins = [o]
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string))

    cat.creation_info.creation_time = UTCDateTime()
    cat.creation_info.version = "ObsPy %s" % __version__
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version)
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates are
    # large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_xx))
    except ValueError:
        if covariance_xx < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_yy))
    except ValueError:
        if covariance_yy < 0:
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting latitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_zz) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        wid = WaveformStreamID(station_code=station)
        date, hourmin, sec = map(str, line[6:9])
        t = UTCDateTime.strptime(date + hourmin, "%Y%m%d%H%M") + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not
            # associate the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick for "
                       "arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    return cat
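# Hedged usage sketch for read_nlloc_hyp: a coordinate_converter matching
# the contract in the docstring above (arrays of x/y/z in a local kilometer
# grid in, lon/lat/depth-km out). The grid reference coordinates and the
# file name are made-up placeholders.
import numpy as np

def _local_grid_to_wgs84(x, y, z):
    lon0, lat0 = 8.0, 47.0  # hypothetical reference of the NLLoc grid
    lat = lat0 + np.asarray(y) / 111.2
    lon = lon0 + np.asarray(x) / (111.2 * np.cos(np.radians(lat0)))
    return lon, lat, np.asarray(z)  # depth stays in kilometers

cat = read_nlloc_hyp("example.hyp", coordinate_converter=_local_grid_to_wgs84)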
def _read_single_hypocenter(lines, coordinate_converter, original_picks):
    """
    Given a list of lines (starting with a 'NLLOC' line and ending with a
    'END_NLLOC' line), parse them into an Event.
    """
    try:
        # some paranoid checks..
        assert lines[0].startswith("NLLOC ")
        assert lines[-1].startswith("END_NLLOC")
        for line in lines[1:-1]:
            assert not line.startswith("NLLOC ")
            assert not line.startswith("END_NLLOC")
    except Exception:
        msg = ("This should not have happened, please report this as a bug "
               "at https://github.com/obspy/obspy/issues.")
        raise Exception(msg)

    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = ("NLLOC HYP file seems corrupt, 'PHASE' block is corrupt.")
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    lines = dict([line.split(None, 1) for line in lines[:-1]])
    line = lines["SIGNATURE"]

    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    # new NLLoc > 6.0 seems to add prefix 'run:' before date
    if date.startswith('run:'):
        date = date[4:]
    signature = signature.strip()
    creation_time = UTCDateTime.strptime(date + time, str("%d%b%Y%Hh%Mm%S"))

    if coordinate_converter:
        # maximum likelihood origin location in km info line
        line = lines["HYPOCENTER"]
        x, y, z = coordinate_converter(*map(float, line.split()[1:7:2]))
    else:
        # maximum likelihood origin location lon lat info line
        line = lines["GEOGRAPHIC"]
        y, x, z = map(float, line.split()[8:13:2])

    # maximum likelihood origin time info line
    line = lines["GEOGRAPHIC"]

    year, month, day, hour, minute = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, month, day, hour, minute, seconds, strict=False)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_xx = float(line.split()[7])
    covariance_yy = float(line.split()[13])
    covariance_zz = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from "
        "covariance matrix as 1D marginal (Lon/Lat errors as great circle "
        "degrees) while OriginUncertainty min/max horizontal errors are "
        "calculated from 2D error ellipsoid and are therefore seemingly "
        "higher compared to 1D errors. Error estimates can be reconstructed "
        "from the following original NonLinLoc error statistics line:"
        "\nSTATISTICS " + lines["STATISTICS"])

    # goto location quality info line
    line = lines["QML_OriginQuality"].split()

    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # goto location quality info line
    line = lines["QML_OriginUncertainty"]

    # initialize so the Comment creation below cannot hit an unbound name
    # when no COMMENT line is present
    comment = None
    if "COMMENT" in lines:
        comment = lines["COMMENT"].strip()
        comment = comment.strip('\'"')
        comment = comment.strip()

    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    o = Origin()
    event.origins = [o]
    event.preferred_origin_id = o.resource_id
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string,
                              force_resource_id=False))
    if comment:
        event.comments.append(Comment(text=comment,
                                      force_resource_id=False))

    # SIGNATURE field's first item is LOCSIG, which is supposed to be
    # 'Identification of an individual, institution or other entity'
    # according to
    # http://alomax.free.fr/nlloc/soft6.00/control.html#_NLLoc_locsig_
    # so use it as author in creation info
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version,
                                       author=signature)
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version,
                                   author=signature)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates are
    # large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_xx))
    except ValueError:
        if covariance_xx < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_yy))
    except ValueError:
        if covariance_yy < 0:
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting latitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_zz) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        # network codes are not used by NonLinLoc, so they can not be known
        # when reading the .hyp file.. to conform with QuakeML standard set
        # an empty network code
        wid = WaveformStreamID(network_code="", station_code=station)
        # have to split this into ints for overflow to work correctly
        date, hourmin, sec = map(str, line[6:9])
        ymd = [int(date[:4]), int(date[4:6]), int(date[6:8])]
        hm = [int(hourmin[:2]), int(hourmin[2:4])]
        t = UTCDateTime(*(ymd + hm), strict=False) + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not
            # associate the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick for "
                       "arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    event.scope_resource_ids()

    return event
def setEventData(eventParser, arrivals, count):
    global originCount
    global eventCount
    global pickCount
    creation_info = CreationInfo(
        author='niket_engdahl_parser',
        creation_time=UTCDateTime(),
        agency_uri=ResourceIdentifier(id='smi:engdahl.ga.gov.au/ga-engdahl'),
        agency_id='ga-engdahl')

    # magnitudeSurface = Magnitude(
    #     resource_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/origin/' +
    #                                    str(originCount) + '#netMag.Ms'),
    #     mag=eventParser.ms,
    #     magnitude_type='Ms',
    #     origin_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/origin/' +
    #                                  str(originCount)),
    #     azimuthal_gap=eventParser.openaz2,
    #     creation_info=creation_info)
    origin = Origin(
        resource_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/origin/' +
                                       str(originCount)),
        time=UTCDateTime(int(str(2000 + int(eventParser.iyr))),
                         int(eventParser.mon),
                         int(eventParser.iday),
                         int(eventParser.ihr),
                         int(eventParser.min),
                         int(eventParser.sec.split('.')[0]),
                         int(eventParser.sec.split('.')[1] + '0')),
        longitude=eventParser.glon,
        latitude=eventParser.glat,
        # engdahl files report km, obspy expects m
        depth=float(eventParser.depth) * 1000,
        depth_errors=eventParser.sedep,
        method_id=ResourceIdentifier(id='EHB'),
        earth_model_id=ResourceIdentifier(id='ak135'),
        quality=OriginQuality(associated_phase_count=len(arrivals),
                              used_phase_count=len(arrivals),
                              standard_error=eventParser.se,
                              azimuthal_gap=eventParser.openaz2),
        evaluation_mode='automatic',
        creation_info=creation_info)

    magnitude = Magnitude(
        resource_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/origin/' +
                                       str(originCount) + '#netMag.Mb'),
        mag=eventParser.mb,
        magnitude_type='Mb',
        origin_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/origin/' +
                                     str(originCount)),
        azimuthal_gap=eventParser.openaz1,
        creation_info=creation_info)

    originCount += 1

    pickList = []
    arrivalList = []
    pPhaseArrival = None
    for arrParser in arrivals:
        pickOnset = None
        pol = None

        if arrParser.year and arrParser.month and arrParser.day and \
                arrParser.station:
            pPhaseArrival = arrParser
        else:
            arrParser.year = pPhaseArrival.year
            arrParser.day = pPhaseArrival.day
            arrParser.month = pPhaseArrival.month
            arrParser.station = pPhaseArrival.station
            arrParser.delta = pPhaseArrival.delta
            arrParser.dtdd = pPhaseArrival.dtdd
            arrParser.backaz = pPhaseArrival.backaz
            arrParser.focalDip = pPhaseArrival.focalDip
            arrParser.angleAzimuth = pPhaseArrival.angleAzimuth

        if arrParser.phase1 == 'LR' or arrParser.phase2 == 'LR' or \
                arrParser.hour == '24':
            continue

        if arrParser.phase1.startswith('i'):
            pickOnset = PickOnset.impulsive
            if arrParser.fm == '+':
                pol = PickPolarity.positive
            elif arrParser.fm == '-':
                pol = PickPolarity.negative
        elif arrParser.phase1.startswith('e'):
            pickOnset = PickOnset.emergent

        pick = Pick(
            resource_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/pick/' +
                                           str(pickCount)),
            time=UTCDateTime(int(str(2000 + int(arrParser.year))),
                             int(arrParser.month),
                             int(arrParser.day),
                             int(arrParser.hour),
                             int(arrParser.minute),
                             int(arrParser.second.split('.')[0]),
                             int(arrParser.second.split('.')[1] + '0')),
            waveform_id=WaveformStreamID(network_code='',
                                         station_code=arrParser.station,
                                         channel_code='BHZ'),
            method_id=ResourceIdentifier('STA/LTA'),
            backazimuth=arrParser.backaz if arrParser.backaz else None,
            onset=pickOnset,
            phase_hint=arrParser.phase,
            polarity=pol,
            evaluation_mode='automatic',
            # TO-DO comment='populate all the remaining fields here as key value',
            creation_info=creation_info)
        if not arrParser.backaz:
            print("arrParser.backaz is empty. printing the arrParser for "
                  "debugging")
        pickCount += 1
        pickList.append(pick)

        arrival = Arrival(
            pick_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/pick/' +
                                       str(pickCount - 1)),
            phase=arrParser.phase if arrParser.phase else None,
            azimuth=arrParser.backaz if arrParser.backaz else None,
            distance=arrParser.delta if arrParser.delta else None,
            # if the * has some significance, it should be accounted for.
            # ignoring for now.
            time_residual=arrParser.residual.rstrip('*'),
            time_weight=arrParser.wgt if arrParser.wgt else None,
            backazimuth_weight=arrParser.wgt if arrParser.wgt else None)
        arrivalList.append(arrival)
        if not arrParser.wgt:
            print("arrParser.wgt is empty. printing the arrParser for "
                  "debugging")
            # pprint.pprint(arrParser)

    origin.arrivals = arrivalList

    event = Event(resource_id=ResourceIdentifier(
        id='smi:engdahl.ga.gov.au/event/' + str(eventCount)),
        creation_info=creation_info,
        event_type='earthquake')

    eventCount += 1

    event.picks = pickList
    event.origins = [origin]
    event.magnitudes = [magnitude]
    event.preferred_origin_id = origin.resource_id
    event.preferred_magnitude_id = magnitude.resource_id
    return event
def full_test_event():
    """
    Function to generate a basic, full test event
    """
    test_event = Event()
    test_event.origins.append(
        Origin(time=UTCDateTime("2012-03-26") + 1.2, latitude=45.0,
               longitude=25.0, depth=15000))
    test_event.event_descriptions.append(EventDescription())
    test_event.event_descriptions[0].text = 'LE'
    test_event.creation_info = CreationInfo(agency_id='TES')
    test_event.magnitudes.append(
        Magnitude(mag=0.1, magnitude_type='ML',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[0].resource_id))
    test_event.magnitudes.append(
        Magnitude(mag=0.5, magnitude_type='Mc',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[0].resource_id))
    test_event.magnitudes.append(
        Magnitude(mag=1.3, magnitude_type='Ms',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[0].resource_id))

    # Define the test pick
    _waveform_id_1 = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                                      network_code='NZ')
    _waveform_id_2 = WaveformStreamID(station_code='WTSZ', channel_code='BH1',
                                      network_code=' ')
    # Pick to associate with amplitude
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_1, phase_hint='IAML',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"))
    # Need a second pick for coda
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_1, onset='impulsive', phase_hint='PN',
             polarity='positive', time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"))
    # Unassociated pick
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='SG',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.72,
             evaluation_mode="manual"))
    # Unassociated pick
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='PN',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.62,
             evaluation_mode="automatic"))
    # Test a generic local magnitude amplitude pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=2.0, period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m',
                  magnitude_hint='ML', category='point', type='AML'))
    # Test a coda magnitude pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=10,
                  pick_id=test_event.picks[1].resource_id,
                  waveform_id=test_event.picks[1].waveform_id, type='END',
                  category='duration', unit='s', magnitude_hint='Mc',
                  snr=2.3))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=0, phase=test_event.picks[1].phase_hint,
                pick_id=test_event.picks[1].resource_id))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=2, phase=test_event.picks[2].phase_hint,
                pick_id=test_event.picks[2].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=2, phase=test_event.picks[3].phase_hint,
                pick_id=test_event.picks[3].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25))
    # Add in error info (line E)
    test_event.origins[0].quality = OriginQuality(standard_error=0.01,
                                                  azimuthal_gap=36)
    # Origin uncertainty in Seisan is output as long-lat-depth, quakeML has
    # semi-major and semi-minor
    test_event.origins[0].origin_uncertainty = OriginUncertainty(
        confidence_ellipsoid=ConfidenceEllipsoid(
            semi_major_axis_length=3000, semi_minor_axis_length=1000,
            semi_intermediate_axis_length=2000, major_axis_plunge=20,
            major_axis_azimuth=100, major_axis_rotation=4))
    test_event.origins[0].time_errors = QuantityError(uncertainty=0.5)
    # Add in fault-plane solution info (line F) - Note have to check program
    # used to determine which fields are filled....
    test_event.focal_mechanisms.append(
        FocalMechanism(nodal_planes=NodalPlanes(
            nodal_plane_1=NodalPlane(strike=180, dip=20, rake=30,
                                     strike_errors=QuantityError(10),
                                     dip_errors=QuantityError(10),
                                     rake_errors=QuantityError(20))),
            method_id=ResourceIdentifier(
                "smi:nc.anss.org/focalMechanism/FPFIT"),
            creation_info=CreationInfo(agency_id="NC"), misfit=0.5,
            station_distribution_ratio=0.8))
    # Need to test high-precision origin and that it is preferred origin.
    # Moment tensor includes another origin
    test_event.origins.append(
        Origin(time=UTCDateTime("2012-03-26") + 1.2, latitude=45.1,
               longitude=25.2, depth=14500))
    test_event.magnitudes.append(
        Magnitude(mag=0.1, magnitude_type='MW',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[-1].resource_id))
    # Moment tensors go with focal-mechanisms
    test_event.focal_mechanisms.append(
        FocalMechanism(moment_tensor=MomentTensor(
            derived_origin_id=test_event.origins[-1].resource_id,
            moment_magnitude_id=test_event.magnitudes[-1].resource_id,
            scalar_moment=100,
            tensor=Tensor(m_rr=100, m_tt=100, m_pp=10, m_rt=1, m_rp=20,
                          m_tp=15),
            method_id=ResourceIdentifier(
                'smi:nc.anss.org/momentTensor/BLAH'))))
    return test_event
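# A short, hedged usage sketch: the test event above can be round-tripped
# through ObsPy's QuakeML writer/reader, which is the typical way such a
# fixture is exercised; the file name is an arbitrary placeholder.
from obspy import read_events
from obspy.core.event import Catalog

_cat = Catalog(events=[full_test_event()])
_cat.write("full_test_event.xml", format="QUAKEML")
_cat_back = read_events("full_test_event.xml")
assert len(_cat_back) == 1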
def _parseRecordDp(self, line, event):
    """
    Parses the 'source parameter data - primary' record Dp
    """
    source_contributor = line[2:6].strip()
    computation_type = line[6]
    exponent = self._intZero(line[7])
    scale = math.pow(10, exponent)
    centroid_origin_time = line[8:14] + "." + line[14]
    orig_time_stderr = line[15:17]
    if orig_time_stderr == "FX":
        orig_time_stderr = "Fixed"
    else:
        orig_time_stderr = self._floatWithFormat(orig_time_stderr, "2.1",
                                                 scale)
    centroid_latitude = self._floatWithFormat(line[17:21], "4.2")
    lat_type = line[21]
    if centroid_latitude is not None:
        centroid_latitude *= self._coordinateSign(lat_type)
    lat_stderr = line[22:25]
    if lat_stderr == "FX":
        lat_stderr = "Fixed"
    else:
        lat_stderr = self._floatWithFormat(lat_stderr, "3.2", scale)
    centroid_longitude = self._floatWithFormat(line[25:30], "5.2")
    lon_type = line[30]
    if centroid_longitude is not None:
        centroid_longitude *= self._coordinateSign(lon_type)
    lon_stderr = line[31:34]
    if lon_stderr == "FX":
        lon_stderr = "Fixed"
    else:
        lon_stderr = self._floatWithFormat(lon_stderr, "3.2", scale)
    centroid_depth = self._floatWithFormat(line[34:38], "4.1")
    depth_stderr = line[38:40]
    if depth_stderr == "FX" or depth_stderr == "BD":
        depth_stderr = "Fixed"
    else:
        depth_stderr = self._floatWithFormat(depth_stderr, "2.1", scale)
    station_number = self._intZero(line[40:43])
    component_number = self._intZero(line[43:46])
    station_number2 = self._intZero(line[46:48])
    component_number2 = self._intZero(line[48:51])
    # unused: half_duration = self._floatWithFormat(line[51:54], '3.1')
    moment = self._floatWithFormat(line[54:56], "2.1")
    moment_stderr = self._floatWithFormat(line[56:58], "2.1")
    moment_exponent = self._int(line[58:60])
    if (moment is not None) and (moment_exponent is not None):
        moment *= math.pow(10, moment_exponent)
    if (moment_stderr is not None) and (moment_exponent is not None):
        moment_stderr *= math.pow(10, moment_exponent)

    evid = event.resource_id.id.split("/")[-1]
    # Create a new origin only if centroid time is defined:
    origin = None
    if centroid_origin_time.strip() != ".":
        origin = Origin()
        res_id = "/".join((res_id_prefix, "origin", evid,
                           source_contributor.lower(),
                           "mw" + computation_type.lower()))
        origin.resource_id = ResourceIdentifier(id=res_id)
        origin.creation_info = CreationInfo(agency_id=source_contributor)
        date = event.origins[0].time.strftime("%Y%m%d")
        origin.time = UTCDateTime(date + centroid_origin_time)
        # Check if centroid time is on the next day:
        if origin.time < event.origins[0].time:
            origin.time += timedelta(days=1)
        self._storeUncertainty(origin.time_errors, orig_time_stderr)
        origin.latitude = centroid_latitude
        origin.longitude = centroid_longitude
        origin.depth = centroid_depth * 1000
        if lat_stderr == "Fixed" and lon_stderr == "Fixed":
            origin.epicenter_fixed = True
        else:
            self._storeUncertainty(origin.latitude_errors,
                                   self._latErrToDeg(lat_stderr))
            self._storeUncertainty(origin.longitude_errors,
                                   self._lonErrToDeg(lon_stderr,
                                                     origin.latitude))
        if depth_stderr == "Fixed":
            origin.depth_type = "operator assigned"
        else:
            origin.depth_type = "from location"
            self._storeUncertainty(origin.depth_errors, depth_stderr,
                                   scale=1000)
        quality = OriginQuality()
        quality.used_station_count = station_number + station_number2
        quality.used_phase_count = component_number + component_number2
        origin.quality = quality
        origin.type = "centroid"
        event.origins.append(origin)

    focal_mechanism = FocalMechanism()
    res_id = "/".join((res_id_prefix, "focalmechanism", evid,
                       source_contributor.lower(),
                       "mw" + computation_type.lower()))
    focal_mechanism.resource_id = ResourceIdentifier(id=res_id)
    focal_mechanism.creation_info = CreationInfo(agency_id=source_contributor)
    moment_tensor = MomentTensor()
    if origin is not None:
        moment_tensor.derived_origin_id = origin.resource_id
    else:
        # this is required for QuakeML validation:
        res_id = "/".join((res_id_prefix, "no-origin"))
        moment_tensor.derived_origin_id = ResourceIdentifier(id=res_id)
    for mag in event.magnitudes:
        if mag.creation_info.agency_id == source_contributor:
            moment_tensor.moment_magnitude_id = mag.resource_id
    res_id = "/".join((res_id_prefix, "momenttensor", evid,
                       source_contributor.lower(),
                       "mw" + computation_type.lower()))
    moment_tensor.resource_id = ResourceIdentifier(id=res_id)
    moment_tensor.scalar_moment = moment
    self._storeUncertainty(moment_tensor.scalar_moment_errors, moment_stderr)
    data_used = DataUsed()
    data_used.station_count = station_number + station_number2
    data_used.component_count = component_number + component_number2
    if computation_type == "C":
        res_id = "/".join((res_id_prefix, "methodID=CMT"))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        # CMT algorithm uses long-period body waves,
        # very-long-period surface waves and
        # intermediate period surface waves (since 2004
        # for shallow and intermediate-depth earthquakes
        # --Ekstrom et al., 2012)
        data_used.wave_type = "combined"
    elif computation_type == "M":
        res_id = "/".join((res_id_prefix, "methodID=moment_tensor"))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        # FIXME: not sure which kind of data is used by
        # "moment tensor" algorithm.
        data_used.wave_type = "unknown"
    elif computation_type == "B":
        res_id = "/".join((res_id_prefix, "methodID=broadband_data"))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        # FIXME: is 'combined' correct here?
        data_used.wave_type = "combined"
    elif computation_type == "F":
        res_id = "/".join((res_id_prefix, "methodID=P-wave_first_motion"))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        data_used.wave_type = "P waves"
    elif computation_type == "S":
        res_id = "/".join((res_id_prefix, "methodID=scalar_moment"))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        # FIXME: not sure which kind of data is used
        # for scalar moment determination.
        data_used.wave_type = "unknown"
    moment_tensor.data_used = data_used
    focal_mechanism.moment_tensor = moment_tensor
    event.focal_mechanisms.append(focal_mechanism)
    return focal_mechanism
def full_test_event():
    """
    Function to generate a basic, full test event
    """
    test_event = Event()
    test_event.origins.append(Origin())
    test_event.origins[0].time = UTCDateTime("2012-03-26") + 1.2
    test_event.event_descriptions.append(EventDescription())
    test_event.event_descriptions[0].text = 'LE'
    test_event.origins[0].latitude = 45.0
    test_event.origins[0].longitude = 25.0
    test_event.origins[0].depth = 15000
    test_event.creation_info = CreationInfo(agency_id='TES')
    test_event.origins[0].quality = OriginQuality(standard_error=0.01)
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[0].mag = 0.1
    test_event.magnitudes[0].magnitude_type = 'ML'
    test_event.magnitudes[0].creation_info = CreationInfo('TES')
    test_event.magnitudes[0].origin_id = test_event.origins[0].resource_id
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[1].mag = 0.5
    test_event.magnitudes[1].magnitude_type = 'Mc'
    test_event.magnitudes[1].creation_info = CreationInfo('TES')
    test_event.magnitudes[1].origin_id = test_event.origins[0].resource_id
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[2].mag = 1.3
    test_event.magnitudes[2].magnitude_type = 'Ms'
    test_event.magnitudes[2].creation_info = CreationInfo('TES')
    test_event.magnitudes[2].origin_id = test_event.origins[0].resource_id

    # Define the test pick
    _waveform_id_1 = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                                      network_code='NZ')
    _waveform_id_2 = WaveformStreamID(station_code='WTSZ', channel_code='BH1',
                                      network_code=' ')
    # Pick to associate with amplitude - 0
    test_event.picks = [
        Pick(waveform_id=_waveform_id_1, phase_hint='IAML',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"),
        Pick(waveform_id=_waveform_id_1, onset='impulsive', phase_hint='PN',
             polarity='positive', time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"),
        Pick(waveform_id=_waveform_id_1, phase_hint='IAML',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"),
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='SG',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.72,
             evaluation_mode="manual"),
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='PN',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.62,
             evaluation_mode="automatic")]
    # Test a generic local magnitude amplitude pick
    test_event.amplitudes = [
        Amplitude(generic_amplitude=2.0, period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m',
                  magnitude_hint='ML', category='point', type='AML'),
        Amplitude(generic_amplitude=10,
                  pick_id=test_event.picks[1].resource_id,
                  waveform_id=test_event.picks[1].waveform_id, type='END',
                  category='duration', unit='s', magnitude_hint='Mc',
                  snr=2.3),
        Amplitude(generic_amplitude=5.0, period=0.6,
                  pick_id=test_event.picks[2].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m',
                  category='point', type='AML')]
    test_event.origins[0].arrivals = [
        Arrival(time_weight=0, phase=test_event.picks[1].phase_hint,
                pick_id=test_event.picks[1].resource_id),
        Arrival(time_weight=2, phase=test_event.picks[3].phase_hint,
                pick_id=test_event.picks[3].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25),
        Arrival(time_weight=2, phase=test_event.picks[4].phase_hint,
                pick_id=test_event.picks[4].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25)]
    return test_event
def _parse_origin(self, line):
    #    1-10  i4,a1,i2,a1,i2    epicenter date (yyyy/mm/dd)
    #   12-22  i2,a1,i2,a1,f5.2  epicenter time (hh:mm:ss.ss)
    time = UTCDateTime.strptime(line[:17], '%Y/%m/%d %H:%M:')
    time += float(line[17:22])
    #      23  a1    fixed flag (f = fixed origin time solution, blank if
    #                not a fixed origin time)
    time_fixed = fixed_flag(line[22])
    #   25-29  f5.2  origin time error (seconds; blank if fixed origin time)
    time_error = float_or_none(line[24:29])
    time_error = time_error and QuantityError(uncertainty=time_error)
    #   31-35  f5.2  root mean square of time residuals (seconds)
    rms = float_or_none(line[30:35])
    #   37-44  f8.4  latitude (negative for South)
    latitude = float_or_none(line[36:44])
    #   46-54  f9.4  longitude (negative for West)
    longitude = float_or_none(line[45:54])
    #      55  a1    fixed flag (f = fixed epicenter solution, blank if not
    #                a fixed epicenter solution)
    epicenter_fixed = fixed_flag(line[54])
    #   56-60  f5.1  semi-major axis of 90% ellipse or its estimate
    #                (km, blank if fixed epicenter)
    _uncertainty_major_m = float_or_none(line[55:60], multiplier=1e3)
    #   62-66  f5.1  semi-minor axis of 90% ellipse or its estimate
    #                (km, blank if fixed epicenter)
    _uncertainty_minor_m = float_or_none(line[61:66], multiplier=1e3)
    #   68-70  i3    strike (0 <= x <= 360) of error ellipse clock-wise from
    #                North (degrees)
    _uncertainty_major_azimuth = float_or_none(line[67:70])
    #   72-76  f5.1  depth (km)
    depth = float_or_none(line[71:76], multiplier=1e3)
    #      77  a1    fixed flag (f = fixed depth station, d = depth phases,
    #                blank if not a fixed depth)
    depth_fixed = fixed_flag(line[76])
    #   79-82  f4.1  depth error 90% (km; blank if fixed depth)
    depth_error = float_or_none(line[78:82], multiplier=1e3)
    #   84-87  i4    number of defining phases
    used_phase_count = int_or_none(line[83:87])
    #   89-92  i4    number of defining stations
    used_station_count = int_or_none(line[88:92])
    #   94-96  i3    gap in azimuth coverage (degrees)
    azimuthal_gap = float_or_none(line[93:96])
    #  98-103  f6.2  distance to closest station (degrees)
    minimum_distance = float_or_none(line[97:103])
    # 105-110  f6.2  distance to furthest station (degrees)
    maximum_distance = float_or_none(line[104:110])
    #     112  a1    analysis type: (a = automatic, m = manual, g = guess)
    evaluation_mode, evaluation_status = \
        evaluation_mode_and_status(line[111])
    #     114  a1    location method: (i = inversion, p = pattern
    #                recognition, g = ground truth, o = other)
    location_method = LOCATION_METHODS[line[113].strip().lower()]
    # 116-117  a2    event type:
    # XXX event type and event type certainty is specified per origin,
    # XXX not sure how to best handle this, for now only use it if
    # XXX information on the individual origins do not clash.. not sure yet
    # XXX how to identify the preferred origin..
    event_type, event_type_certainty = \
        EVENT_TYPE_CERTAINTY[line[115:117].strip().lower()]
    # 119-127  a9    author of the origin
    author = line[118:127].strip()
    # 129-136  a8    origin identification
    origin_id = self._construct_id(['origin', line[128:136].strip()])

    # do some combinations
    depth_error = depth_error and dict(uncertainty=depth_error,
                                       confidence_level=90)
    if all(v is not None for v in (_uncertainty_major_m,
                                   _uncertainty_minor_m,
                                   _uncertainty_major_azimuth)):
        origin_uncertainty = OriginUncertainty(
            min_horizontal_uncertainty=_uncertainty_minor_m,
            max_horizontal_uncertainty=_uncertainty_major_m,
            azimuth_max_horizontal_uncertainty=_uncertainty_major_azimuth,
            preferred_description='uncertainty ellipse',
            confidence_level=90)
        # event init always sets an empty QuantityError, even when
        # specifying None, which is strange
        for key in ['confidence_ellipsoid']:
            setattr(origin_uncertainty, key, None)
    else:
        origin_uncertainty = None
    origin_quality = OriginQuality(standard_error=rms,
                                   used_phase_count=used_phase_count,
                                   used_station_count=used_station_count,
                                   azimuthal_gap=azimuthal_gap,
                                   minimum_distance=minimum_distance,
                                   maximum_distance=maximum_distance)
    comments = []
    if location_method:
        comments.append(
            self._make_comment('location method: ' + location_method))
    if author:
        creation_info = CreationInfo(author=author)
    else:
        creation_info = None

    # assemble whole event
    origin = Origin(time=time,
                    resource_id=origin_id,
                    longitude=longitude,
                    latitude=latitude,
                    depth=depth,
                    depth_errors=depth_error,
                    origin_uncertainty=origin_uncertainty,
                    time_fixed=time_fixed,
                    epicenter_fixed=epicenter_fixed,
                    quality=origin_quality,
                    comments=comments,
                    creation_info=creation_info)
    # event init always sets an empty QuantityError, even when specifying
    # None, which is strange; only null out the errors that were left unset
    for key in ('time_errors', 'longitude_errors', 'latitude_errors',
                'depth_errors'):
        if not getattr(origin, key):
            setattr(origin, key, None)
    return origin, event_type, event_type_certainty
def surf_events_to_cat(loc_file, pick_file):
    """
    Take location files (hypoinverse formatted) and picks (format TBD)
    and creates a single obspy catalog for later use and dissemination.

    :param loc_file: File path
    :param pick_file: File path
    :return: obspy.core.Catalog
    """
    # Read/parse location file and create Events for each
    surf_cat = Catalog()
    # Parse the pick file to a dictionary
    pick_dict = parse_picks(pick_file)
    with open(loc_file, 'r') as f:
        next(f)
        for ln in f:
            ln = ln.strip('\n')
            line = ln.split(',')
            eid = line[0]
            if eid not in pick_dict:
                print('No picks for this location, skipping for now.')
                continue
            ot = UTCDateTime(line[1])
            hmc_east = float(line[2])
            hmc_north = float(line[3])
            hmc_elev = float(line[4])
            gap = float(line[-5])
            rms = float(line[-3])
            errXY = float(line[-2])
            errZ = float(line[-1])
            converter = SURF_converter()
            lon, lat, elev = converter.to_lonlat((hmc_east, hmc_north,
                                                  hmc_elev))
            o = Origin(time=ot, longitude=lon, latitude=lat,
                       depth=130 - elev)
            o.origin_uncertainty = OriginUncertainty()
            o.quality = OriginQuality()
            ou = o.origin_uncertainty
            oq = o.quality
            ou.horizontal_uncertainty = errXY * 1e3
            ou.preferred_description = "horizontal uncertainty"
            o.depth_errors.uncertainty = errZ * 1e3
            oq.standard_error = rms
            oq.azimuthal_gap = gap
            extra = AttribDict({
                'hmc_east': {
                    'value': hmc_east,
                    'namespace': 'smi:local/hmc'
                },
                'hmc_north': {
                    'value': hmc_north,
                    'namespace': 'smi:local/hmc'
                },
                'hmc_elev': {
                    'value': hmc_elev,
                    'namespace': 'smi:local/hmc'
                },
                'hmc_eid': {
                    'value': eid,
                    'namespace': 'smi:local/hmc'
                }
            })
            o.extra = extra
            rid = ResourceIdentifier(id=ot.strftime('%Y%m%d%H%M%S%f'))
            # Dummy magnitude of 1. for all events until further notice
            mag = Magnitude(mag=1., mag_errors=QuantityError(uncertainty=1.))
            ev = Event(origins=[o], magnitudes=[mag],
                       picks=pick_dict[eid], resource_id=rid)
            surf_cat.append(ev)
    return surf_cat
def __toOrigin(parser, origin_el):
    """
    Parses a given origin etree element.

    :type parser: :class:`~obspy.core.util.xmlwrapper.XMLParser`
    :param parser: Open XMLParser object.
    :type origin_el: etree.element
    :param origin_el: origin element to be parsed.
    :return: A ObsPy :class:`~obspy.core.event.Origin` object.
    """
    global CURRENT_TYPE

    origin = Origin()
    origin.resource_id = ResourceIdentifier(
        prefix="/".join([RESOURCE_ROOT, "origin"]))

    # I guess setting the program used as the method id is fine.
    origin.method_id = "%s/location_method/%s/1" % (
        RESOURCE_ROOT, parser.xpath2obj('program', origin_el))
    if str(origin.method_id).lower().endswith("none"):
        origin.method_id = None

    # Standard parameters.
    origin.time, origin.time_errors = \
        __toTimeQuantity(parser, origin_el, "time")
    origin.latitude, origin_latitude_error = \
        __toFloatQuantity(parser, origin_el, "latitude")
    origin.longitude, origin_longitude_error = \
        __toFloatQuantity(parser, origin_el, "longitude")
    origin.depth, origin.depth_errors = \
        __toFloatQuantity(parser, origin_el, "depth")

    if origin_longitude_error:
        origin_longitude_error = origin_longitude_error["uncertainty"]
    if origin_latitude_error:
        origin_latitude_error = origin_latitude_error["uncertainty"]

    # Figure out the depth type.
    depth_type = parser.xpath2obj("depth_type", origin_el)
    # Map Seishub specific depth type to the QuakeML depth type.
    if depth_type == "from location program":
        depth_type = "from location"
    if depth_type is not None:
        origin.depth_type = depth_type

    # XXX: CHECK DEPTH ORIENTATION!!
    if CURRENT_TYPE == "seiscomp3":
        origin.depth *= 1000
        if origin.depth_errors.uncertainty:
            origin.depth_errors.uncertainty *= 1000
    else:
        # Convert to m.
        origin.depth *= -1000
        if origin.depth_errors.uncertainty:
            origin.depth_errors.uncertainty *= 1000

    # Earth model.
    earth_mod = parser.xpath2obj('earth_mod', origin_el, str)
    if earth_mod:
        earth_mod = earth_mod.split()
        earth_mod = ",".join(earth_mod)
        origin.earth_model_id = "%s/earth_model/%s/1" % (RESOURCE_ROOT,
                                                         earth_mod)

    if (origin_latitude_error is None or origin_longitude_error is None) and \
            CURRENT_TYPE not in ["seiscomp3", "toni"]:
        raise Exception(
            "Origin is missing latitude and/or longitude errors for "
            "file type '%s'." % CURRENT_TYPE)

    if origin_latitude_error and origin_longitude_error:
        if CURRENT_TYPE in ["baynet", "obspyck"]:
            uncert = OriginUncertainty()
            if origin_latitude_error > origin_longitude_error:
                uncert.azimuth_max_horizontal_uncertainty = 0
            else:
                uncert.azimuth_max_horizontal_uncertainty = 90
            uncert.min_horizontal_uncertainty, \
                uncert.max_horizontal_uncertainty = \
                sorted([origin_longitude_error, origin_latitude_error])
            uncert.min_horizontal_uncertainty *= 1000.0
            uncert.max_horizontal_uncertainty *= 1000.0
            uncert.preferred_description = "uncertainty ellipse"
            origin.origin_uncertainty = uncert
        elif CURRENT_TYPE == "earthworm":
            uncert = OriginUncertainty()
            uncert.horizontal_uncertainty = origin_latitude_error
            uncert.horizontal_uncertainty *= 1000.0
            uncert.preferred_description = "horizontal uncertainty"
            origin.origin_uncertainty = uncert
        elif CURRENT_TYPE in ["seiscomp3", "toni"]:
            pass
        else:
            raise Exception("Unknown CURRENT_TYPE: %s" % CURRENT_TYPE)

    # Parse the OriginQuality if applicable.
    if not origin_el.xpath("originQuality"):
        return origin

    origin_quality_el = origin_el.xpath("originQuality")[0]
    origin.quality = OriginQuality()
    origin.quality.associated_phase_count = \
        parser.xpath2obj("associatedPhaseCount", origin_quality_el, int)
    # QuakeML does apparently not distinguish between P and S wave phase
    # count. Some Seishub event files do.
    p_phase_count = parser.xpath2obj("P_usedPhaseCount",
                                     origin_quality_el, int)
    s_phase_count = parser.xpath2obj("S_usedPhaseCount",
                                     origin_quality_el, int)
    # Use both in case they are set.
    if p_phase_count is not None and s_phase_count is not None:
        phase_count = p_phase_count + s_phase_count
    # Otherwise the total usedPhaseCount should be specified.
    else:
        phase_count = parser.xpath2obj("usedPhaseCount",
                                       origin_quality_el, int)
    # Additionally preserve the Seishub specific P/S counts as namespaced
    # extra attributes, since OriginQuality has no fields for them.
    if p_phase_count is not None:
        origin.quality.setdefault("extra", AttribDict())
        origin.quality.extra.usedPhaseCountP = {'value': p_phase_count,
                                                'namespace': NAMESPACE}
    if s_phase_count is not None:
        origin.quality.setdefault("extra", AttribDict())
        origin.quality.extra.usedPhaseCountS = {'value': s_phase_count,
                                                'namespace': NAMESPACE}
    origin.quality.used_phase_count = phase_count

    associated_station_count = \
        parser.xpath2obj("associatedStationCount", origin_quality_el, int)
    used_station_count = parser.xpath2obj("usedStationCount",
                                          origin_quality_el, int)
    depth_phase_count = parser.xpath2obj("depthPhaseCount",
                                         origin_quality_el, int)
    standard_error = parser.xpath2obj("standardError",
                                      origin_quality_el, float)
    azimuthal_gap = parser.xpath2obj("azimuthalGap",
                                     origin_quality_el, float)
    secondary_azimuthal_gap = \
        parser.xpath2obj("secondaryAzimuthalGap", origin_quality_el, float)
    ground_truth_level = parser.xpath2obj("groundTruthLevel",
                                          origin_quality_el, str)
    minimum_distance = parser.xpath2obj("minimumDistance",
                                        origin_quality_el, float)
    maximum_distance = parser.xpath2obj("maximumDistance",
                                        origin_quality_el, float)
    median_distance = parser.xpath2obj("medianDistance",
                                       origin_quality_el, float)
    # Distances are given in km but QuakeML expects degrees.
    if minimum_distance is not None:
        minimum_distance = kilometer2degrees(minimum_distance)
    if maximum_distance is not None:
        maximum_distance = kilometer2degrees(maximum_distance)
    if median_distance is not None:
        median_distance = kilometer2degrees(median_distance)

    if associated_station_count is not None:
        origin.quality.associated_station_count = associated_station_count
    if used_station_count is not None:
        origin.quality.used_station_count = used_station_count
    if depth_phase_count is not None:
        origin.quality.depth_phase_count = depth_phase_count
    if standard_error is not None and not math.isnan(standard_error):
        origin.quality.standard_error = standard_error
    if azimuthal_gap is not None:
        origin.quality.azimuthal_gap = azimuthal_gap
    if secondary_azimuthal_gap is not None:
        origin.quality.secondary_azimuthal_gap = secondary_azimuthal_gap
    if ground_truth_level is not None:
        origin.quality.ground_truth_level = ground_truth_level
    if minimum_distance is not None:
        origin.quality.minimum_distance = minimum_distance
    if maximum_distance is not None:
        origin.quality.maximum_distance = maximum_distance
    if median_distance is not None and not math.isnan(median_distance):
        origin.quality.median_distance = median_distance

    return origin
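The namespaced extra mechanism used above (and in surf_events_to_cat) is ObsPy's documented way to carry non-QuakeML fields through to QuakeML output. A minimal self-contained sketch, with a made-up namespace and attribute name:

    from obspy.core.event import Origin
    from obspy.core.util import AttribDict

    origin = Origin(latitude=46.0, longitude=7.5)
    # Custom attributes live under 'extra' as {value, namespace} dicts;
    # ObsPy writes them out as elements of the given namespace.
    origin.extra = AttribDict({
        'usedPhaseCountP': {'value': 12, 'namespace': 'smi:local/example'},
    })
    print(origin.extra.usedPhaseCountP['value'])  # 12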
def _parse_first_line_origin(self, line, event, magnitudes):
    """
    Parse the first line of origin data.

    :type line: str
    :param line: Line to parse.
    :type event: :class:`~obspy.core.event.event.Event`
    :param event: Event of the origin.
    :type magnitudes: list of
        :class:`~obspy.core.event.magnitude.Magnitude`
    :param magnitudes: Store magnitudes in a list to keep their positions.
    :rtype: :class:`~obspy.core.event.origin.Origin`,
        :class:`~obspy.core.event.resourceid.ResourceIdentifier`
    :returns: Parsed origin or None, resource identifier of the origin.
    """
    magnitude_types = []
    magnitude_values = []
    magnitude_station_counts = []

    fields = self.fields['line_1']

    time_origin = line[fields['time']].strip()
    time_fixed_flag = line[fields['time_fixf']].strip()
    latitude = line[fields['lat']].strip()
    longitude = line[fields['lon']].strip()
    epicenter_fixed_flag = line[fields['epicenter_fixf']].strip()
    depth = line[fields['depth']].strip()
    depth_fixed_flag = line[fields['depth_fixf']].strip()
    phase_count = line[fields['n_def']].strip()
    station_count = line[fields['n_sta']].strip()
    azimuthal_gap = line[fields['gap']].strip()
    for i in (1, 2, 3):
        magnitude_types.append(line[fields['mag_type_%d' % i]].strip())
        magnitude_values.append(line[fields['mag_%d' % i]].strip())
        magnitude_station_counts.append(
            line[fields['mag_n_sta_%d' % i]].strip())
    author = line[fields['author']].strip()
    origin_id = line[fields['id']].strip()

    origin = Origin()
    origin.quality = OriginQuality()

    try:
        origin.time = UTCDateTime(time_origin.replace('/', '-'))
        origin.latitude = float(latitude)
        origin.longitude = float(longitude)
    except (TypeError, ValueError):
        self._warn('Missing origin data, skipping event')
        return None, None

    origin.time_fixed = time_fixed_flag.lower() == 'f'
    origin.epicenter_fixed = epicenter_fixed_flag.lower() == 'f'

    try:
        # Convert value from km to m
        origin.depth = float(depth) * 1000
    except ValueError:
        pass
    try:
        origin.depth_type = DEPTH_TYPES[depth_fixed_flag]
    except KeyError:
        origin.depth_type = OriginDepthType('from location')
    try:
        origin.quality.used_phase_count = int(phase_count)
        origin.quality.associated_phase_count = int(phase_count)
    except ValueError:
        pass
    try:
        origin.quality.used_station_count = int(station_count)
        origin.quality.associated_station_count = int(station_count)
    except ValueError:
        pass
    try:
        origin.quality.azimuthal_gap = float(azimuthal_gap)
    except ValueError:
        pass

    self.author = author
    origin.creation_info = self._get_creation_info()

    public_id = "origin/%s" % origin_id
    origin_res_id = self._get_res_id(public_id)

    for i in range(3):
        try:
            magnitude = Magnitude()
            magnitude.creation_info = self._get_creation_info()
            magnitude.magnitude_type = magnitude_types[i]
            magnitude.mag = float(magnitude_values[i])
            magnitude.station_count = int(magnitude_station_counts[i])
            magnitude.origin_id = origin_res_id
            magnitudes.append(magnitude)
            event.magnitudes.append(magnitude)
        except ValueError:
            # Magnitude can be empty but we need to keep the
            # position between mag1, mag2 or mag3.
            magnitudes.append(None)

    return origin, origin_res_id
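The magnitudes list deliberately keeps None placeholders so that later records can still address mag 1, 2 or 3 by position. A toy sketch of the same pattern, with made-up values:

    # The second magnitude field is blank; its slot must survive so a
    # later record referring to "magnitude 3" indexes the right entry.
    raw = [('mb', '5.1'), ('', ''), ('MS', '5.4')]
    slots = []
    for mag_type, value in raw:
        try:
            slots.append((mag_type, float(value)))
        except ValueError:
            slots.append(None)  # keep the empty magnitude's position
    assert slots[2] == ('MS', 5.4)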
        'distance (deg)': ('distance', float),
        'theo. azimuth (deg)': ('azimuth', float),
        'weight': ('time_weight', float),
    },
    'origin': {
        'origin time': ('time', to_utcdatetime),
        'latitude': ('latitude', float),
        'longitude': ('longitude', float),
        'depth (km)': ('depth', _km2m),
        'error in origin time': ('time_errors', float),
        'error in latitude (km)': ('latitude_errors', _km2deg),
        'error in longitude (km)': ('longitude_errors', _km2deg),
        # (correction for lat in _read_evt)
        'error in depth (km)': ('depth_errors', _km2m),
        'no. of stations used': (
            'quality', lambda x: OriginQuality(used_station_count=int(x))),
        'source region': ('region', str)
    },
    'origin_uncertainty': {
        'error ellipse major': ('max_horizontal_uncertainty', _km2m),
        'error ellipse minor': ('min_horizontal_uncertainty', _km2m),
        'error ellipse strike': ('azimuth_max_horizontal_uncertainty', float)
    },
    'event': {
        'event type': ('event_type', _event_type)
    }
    # no dict for magnitudes, these are handled by function _mag
}

# define supported keys just for documentation
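For illustration, a sketch of how such a key-to-(attribute, converter) mapping is typically applied while parsing; the mapping subset and the input pairs below are made up:

    from obspy.core.event import Origin

    # Hypothetical subset of the 'origin' mapping above.
    ORIGIN_MAP = {
        'latitude': ('latitude', float),
        'longitude': ('longitude', float),
        'depth (km)': ('depth', lambda x: float(x) * 1000),  # km -> m
    }

    def apply_origin_map(pairs, mapping=ORIGIN_MAP):
        """Build an Origin from (label, raw value) pairs."""
        origin = Origin()
        for label, raw in pairs:
            if label in mapping:  # unsupported keys are ignored
                attr, convert = mapping[label]
                setattr(origin, attr, convert(raw))
        return origin

    o = apply_origin_map([('latitude', '47.76'), ('depth (km)', '8.2')])
    assert o.depth == 8200.0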
def sdxtoquakeml(sdx_dir, out_xml,
                 time_uncertainties=[0.1, 0.2, 0.5, 0.8, 1.5],
                 catalog_description="", catalog_version="",
                 agency_id="", author="", vel_mod_id=""):
    """
    Convert SDX to QuakeML format using ObsPy inventory structure.
    SDX filename prefix is stored under event description.

    Input parameters:
        - sdx_dir: directory containing sdx files (required)
        - out_xml: Filename of quakeML file (required)
        - time_uncertainties: List containing time uncertainties in seconds
          for mapping from weights 0-4, respectively (optional)
        - catalog_description (optional)
        - catalog_version (optional)
        - agency_id (optional)
        - author (optional)
        - vel_mod_id (optional)
    Output:
        - xml catalog in QuakeML format.
    """
    # Prepare catalog
    cat = Catalog(description=catalog_description,
                  creation_info=CreationInfo(author=author,
                                             agency_id=agency_id,
                                             version=catalog_version))

    # Read in sdx files in directory, recursively
    files = glob.glob("{:}/**/*.sdx".format(sdx_dir), recursive=True)
    if len(files) == 0:
        print("No SDX files found in path. Exiting")
        return

    for sdx_file_path in files:
        print("Working on ", sdx_file_path.split('/')[-1])

        # Set-up event
        evt_id = (sdx_file_path.split('/')[-1])[:-4]
        event = Event(event_type="earthquake",
                      creation_info=CreationInfo(author=author,
                                                 agency_id=agency_id),
                      event_descriptions=[EventDescription(text=evt_id)])

        # Get station details, append to arrays
        sdx_file = open(sdx_file_path, "r")
        stations = []
        for line in sdx_file:
            if line.rstrip() == "station":
                sdxstation = list(islice(sdx_file, 5))
                stations.append([sdxstation[1].split()[0],
                                 float(sdxstation[2].split()[0]),
                                 float(sdxstation[3].split()[0]),
                                 float(sdxstation[4].split()[0])])
        sdx_file.close()

        # Find origin details, append to origin object
        sdx_file = open(sdx_file_path, "r")
        found_origin = False
        for line in sdx_file:
            if line.rstrip() == "origin":
                found_origin = True
                sdxorigin = list(islice(sdx_file, 17))
                orig_time = ("{:}T{:}".format(
                    sdxorigin[1][0:10].replace(".", "-"),
                    sdxorigin[1][11:23]))
                evt_lat = float(sdxorigin[2].split()[0])
                evt_lon = float(sdxorigin[3].split()[0])
                evt_depth = float(sdxorigin[4].split()[0])
                creation_time = UTCDateTime("{:}T{:}".format(
                    sdxorigin[16].split()[6][0:10].replace(".", "-"),
                    sdxorigin[16].split()[6][11:23]))
                num_arrivals = int(sdxorigin[12].split()[0])
                num_arrivals_p = (int(sdxorigin[12].split()[0]) -
                                  int(sdxorigin[12].split()[1]))
                min_dist = float(sdxorigin[12].split()[9])
                max_dist = float(sdxorigin[12].split()[10])
                med_dist = float(sdxorigin[12].split()[11])
                max_az_gap = float(sdxorigin[12].split()[6])

                origin = Origin(time=UTCDateTime(orig_time),
                                longitude=evt_lon, latitude=evt_lat,
                                depth=evt_depth * -1000,
                                earth_model_id=vel_mod_id,
                                origin_type="hypocenter",
                                evaluation_mode="manual",
                                evaluation_status="confirmed",
                                method_id=ResourceIdentifier(
                                    id="SDX_hypo71"),
                                creation_info=CreationInfo(
                                    creation_time=creation_time,
                                    author=author, agency_id=agency_id),
                                quality=OriginQuality(
                                    associated_phase_count=num_arrivals,
                                    used_phase_count=num_arrivals,
                                    associated_station_count=num_arrivals_p,
                                    used_station_count=num_arrivals_p,
                                    azimuthal_gap=max_az_gap,
                                    minimum_distance=min_dist,
                                    maximum_distance=max_dist,
                                    median_distance=med_dist))
                event.origins.append(origin)
        sdx_file.close()

        # Skip event if no computed origin
        if found_origin is False:
            print("No origin found ... skipping event")
            continue

        # Get pick details, append to pick and arrival objects
        sdx_file = open(sdx_file_path, "r")
        found_pick = False
        for line in sdx_file:
            if line.rstrip() == "pick":
                found_pick = True
                sdxpick = list(islice(sdx_file, 15))
                pick_time = UTCDateTime("{:}T{:}".format(
                    sdxpick[1][0:10].replace(".", "-"),
                    sdxpick[1][11:23]))
                network = sdxpick[2].split()[0]
                station = sdxpick[2].split()[1]
                location = sdxpick[2].split()[2]
                if "NOT_SET" in location:
                    location = ""
                channel = sdxpick[2].split()[3]
                onset = sdxpick[8].split()[0]
                if onset == "0":
                    pickonset = "emergent"
                elif onset == "1":
                    pickonset = "impulsive"
                elif onset == "2":
                    pickonset = "questionable"
                else:
                    # Avoid silently reusing the previous pick's onset
                    pickonset = None
                phase = sdxpick[9].split()[0]
                polarity = sdxpick[10].split()[0]
                if polarity == "0":
                    pol = "positive"
                elif polarity == "1":
                    pol = "negative"
                elif polarity == "2":
                    pol = "undecidable"
                else:
                    # Avoid silently reusing the previous pick's polarity
                    pol = None
                weight = int(sdxpick[11].split()[0])
                creation_time = UTCDateTime("{:}T{:}".format(
                    sdxpick[14].split()[6][0:10].replace(".", "-"),
                    sdxpick[14].split()[6][11:23]))

                pick = Pick(time=pick_time,
                            waveform_id=WaveformStreamID(
                                network_code=network,
                                station_code=station,
                                location_code=location,
                                channel_code=channel),
                            time_errors=time_uncertainties[weight],
                            evaluation_mode="manual",
                            evaluation_status="confirmed",
                            onset=pickonset, phase_hint=phase,
                            polarity=pol,
                            method_id=ResourceIdentifier(id="SDX"),
                            creation_info=CreationInfo(
                                creation_time=creation_time))
                event.picks.append(pick)

                # Compute azimuth, distance, append to arrival object
                for i in range(0, len(stations)):
                    if stations[i][0] == station:
                        azimuth = (gps2dist_azimuth(evt_lat, evt_lon,
                                                    stations[i][1],
                                                    stations[i][2])[1])
                        dist_deg = locations2degrees(evt_lat, evt_lon,
                                                     stations[i][1],
                                                     stations[i][2])
                        arrival = Arrival(phase=phase,
                                          pick_id=pick.resource_id,
                                          azimuth=azimuth,
                                          distance=dist_deg,
                                          time_weight=1.00)
                        event.origins[0].arrivals.append(arrival)
        # Close here so the file is not leaked when the event is skipped
        sdx_file.close()

        # Skip event if no picks
        if found_pick is False:
            print("No picks found ... skipping event")
            continue

        # Set preferred origin and append event to catalogue
        event.preferred_origin_id = event.origins[0].resource_id
        cat.events.append(event)

    cat.write(out_xml, format="QUAKEML")
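A hypothetical invocation; the directory, output path, and metadata values are placeholders:

    sdxtoquakeml("sdx_events", "catalog.xml",
                 catalog_description="Example conversion",
                 agency_id="XX", author="analyst",
                 vel_mod_id="smi:local/velmodel/regional1d")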
def _readheader(f):
    """
    Internal header reader.

    :type f: file
    :param f: File open in read-mode.

    :returns: :class:`~obspy.core.event.event.Event`
    """
    f.seek(0)
    # Base populate to allow for empty parts of file
    new_event = Event()
    topline = _get_headline(f=f)
    if not topline:
        raise NordicParsingError('No header found, or incorrect '
                                 'formatting: corrupt s-file')
    try:
        sfile_seconds = int(topline[16:18])
        if sfile_seconds == 60:
            sfile_seconds = 0
            add_seconds = 60
        else:
            add_seconds = 0
        new_event.origins.append(Origin())
        new_event.origins[0].time = UTCDateTime(
            int(topline[1:5]), int(topline[6:8]), int(topline[8:10]),
            int(topline[11:13]), int(topline[13:15]), sfile_seconds,
            int(topline[19:20]) * 100000) + add_seconds
    except Exception:
        raise NordicParsingError("Couldn't read a date from sfile")
    # new_event.loc_mod_ind=topline[20]
    new_event.event_descriptions.append(EventDescription())
    new_event.event_descriptions[0].text = topline[21:23]
    # new_event.ev_id=topline[22]
    try:
        new_event.origins[0].latitude = float(topline[23:30])
        new_event.origins[0].longitude = float(topline[31:38])
        new_event.origins[0].depth = float(topline[39:43]) * 1000
    except ValueError:
        # The origin 'requires' a lat & long
        new_event.origins[0].latitude = None
        new_event.origins[0].longitude = None
        new_event.origins[0].depth = None
    # new_event.depth_ind = topline[44]
    # new_event.loc_ind = topline[45]
    new_event.creation_info = CreationInfo(agency_id=topline[45:48].strip())
    ksta = Comment(text='Number of stations=' + topline[49:51].strip())
    new_event.origins[0].comments.append(ksta)
    if _float_conv(topline[51:55]) is not None:
        new_event.origins[0].quality = OriginQuality(
            standard_error=_float_conv(topline[51:55]))
    # Read in magnitudes if they are there.
    for index in [59, 67, 75]:
        if not topline[index].isspace():
            new_event.magnitudes.append(Magnitude())
            new_event.magnitudes[-1].mag = _float_conv(
                topline[index - 3:index])
            new_event.magnitudes[-1].magnitude_type = \
                _nortoevmag(topline[index])
            new_event.magnitudes[-1].creation_info = CreationInfo(
                agency_id=topline[index + 1:index + 4].strip())
            new_event.magnitudes[-1].origin_id = \
                new_event.origins[0].resource_id
    # Set the useful things like preferred magnitude and preferred origin
    new_event.preferred_origin_id = new_event.origins[0].resource_id
    try:
        # Select moment first, then local, then body, surface and coda
        # magnitudes, in that order of preference.
        mag_filter = ['MW', 'Mw', 'ML', 'Ml', 'MB', 'Mb',
                      'MS', 'Ms', 'MC', 'Mc']
        _magnitudes = [(m.magnitude_type, m.resource_id)
                       for m in new_event.magnitudes]
        preferred_magnitude = sorted(
            _magnitudes, key=lambda x: mag_filter.index(x[0]))[0]
        new_event.preferred_magnitude_id = preferred_magnitude[1]
    except (ValueError, IndexError):
        # If there is a magnitude not specified in filter
        try:
            new_event.preferred_magnitude_id = \
                new_event.magnitudes[0].resource_id
        except IndexError:
            pass
    return new_event
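The magnitude-preference sort above is self-contained enough to demonstrate in isolation; a sketch with made-up magnitudes:

    from obspy.core.event import Event, Magnitude

    event = Event(magnitudes=[Magnitude(mag=4.1, magnitude_type='ML'),
                              Magnitude(mag=4.3, magnitude_type='Mw')])
    preference = ['MW', 'Mw', 'ML', 'Ml', 'MB', 'Mb', 'MS', 'Ms', 'MC', 'Mc']
    # Earliest type in the preference list wins; a ValueError for types
    # not in the list is handled by the fallback in _readheader.
    best = sorted(event.magnitudes,
                  key=lambda m: preference.index(m.magnitude_type))[0]
    event.preferred_magnitude_id = best.resource_id
    assert event.preferred_magnitude().magnitude_type == 'Mw'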
def _parseRecordDp(self, line, event):
    """
    Parses the 'source parameter data - primary' record Dp
    """
    source_contributor = line[2:6].strip()
    computation_type = line[6]
    exponent = self._intZero(line[7])
    scale = math.pow(10, exponent)
    centroid_origin_time = line[8:14] + '.' + line[14]
    orig_time_stderr = line[15:17]
    if orig_time_stderr == 'FX':
        orig_time_stderr = 'Fixed'
    else:
        orig_time_stderr =\
            self._floatWithFormat(orig_time_stderr, '2.1', scale)
    centroid_latitude = self._floatWithFormat(line[17:21], '4.2')
    lat_type = line[21]
    if centroid_latitude is not None:
        centroid_latitude *= self._coordinateSign(lat_type)
    lat_stderr = line[22:25]
    if lat_stderr == 'FX':
        lat_stderr = 'Fixed'
    else:
        lat_stderr = self._floatWithFormat(lat_stderr, '3.2', scale)
    centroid_longitude = self._floatWithFormat(line[25:30], '5.2')
    lon_type = line[30]
    if centroid_longitude is not None:
        centroid_longitude *= self._coordinateSign(lon_type)
    lon_stderr = line[31:34]
    if lon_stderr == 'FX':
        lon_stderr = 'Fixed'
    else:
        lon_stderr = self._floatWithFormat(lon_stderr, '3.2', scale)
    centroid_depth = self._floatWithFormat(line[34:38], '4.1')
    depth_stderr = line[38:40]
    if depth_stderr == 'FX' or depth_stderr == 'BD':
        depth_stderr = 'Fixed'
    else:
        depth_stderr = self._floatWithFormat(depth_stderr, '2.1', scale)
    station_number = self._intZero(line[40:43])
    component_number = self._intZero(line[43:46])
    station_number2 = self._intZero(line[46:48])
    component_number2 = self._intZero(line[48:51])
    #unused: half_duration = self._floatWithFormat(line[51:54], '3.1')
    moment = self._floatWithFormat(line[54:56], '2.1')
    moment_stderr = self._floatWithFormat(line[56:58], '2.1')
    moment_exponent = self._int(line[58:60])
    if (moment is not None) and (moment_exponent is not None):
        moment *= math.pow(10, moment_exponent)
    if (moment_stderr is not None) and (moment_exponent is not None):
        moment_stderr *= math.pow(10, moment_exponent)

    evid = event.resource_id.id.split('/')[-1]
    #Create a new origin only if centroid time is defined:
    origin = None
    if centroid_origin_time.strip() != '.':
        origin = Origin()
        res_id = '/'.join((res_id_prefix, 'origin',
                           evid, source_contributor.lower(),
                           'mw' + computation_type.lower()))
        origin.resource_id = ResourceIdentifier(id=res_id)
        origin.creation_info =\
            CreationInfo(agency_id=source_contributor)
        date = event.origins[0].time.strftime('%Y%m%d')
        origin.time = UTCDateTime(date + centroid_origin_time)
        #Check if centroid time is on the next day:
        if origin.time < event.origins[0].time:
            origin.time += timedelta(days=1)
        self._storeUncertainty(origin.time_errors, orig_time_stderr)
        origin.latitude = centroid_latitude
        origin.longitude = centroid_longitude
        origin.depth = centroid_depth * 1000
        if lat_stderr == 'Fixed' and lon_stderr == 'Fixed':
            origin.epicenter_fixed = True
        else:
            self._storeUncertainty(origin.latitude_errors,
                                   self._latErrToDeg(lat_stderr))
            self._storeUncertainty(
                origin.longitude_errors,
                self._lonErrToDeg(lon_stderr, origin.latitude))
        if depth_stderr == 'Fixed':
            origin.depth_type = 'operator assigned'
        else:
            origin.depth_type = 'from location'
            self._storeUncertainty(origin.depth_errors,
                                   depth_stderr, scale=1000)
        quality = OriginQuality()
        quality.used_station_count =\
            station_number + station_number2
        quality.used_phase_count =\
            component_number + component_number2
        origin.quality = quality
        origin.type = 'centroid'
        event.origins.append(origin)

    focal_mechanism = FocalMechanism()
    res_id = '/'.join((res_id_prefix, 'focalmechanism',
                       evid, source_contributor.lower(),
                       'mw' + computation_type.lower()))
    focal_mechanism.resource_id = ResourceIdentifier(id=res_id)
    focal_mechanism.creation_info =\
        CreationInfo(agency_id=source_contributor)

    moment_tensor = MomentTensor()
    if origin is not None:
        moment_tensor.derived_origin_id = origin.resource_id
    else:
        #this is required for QuakeML validation:
        res_id = '/'.join((res_id_prefix, 'no-origin'))
        moment_tensor.derived_origin_id =\
            ResourceIdentifier(id=res_id)
    for mag in event.magnitudes:
        if mag.creation_info.agency_id == source_contributor:
            moment_tensor.moment_magnitude_id = mag.resource_id
    res_id = '/'.join((res_id_prefix, 'momenttensor',
                       evid, source_contributor.lower(),
                       'mw' + computation_type.lower()))
    moment_tensor.resource_id = ResourceIdentifier(id=res_id)
    moment_tensor.scalar_moment = moment
    self._storeUncertainty(moment_tensor.scalar_moment_errors,
                           moment_stderr)
    data_used = DataUsed()
    data_used.station_count = station_number + station_number2
    data_used.component_count = component_number + component_number2
    if computation_type == 'C':
        res_id = '/'.join((res_id_prefix, 'methodID=CMT'))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        #CMT algorithm uses long-period body waves,
        #very-long-period surface waves and
        #intermediate period surface waves (since 2004
        #for shallow and intermediate-depth earthquakes
        # --Ekstrom et al., 2012)
        data_used.wave_type = 'combined'
    elif computation_type == 'M':
        res_id = '/'.join((res_id_prefix, 'methodID=moment_tensor'))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        #FIXME: not sure which kind of data is used by
        #"moment tensor" algorithm.
        data_used.wave_type = 'unknown'
    elif computation_type == 'B':
        res_id = '/'.join((res_id_prefix, 'methodID=broadband_data'))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        #FIXME: is 'combined' correct here?
        data_used.wave_type = 'combined'
    elif computation_type == 'F':
        res_id = '/'.join((res_id_prefix, 'methodID=P-wave_first_motion'))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        data_used.wave_type = 'P waves'
    elif computation_type == 'S':
        res_id = '/'.join((res_id_prefix, 'methodID=scalar_moment'))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        #FIXME: not sure which kind of data is used
        #for scalar moment determination.
        data_used.wave_type = 'unknown'
    moment_tensor.data_used = data_used
    focal_mechanism.moment_tensor = moment_tensor
    event.focal_mechanisms.append(focal_mechanism)
    return focal_mechanism
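The next-day check above is needed because the Dp record stores only the centroid's time of day, while the date is inherited from the reference origin. A self-contained sketch of the rollover logic, with made-up times:

    from datetime import timedelta
    from obspy import UTCDateTime

    reference_time = UTCDateTime('2012-01-01T23:58:40')  # made-up origin
    centroid_clock = '000105.3'  # HHMMSS.s, just past midnight
    centroid_time = UTCDateTime(
        reference_time.strftime('%Y%m%d') + centroid_clock)
    # A centroid cannot precede its reference origin, so an apparent
    # backwards jump means the clock wrapped past midnight.
    if centroid_time < reference_time:
        centroid_time += timedelta(days=1)
    assert centroid_time > reference_time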
def _parse_record_dp(self, line, event):
    """
    Parses the 'source parameter data - primary' record Dp
    """
    source_contributor = line[2:6].strip()
    computation_type = line[6]
    exponent = self._int_zero(line[7])
    scale = math.pow(10, exponent)
    centroid_origin_time = line[8:14] + '.' + line[14]
    orig_time_stderr = line[15:17]
    if orig_time_stderr == 'FX':
        orig_time_stderr = 'Fixed'
    else:
        orig_time_stderr = \
            self._float_with_format(orig_time_stderr, '2.1', scale)
    centroid_latitude = self._float_with_format(line[17:21], '4.2')
    lat_type = line[21]
    if centroid_latitude is not None:
        centroid_latitude *= self._coordinate_sign(lat_type)
    lat_stderr = line[22:25]
    if lat_stderr == 'FX':
        lat_stderr = 'Fixed'
    else:
        lat_stderr = self._float_with_format(lat_stderr, '3.2', scale)
    centroid_longitude = self._float_with_format(line[25:30], '5.2')
    lon_type = line[30]
    if centroid_longitude is not None:
        centroid_longitude *= self._coordinate_sign(lon_type)
    lon_stderr = line[31:34]
    if lon_stderr == 'FX':
        lon_stderr = 'Fixed'
    else:
        lon_stderr = self._float_with_format(lon_stderr, '3.2', scale)
    centroid_depth = self._float_with_format(line[34:38], '4.1')
    depth_stderr = line[38:40]
    if depth_stderr == 'FX' or depth_stderr == 'BD':
        depth_stderr = 'Fixed'
    else:
        depth_stderr = self._float_with_format(depth_stderr, '2.1', scale)
    station_number = self._int_zero(line[40:43])
    component_number = self._int_zero(line[43:46])
    station_number2 = self._int_zero(line[46:48])
    component_number2 = self._int_zero(line[48:51])
    # unused: half_duration = self._float_with_format(line[51:54], '3.1')
    moment = self._float_with_format(line[54:56], '2.1')
    moment_stderr = self._float_with_format(line[56:58], '2.1')
    moment_exponent = self._int(line[58:60])
    if (moment is not None) and (moment_exponent is not None):
        moment *= math.pow(10, moment_exponent)
    if (moment_stderr is not None) and (moment_exponent is not None):
        moment_stderr *= math.pow(10, moment_exponent)

    evid = event.resource_id.id.split('/')[-1]
    # Create a new origin only if centroid time is defined:
    origin = None
    if centroid_origin_time.strip() != '.':
        origin = Origin()
        res_id = '/'.join((res_id_prefix, 'origin',
                           evid, source_contributor.lower(),
                           'mw' + computation_type.lower()))
        origin.resource_id = ResourceIdentifier(id=res_id)
        origin.creation_info = \
            CreationInfo(agency_id=source_contributor)
        date = event.origins[0].time.strftime('%Y%m%d')
        origin.time = UTCDateTime(date + centroid_origin_time)
        # Check if centroid time is on the next day:
        if origin.time < event.origins[0].time:
            origin.time += timedelta(days=1)
        self._store_uncertainty(origin.time_errors, orig_time_stderr)
        origin.latitude = centroid_latitude
        origin.longitude = centroid_longitude
        origin.depth = centroid_depth * 1000
        if lat_stderr == 'Fixed' and lon_stderr == 'Fixed':
            origin.epicenter_fixed = True
        else:
            self._store_uncertainty(origin.latitude_errors,
                                    self._lat_err_to_deg(lat_stderr))
            self._store_uncertainty(origin.longitude_errors,
                                    self._lon_err_to_deg(lon_stderr,
                                                         origin.latitude))
        if depth_stderr == 'Fixed':
            origin.depth_type = 'operator assigned'
        else:
            origin.depth_type = 'from location'
            self._store_uncertainty(origin.depth_errors, depth_stderr,
                                    scale=1000)
        quality = OriginQuality()
        quality.used_station_count = \
            station_number + station_number2
        quality.used_phase_count = \
            component_number + component_number2
        origin.quality = quality
        origin.origin_type = 'centroid'
        event.origins.append(origin)

    focal_mechanism = FocalMechanism()
    res_id = '/'.join((res_id_prefix, 'focalmechanism',
                       evid, source_contributor.lower(),
                       'mw' + computation_type.lower()))
    focal_mechanism.resource_id = ResourceIdentifier(id=res_id)
    focal_mechanism.creation_info = \
        CreationInfo(agency_id=source_contributor)

    moment_tensor = MomentTensor()
    if origin is not None:
        moment_tensor.derived_origin_id = origin.resource_id
    else:
        # this is required for QuakeML validation:
        res_id = '/'.join((res_id_prefix, 'no-origin'))
        moment_tensor.derived_origin_id = \
            ResourceIdentifier(id=res_id)
    for mag in event.magnitudes:
        if mag.creation_info.agency_id == source_contributor:
            moment_tensor.moment_magnitude_id = mag.resource_id
    res_id = '/'.join((res_id_prefix, 'momenttensor',
                       evid, source_contributor.lower(),
                       'mw' + computation_type.lower()))
    moment_tensor.resource_id = ResourceIdentifier(id=res_id)
    moment_tensor.scalar_moment = moment
    self._store_uncertainty(moment_tensor.scalar_moment_errors,
                            moment_stderr)
    data_used = DataUsed()
    data_used.station_count = station_number + station_number2
    data_used.component_count = component_number + component_number2
    if computation_type == 'C':
        res_id = '/'.join((res_id_prefix, 'methodID=CMT'))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        # CMT algorithm uses long-period body waves,
        # very-long-period surface waves and
        # intermediate period surface waves (since 2004
        # for shallow and intermediate-depth earthquakes
        # --Ekstrom et al., 2012)
        data_used.wave_type = 'combined'
    elif computation_type == 'M':
        res_id = '/'.join((res_id_prefix, 'methodID=moment_tensor'))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        # FIXME: not sure which kind of data is used by
        # "moment tensor" algorithm.
        data_used.wave_type = 'unknown'
    elif computation_type == 'B':
        res_id = '/'.join((res_id_prefix, 'methodID=broadband_data'))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        # FIXME: is 'combined' correct here?
        data_used.wave_type = 'combined'
    elif computation_type == 'F':
        res_id = '/'.join((res_id_prefix, 'methodID=P-wave_first_motion'))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        data_used.wave_type = 'P waves'
    elif computation_type == 'S':
        res_id = '/'.join((res_id_prefix, 'methodID=scalar_moment'))
        focal_mechanism.method_id = ResourceIdentifier(id=res_id)
        # FIXME: not sure which kind of data is used
        # for scalar moment determination.
        data_used.wave_type = 'unknown'
    moment_tensor.data_used = [data_used]
    focal_mechanism.moment_tensor = moment_tensor
    event.focal_mechanisms.append(focal_mechanism)
    return focal_mechanism
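The scalar moment parsed here is the quantity behind the 'mw' magnitudes these records reference. For orientation only (this is not part of the parser), a sketch of the standard moment-magnitude relation Mw = 2/3 * (log10(M0) - 9.1), with M0 in N*m:

    import math

    def moment_to_mw(scalar_moment):
        """Standard moment magnitude from a scalar moment in N*m."""
        return 2.0 / 3.0 * (math.log10(scalar_moment) - 9.1)

    print(round(moment_to_mw(1.1e17), 1))  # roughly Mw 5.3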