Example #1
    def get_event_object(self):
        '''
        Update event otime, lat, lon, and mag from the IRIS (or any other
        client's) catalog.
        '''

        # get parameters from the catalog
        if self.use_catalog == 1:
            print("WARNING using event data from the IRIS catalog")
            cat = self.client.get_events(
                starttime=self.otime - self.sec_before_after_event,
                endtime=self.otime + self.sec_before_after_event)
            self.ev = cat[0]

            # use catalog parameters
            self.otime = self.ev.origins[0].time
            self.elat = self.ev.origins[0].latitude
            self.elon = self.ev.origins[0].longitude
            self.edep = self.ev.origins[0].depth
            self.emag = self.ev.magnitudes[0].mag

        # use parameters from the input file
        else:
            print("WARNING using event data from user-defined catalog")
            self.ev = Event()  # required: origins/magnitudes are appended below
            org = Origin()
            org.latitude = self.elat
            org.longitude = self.elon
            org.depth = self.edep
            org.time = self.otime
            mag = Magnitude()
            mag.mag = self.emag
            mag.magnitude_type = "Mw"
            self.ev.origins.append(org)
            self.ev.magnitudes.append(mag)
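
The method above assumes self.client is an already-initialized FDSN client. A minimal standalone sketch of the same catalog lookup (the IRIS endpoint, origin time, and the 10-second search window are illustrative assumptions):

from obspy import UTCDateTime
from obspy.clients.fdsn import Client

client = Client("IRIS")  # assumed FDSN data center
otime = UTCDateTime("2011-03-11T05:46:24")  # hypothetical origin time
# search a +/- 10 s window around the expected origin time
cat = client.get_events(starttime=otime - 10, endtime=otime + 10)
ev = cat[0]
print(ev.origins[0].time, ev.magnitudes[0].mag)
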
Example #2
def read_cat_ref(cat_file):
    """
    Parses a given refrence catalogue (in ascii format,see the header for details)
    output is Obspy catalogue object
    """
    cat_ref = np.loadtxt(cat_file, delimiter=',', skiprows=1)
    cat = Catalog()
    for i, e in enumerate(cat_ref):
        event = Event(resource_id='smi:local/='+str(i), creation_info='HG')
        origin = Origin()
        origin.time = UTCDateTime(int(e[2]), int(e[3]), int(e[4]),
                                  int(e[7]), int(e[8]), e[9])
        origin.longitude = e[0]
        origin.latitude = e[1]
        origin.depth = e[6] * 1000.  # in meters
        event.origins.append(origin)
        if not np.isnan(e[10]):
            mag = Magnitude(creation_info='HER')
            mag.mag = e[10]
            mag.magnitude_type = 'Mw'
            event.magnitudes.append(mag)
        if not np.isnan(e[11]):
            mag = Magnitude(creation_info='MAR')
            mag.mag = e[11]
            mag.magnitude_type = 'Mw'
            event.magnitudes.append(mag)
        if not np.isnan(e[12]):
            mag = Magnitude(creation_info='SIP')
            mag.mag = e[12]
            mag.magnitude_type = 'Mw'
            event.magnitudes.append(mag)
        cat.append(event)
    return cat
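
A hedged usage sketch for read_cat_ref: the indexing above implies a comma-separated layout of lon, lat, year, month, day, (one unused column), depth in km, hour, minute, second, followed by three Mw columns; the file name is hypothetical:

import numpy as np  # imports the function above relies on
from obspy import UTCDateTime
from obspy.core.event import Catalog, Event, Origin, Magnitude

cat = read_cat_ref("reference_catalogue.csv")  # hypothetical file
print(len(cat), "events")
print(cat[0].origins[0].time, [m.mag for m in cat[0].magnitudes])
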
Example #3
    def _parseRecordAH(self, line, event):
        """
        Parses the 'additional hypocenter' record AH
        """
        date = line[2:10]
        time = line[11:20]
        #unused: hypocenter_quality = line[20]
        latitude = self._float(line[21:27])
        lat_type = line[27]
        longitude = self._float(line[29:36])
        lon_type = line[36]
        #unused: preliminary_flag = line[37]
        depth = self._float(line[38:43])
        #unused: depth_quality = line[43]
        standard_dev = self._floatUnused(line[44:48])
        station_number = self._intUnused(line[48:51])
        phase_number = self._intUnused(line[51:55])
        source_code = line[56:60].strip()

        evid = event.resource_id.id.split('/')[-1]
        origin = Origin()
        res_id = '/'.join((res_id_prefix, 'origin', evid, source_code.lower()))
        origin.resource_id = ResourceIdentifier(id=res_id)
        origin.creation_info = CreationInfo(agency_id=source_code)
        origin.time = UTCDateTime(date + time)
        origin.latitude = latitude * self._coordinateSign(lat_type)
        origin.longitude = longitude * self._coordinateSign(lon_type)
        origin.depth = depth * 1000
        origin.depth_type = 'from location'
        origin.quality = OriginQuality()
        origin.quality.standard_error = standard_dev
        origin.quality.used_station_count = station_number
        origin.quality.used_phase_count = phase_number
        origin.type = 'hypocenter'
        event.origins.append(origin)
Example #4
    def _parse_record_ah(self, line, event):
        """
        Parses the 'additional hypocenter' record AH
        """
        date = line[2:10]
        time = line[11:20]
        # unused: hypocenter_quality = line[20]
        latitude = self._float(line[21:27])
        lat_type = line[27]
        longitude = self._float(line[29:36])
        lon_type = line[36]
        # unused: preliminary_flag = line[37]
        depth = self._float(line[38:43])
        # unused: depth_quality = line[43]
        standard_dev = self._float_unused(line[44:48])
        station_number = self._int_unused(line[48:51])
        phase_number = self._int_unused(line[51:55])
        source_code = line[56:60].strip()

        evid = event.resource_id.id.split('/')[-1]
        origin = Origin()
        res_id = '/'.join((res_id_prefix, 'origin', evid, source_code.lower()))
        origin.resource_id = ResourceIdentifier(id=res_id)
        origin.creation_info = CreationInfo(agency_id=source_code)
        origin.time = UTCDateTime(date + time)
        origin.latitude = latitude * self._coordinate_sign(lat_type)
        origin.longitude = longitude * self._coordinate_sign(lon_type)
        origin.depth = depth * 1000
        origin.depth_type = 'from location'
        origin.quality = OriginQuality()
        origin.quality.standard_error = standard_dev
        origin.quality.used_station_count = station_number
        origin.quality.used_phase_count = phase_number
        origin.origin_type = 'hypocenter'
        event.origins.append(origin)
Example #5
    def _parse_record_hy(self, line):
        """
        Parses the 'hypocenter' record HY
        """
        date = line[2:10]
        time = line[11:20]
        # unused: location_quality = line[20]
        latitude = self._float(line[21:27])
        lat_type = line[27]
        longitude = self._float(line[29:36])
        lon_type = line[36]
        depth = self._float(line[38:43])
        # unused: depth_quality = line[43]
        standard_dev = self._float(line[44:48])
        station_number = self._int(line[48:51])
        # unused: version_flag = line[51]
        fe_region_number = line[52:55]
        fe_region_name = self._decode_fe_region_number(fe_region_number)
        source_code = line[55:60].strip()

        event = Event()
        # FIXME: a smarter way to define evid?
        evid = date + time
        res_id = '/'.join((res_id_prefix, 'event', evid))
        event.resource_id = ResourceIdentifier(id=res_id)
        description = EventDescription(
            type='region name',
            text=fe_region_name)
        event.event_descriptions.append(description)
        description = EventDescription(
            type='Flinn-Engdahl region',
            text=fe_region_number)
        event.event_descriptions.append(description)
        origin = Origin()
        res_id = '/'.join((res_id_prefix, 'origin', evid))
        origin.resource_id = ResourceIdentifier(id=res_id)
        origin.creation_info = CreationInfo()
        if source_code:
            origin.creation_info.agency_id = source_code
        else:
            origin.creation_info.agency_id = 'USGS-NEIC'
        res_id = '/'.join((res_id_prefix, 'earthmodel/ak135'))
        origin.earth_model_id = ResourceIdentifier(id=res_id)
        origin.time = UTCDateTime(date + time)
        origin.latitude = latitude * self._coordinate_sign(lat_type)
        origin.longitude = longitude * self._coordinate_sign(lon_type)
        origin.depth = depth * 1000
        origin.depth_type = 'from location'
        origin.quality = OriginQuality()
        origin.quality.associated_station_count = station_number
        origin.quality.standard_error = standard_dev
        # associated_phase_count can be incremented in records 'P ' and 'S '
        origin.quality.associated_phase_count = 0
        # depth_phase_count can be incremented in record 'S '
        origin.quality.depth_phase_count = 0
        origin.origin_type = 'hypocenter'
        origin.region = fe_region_name
        event.origins.append(origin)
        return event
Example #6
File: core.py Project: zurgeg/obspy
 def _deserialize(self, zmap_str):
     catalog = Catalog()
     for row in zmap_str.split('\n'):
         if len(row) == 0:
             continue
         origin = Origin()
         event = Event(origins=[origin])
         event.preferred_origin_id = origin.resource_id.id
         # Begin value extraction
         columns = row.split('\t', 13)[:13]  # ignore extra columns
         values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns))
         # Extract origin
         origin.longitude = self._str2num(values.get('lon'))
         origin.latitude = self._str2num(values.get('lat'))
         depth = self._str2num(values.get('depth'))
         if depth is not None:
             origin.depth = depth * 1000.0
         z_err = self._str2num(values.get('z_err'))
         if z_err is not None:
             origin.depth_errors.uncertainty = z_err * 1000.0
         h_err = self._str2num(values.get('h_err'))
         if h_err is not None:
             ou = OriginUncertainty()
             ou.horizontal_uncertainty = h_err
             ou.preferred_description = 'horizontal uncertainty'
             origin.origin_uncertainty = ou
         year = self._str2num(values.get('year'))
         if year is not None:
             t_fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
             comps = [self._str2num(values.get(f)) for f in t_fields]
             if year % 1 != 0:
                 origin.time = self._decyear2utc(year)
             elif any(v > 0 for v in comps[1:]):
                 # no seconds involved
                 if len(comps) < 6:
                     utc_args = [int(v) for v in comps if v is not None]
                 # we also have to handle seconds
                 else:
                     utc_args = [
                         int(v) if v is not None else 0 for v in comps[:-1]
                     ]
                     # just leave float seconds as is
                     utc_args.append(comps[-1])
                 origin.time = UTCDateTime(*utc_args)
         mag = self._str2num(values.get('mag'))
         # Extract magnitude
         if mag is not None:
             magnitude = Magnitude(mag=mag)
             m_err = self._str2num(values.get('m_err'))
             magnitude.mag_errors.uncertainty = m_err
             event.magnitudes.append(magnitude)
             event.preferred_magnitude_id = magnitude.resource_id.id
         event.scope_resource_ids()
         catalog.append(event)
     return catalog
Example #7
File: core.py Project: Brtle/obspy
 def _deserialize(self, zmap_str):
     catalog = Catalog()
     for row in zmap_str.split('\n'):
         if len(row) == 0:
             continue
         origin = Origin()
         event = Event(origins=[origin])
         event.preferred_origin_id = origin.resource_id.id
         # Begin value extraction
         columns = row.split('\t', 13)[:13]  # ignore extra columns
         values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns))
         # Extract origin
         origin.longitude = self._str2num(values.get('lon'))
         origin.latitude = self._str2num(values.get('lat'))
         depth = self._str2num(values.get('depth'))
         if depth is not None:
             origin.depth = depth * 1000.0
         z_err = self._str2num(values.get('z_err'))
         if z_err is not None:
             origin.depth_errors.uncertainty = z_err * 1000.0
         h_err = self._str2num(values.get('h_err'))
         if h_err is not None:
             ou = OriginUncertainty()
             ou.horizontal_uncertainty = h_err
             ou.preferred_description = 'horizontal uncertainty'
             origin.origin_uncertainty = ou
         year = self._str2num(values.get('year'))
         if year is not None:
             t_fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
             comps = [self._str2num(values.get(f)) for f in t_fields]
             if year % 1 != 0:
                 origin.time = self._decyear2utc(year)
             elif any(v > 0 for v in comps[1:]):
                 # no seconds involved
                 if len(comps) < 6:
                     utc_args = [int(v) for v in comps if v is not None]
                 # we also have to handle seconds
                 else:
                     utc_args = [int(v) if v is not None else 0
                                 for v in comps[:-1]]
                     # just leave float seconds as is
                     utc_args.append(comps[-1])
                 origin.time = UTCDateTime(*utc_args)
         mag = self._str2num(values.get('mag'))
         # Extract magnitude
         if mag is not None:
             magnitude = Magnitude(mag=mag)
             m_err = self._str2num(values.get('m_err'))
             magnitude.mag_errors.uncertainty = m_err
             event.magnitudes.append(magnitude)
             event.preferred_magnitude_id = magnitude.resource_id.id
         event.scope_resource_ids()
         catalog.append(event)
     return catalog
Example #8
 def _deserialize(self, zmap_str):
     catalog = Catalog()
     for row in zmap_str.split("\n"):
         if len(row) == 0:
             continue
         origin = Origin()
         event = Event(origins=[origin])
         event.preferred_origin_id = origin.resource_id.id
         # Begin value extraction
         columns = row.split("\t", 13)[:13]  # ignore extra columns
         values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns))
         # Extract origin
         origin.longitude = self._str2num(values.get("lon"))
         origin.latitude = self._str2num(values.get("lat"))
         depth = self._str2num(values.get("depth"))
         if depth is not None:
             origin.depth = depth * 1000.0
         z_err = self._str2num(values.get("z_err"))
         if z_err is not None:
             origin.depth_errors.uncertainty = z_err * 1000.0
         h_err = self._str2num(values.get("h_err"))
         if h_err is not None:
             ou = OriginUncertainty()
             ou.horizontal_uncertainty = h_err
             ou.preferred_description = "horizontal uncertainty"
             origin.origin_uncertainty = ou
         year = self._str2num(values.get("year"))
         if year is not None:
             t_fields = ["year", "month", "day", "hour", "minute", "second"]
             comps = [self._str2num(values.get(f)) for f in t_fields]
             if year % 1 != 0:
                 origin.time = self._decyear2utc(year)
             elif any(v > 0 for v in comps[1:]):
                 utc_args = [int(v) for v in comps if v is not None]
                 origin.time = UTCDateTime(*utc_args)
         mag = self._str2num(values.get("mag"))
         # Extract magnitude
         if mag is not None:
             magnitude = Magnitude(mag=mag)
             m_err = self._str2num(values.get("m_err"))
             magnitude.mag_errors.uncertainty = m_err
             event.magnitudes.append(magnitude)
             event.preferred_magnitude_id = magnitude.resource_id.id
         catalog.append(event)
     return catalog
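
These _deserialize methods are normally reached through ObsPy's plugin mechanism rather than called directly; a sketch of the usual entry point (the file name is hypothetical):

from obspy import read_events

# ZMAP is a plain-text, tab-separated catalog format
catalog = read_events("my_events.zmap", format="ZMAP")
print(catalog)
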
Example #9
def event_to_quakeml(event, filename):
    """
    Write one of those events to QuakeML.
    """
    # Create all objects.
    cat = Catalog()
    ev = Event()
    org = Origin()
    mag = Magnitude()
    fm = FocalMechanism()
    mt = MomentTensor()
    t = Tensor()
    # Link them together.
    cat.append(ev)
    ev.origins.append(org)
    ev.magnitudes.append(mag)
    ev.focal_mechanisms.append(fm)
    fm.moment_tensor = mt
    mt.tensor = t

    # Fill values
    ev.resource_id = "smi:inversion/%s" % str(event["identifier"])
    org.time = event["time"]
    org.longitude = event["longitude"]
    org.latitude = event["latitude"]
    org.depth = event["depth_in_km"] * 1000

    mag.mag = event["Mw"]
    mag.magnitude_type = "Mw"

    # Note: Mpp is assigned to m_tt and Mtt to m_pp below, exactly as in the
    # source; verify the component convention of the input dict before reuse.
    t.m_rr = event["Mrr"]
    t.m_tt = event["Mpp"]
    t.m_pp = event["Mtt"]
    t.m_rt = event["Mrt"]
    t.m_rp = event["Mrp"]
    t.m_tp = event["Mtp"]

    cat.write(filename, format="quakeml")
Example #10
def __toOrigin(parser, origin_el):
    """
    Parses a given origin etree element.

    :type parser: :class:`~obspy.core.util.xmlwrapper.XMLParser`
    :param parser: Open XMLParser object.
    :type origin_el: etree.element
    :param origin_el: origin element to be parsed.
    :return: A ObsPy :class:`~obspy.core.event.Origin` object.
    """
    global CURRENT_TYPE

    origin = Origin()
    origin.resource_id = ResourceIdentifier(
        prefix="/".join([RESOURCE_ROOT, "origin"]))

    # I guess setting the program used as the method id is fine.
    origin.method_id = "%s/location_method/%s/1" % (
        RESOURCE_ROOT, parser.xpath2obj('program', origin_el))
    if str(origin.method_id).lower().endswith("none"):
        origin.method_id = None

    # Standard parameters.
    origin.time, origin.time_errors = \
        __toTimeQuantity(parser, origin_el, "time")
    origin.latitude, origin_latitude_error = \
        __toFloatQuantity(parser, origin_el, "latitude")
    origin.longitude, origin_longitude_error = \
        __toFloatQuantity(parser, origin_el, "longitude")
    origin.depth, origin.depth_errors = \
        __toFloatQuantity(parser, origin_el, "depth")

    if origin_longitude_error:
        origin_longitude_error = origin_longitude_error["uncertainty"]
    if origin_latitude_error:
        origin_latitude_error = origin_latitude_error["uncertainty"]

    # Figure out the depth type.
    depth_type = parser.xpath2obj("depth_type", origin_el)

    # Map Seishub specific depth type to the QuakeML depth type.
    if depth_type == "from location program":
        depth_type = "from location"
    if depth_type is not None:
        origin.depth_type = depth_type

    # XXX: CHECK DEPTH ORIENTATION!!

    if CURRENT_TYPE == "seiscomp3":
        origin.depth *= 1000
        if origin.depth_errors.uncertainty:
            origin.depth_errors.uncertainty *= 1000
    else:
        # Convert to m.
        origin.depth *= -1000
        if origin.depth_errors.uncertainty:
            origin.depth_errors.uncertainty *= 1000

    # Earth model.
    earth_mod = parser.xpath2obj('earth_mod', origin_el, str)
    if earth_mod:
        earth_mod = earth_mod.split()
        earth_mod = ",".join(earth_mod)
        origin.earth_model_id = "%s/earth_model/%s/1" % (RESOURCE_ROOT,
                                                         earth_mod)

    if (origin_latitude_error is None or origin_longitude_error is None) and \
            CURRENT_TYPE not in ["seiscomp3", "toni"]:
        raise Exception("Missing latitude/longitude error for event type "
                        "'%s'" % CURRENT_TYPE)

    if origin_latitude_error and origin_longitude_error:
        if CURRENT_TYPE in ["baynet", "obspyck"]:
            uncert = OriginUncertainty()
            if origin_latitude_error > origin_longitude_error:
                uncert.azimuth_max_horizontal_uncertainty = 0
            else:
                uncert.azimuth_max_horizontal_uncertainty = 90
            uncert.min_horizontal_uncertainty, \
                uncert.max_horizontal_uncertainty = \
                sorted([origin_longitude_error, origin_latitude_error])
            uncert.min_horizontal_uncertainty *= 1000.0
            uncert.max_horizontal_uncertainty *= 1000.0
            uncert.preferred_description = "uncertainty ellipse"
            origin.origin_uncertainty = uncert
        elif CURRENT_TYPE == "earthworm":
            uncert = OriginUncertainty()
            uncert.horizontal_uncertainty = origin_latitude_error
            uncert.horizontal_uncertainty *= 1000.0
            uncert.preferred_description = "horizontal uncertainty"
            origin.origin_uncertainty = uncert
        elif CURRENT_TYPE in ["seiscomp3", "toni"]:
            pass
        else:
            raise Exception("Unhandled event type: '%s'" % CURRENT_TYPE)

    # Parse the OriginQuality if applicable.
    if not origin_el.xpath("originQuality"):
        return origin

    origin_quality_el = origin_el.xpath("originQuality")[0]
    origin.quality = OriginQuality()
    origin.quality.associated_phase_count = \
        parser.xpath2obj("associatedPhaseCount", origin_quality_el, int)
    # QuakeML does apparently not distinguish between P and S wave phase
    # count. Some Seishub event files do.
    p_phase_count = parser.xpath2obj("P_usedPhaseCount", origin_quality_el,
                                     int)
    s_phase_count = parser.xpath2obj("S_usedPhaseCount", origin_quality_el,
                                     int)
    # Use both in case they are set.
    if p_phase_count is not None and s_phase_count is not None:
        phase_count = p_phase_count + s_phase_count
        # Also add two Seishub element file specific elements.
        origin.quality.p_used_phase_count = p_phase_count
        origin.quality.s_used_phase_count = s_phase_count
    # Otherwise the total usedPhaseCount should be specified.
    else:
        phase_count = parser.xpath2obj("usedPhaseCount", origin_quality_el,
                                       int)
    if p_phase_count is not None:
        origin.quality.setdefault("extra", AttribDict())
        origin.quality.extra.usedPhaseCountP = {
            'value': p_phase_count,
            'namespace': NAMESPACE
        }
    if s_phase_count is not None:
        origin.quality.setdefault("extra", AttribDict())
        origin.quality.extra.usedPhaseCountS = {
            'value': s_phase_count,
            'namespace': NAMESPACE
        }
    origin.quality.used_phase_count = phase_count

    associated_station_count = \
        parser.xpath2obj("associatedStationCount", origin_quality_el, int)
    used_station_count = parser.xpath2obj("usedStationCount",
                                          origin_quality_el, int)
    depth_phase_count = parser.xpath2obj("depthPhaseCount", origin_quality_el,
                                         int)
    standard_error = parser.xpath2obj("standardError", origin_quality_el,
                                      float)
    azimuthal_gap = parser.xpath2obj("azimuthalGap", origin_quality_el, float)
    secondary_azimuthal_gap = \
        parser.xpath2obj("secondaryAzimuthalGap", origin_quality_el, float)
    ground_truth_level = parser.xpath2obj("groundTruthLevel",
                                          origin_quality_el, str)
    minimum_distance = parser.xpath2obj("minimumDistance", origin_quality_el,
                                        float)
    maximum_distance = parser.xpath2obj("maximumDistance", origin_quality_el,
                                        float)
    median_distance = parser.xpath2obj("medianDistance", origin_quality_el,
                                       float)
    if minimum_distance is not None:
        minimum_distance = kilometer2degrees(minimum_distance)
    if maximum_distance is not None:
        maximum_distance = kilometer2degrees(maximum_distance)
    if median_distance is not None:
        median_distance = kilometer2degrees(median_distance)

    if associated_station_count is not None:
        origin.quality.associated_station_count = associated_station_count
    if used_station_count is not None:
        origin.quality.used_station_count = used_station_count
    if depth_phase_count is not None:
        origin.quality.depth_phase_count = depth_phase_count
    if standard_error is not None and not math.isnan(standard_error):
        origin.quality.standard_error = standard_error
    if azimuthal_gap is not None:
        origin.quality.azimuthal_gap = azimuthal_gap
    if secondary_azimuthal_gap is not None:
        origin.quality.secondary_azimuthal_gap = secondary_azimuthal_gap
    if ground_truth_level is not None:
        origin.quality.ground_truth_level = ground_truth_level
    if minimum_distance is not None:
        origin.quality.minimum_distance = minimum_distance
    if maximum_distance is not None:
        origin.quality.maximum_distance = maximum_distance
    if median_distance is not None and not math.isnan(median_distance):
        origin.quality.median_distance = median_distance

    return origin
Example #11
    def _parseRecordDp(self, line, event):
        """
        Parses the 'source parameter data - primary' record Dp
        """
        source_contributor = line[2:6].strip()
        computation_type = line[6]
        exponent = self._intZero(line[7])
        scale = math.pow(10, exponent)
        centroid_origin_time = line[8:14] + '.' + line[14]
        orig_time_stderr = line[15:17]
        if orig_time_stderr == 'FX':
            orig_time_stderr = 'Fixed'
        else:
            orig_time_stderr =\
                self._floatWithFormat(orig_time_stderr, '2.1', scale)
        centroid_latitude = self._floatWithFormat(line[17:21], '4.2')
        lat_type = line[21]
        if centroid_latitude is not None:
            centroid_latitude *= self._coordinateSign(lat_type)
        lat_stderr = line[22:25]
        if lat_stderr == 'FX':
            lat_stderr = 'Fixed'
        else:
            lat_stderr = self._floatWithFormat(lat_stderr, '3.2', scale)
        centroid_longitude = self._floatWithFormat(line[25:30], '5.2')
        lon_type = line[30]
        if centroid_longitude is not None:
            centroid_longitude *= self._coordinateSign(lon_type)
        lon_stderr = line[31:34]
        if lon_stderr == 'FX':
            lon_stderr = 'Fixed'
        else:
            lon_stderr = self._floatWithFormat(lon_stderr, '3.2', scale)
        centroid_depth = self._floatWithFormat(line[34:38], '4.1')
        depth_stderr = line[38:40]
        if depth_stderr == 'FX' or depth_stderr == 'BD':
            depth_stderr = 'Fixed'
        else:
            depth_stderr = self._floatWithFormat(depth_stderr, '2.1', scale)
        station_number = self._intZero(line[40:43])
        component_number = self._intZero(line[43:46])
        station_number2 = self._intZero(line[46:48])
        component_number2 = self._intZero(line[48:51])
        #unused: half_duration = self._floatWithFormat(line[51:54], '3.1')
        moment = self._floatWithFormat(line[54:56], '2.1')
        moment_stderr = self._floatWithFormat(line[56:58], '2.1')
        moment_exponent = self._int(line[58:60])
        if (moment is not None) and (moment_exponent is not None):
            moment *= math.pow(10, moment_exponent)
        if (moment_stderr is not None) and (moment_exponent is not None):
            moment_stderr *= math.pow(10, moment_exponent)

        evid = event.resource_id.id.split('/')[-1]
        #Create a new origin only if centroid time is defined:
        origin = None
        if centroid_origin_time.strip() != '.':
            origin = Origin()
            res_id = '/'.join(
                (res_id_prefix, 'origin', evid, source_contributor.lower(),
                 'mw' + computation_type.lower()))
            origin.resource_id = ResourceIdentifier(id=res_id)
            origin.creation_info =\
                CreationInfo(agency_id=source_contributor)
            date = event.origins[0].time.strftime('%Y%m%d')
            origin.time = UTCDateTime(date + centroid_origin_time)
            #Check if centroid time is on the next day:
            if origin.time < event.origins[0].time:
                origin.time += timedelta(days=1)
            self._storeUncertainty(origin.time_errors, orig_time_stderr)
            origin.latitude = centroid_latitude
            origin.longitude = centroid_longitude
            origin.depth = centroid_depth * 1000
            if lat_stderr == 'Fixed' and lon_stderr == 'Fixed':
                origin.epicenter_fixed = True
            else:
                self._storeUncertainty(origin.latitude_errors,
                                       self._latErrToDeg(lat_stderr))
                self._storeUncertainty(
                    origin.longitude_errors,
                    self._lonErrToDeg(lon_stderr, origin.latitude))
            if depth_stderr == 'Fixed':
                origin.depth_type = 'operator assigned'
            else:
                origin.depth_type = 'from location'
                self._storeUncertainty(origin.depth_errors,
                                       depth_stderr,
                                       scale=1000)
            quality = OriginQuality()
            quality.used_station_count =\
                station_number + station_number2
            quality.used_phase_count =\
                component_number + component_number2
            origin.quality = quality
            origin.type = 'centroid'
            event.origins.append(origin)
        focal_mechanism = FocalMechanism()
        res_id = '/'.join(
            (res_id_prefix, 'focalmechanism', evid, source_contributor.lower(),
             'mw' + computation_type.lower()))
        focal_mechanism.resource_id = ResourceIdentifier(id=res_id)
        focal_mechanism.creation_info =\
            CreationInfo(agency_id=source_contributor)
        moment_tensor = MomentTensor()
        if origin is not None:
            moment_tensor.derived_origin_id = origin.resource_id
        else:
            #this is required for QuakeML validation:
            res_id = '/'.join((res_id_prefix, 'no-origin'))
            moment_tensor.derived_origin_id =\
                ResourceIdentifier(id=res_id)
        for mag in event.magnitudes:
            if mag.creation_info.agency_id == source_contributor:
                moment_tensor.moment_magnitude_id = mag.resource_id
        res_id = '/'.join(
            (res_id_prefix, 'momenttensor', evid, source_contributor.lower(),
             'mw' + computation_type.lower()))
        moment_tensor.resource_id = ResourceIdentifier(id=res_id)
        moment_tensor.scalar_moment = moment
        self._storeUncertainty(moment_tensor.scalar_moment_errors,
                               moment_stderr)
        data_used = DataUsed()
        data_used.station_count = station_number + station_number2
        data_used.component_count = component_number + component_number2
        if computation_type == 'C':
            res_id = '/'.join((res_id_prefix, 'methodID=CMT'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            #CMT algorithm uses long-period body waves,
            #very-long-period surface waves and
            #intermediate period surface waves (since 2004
            #for shallow and intermediate-depth earthquakes
            # --Ekstrom et al., 2012)
            data_used.wave_type = 'combined'
        if computation_type == 'M':
            res_id = '/'.join((res_id_prefix, 'methodID=moment_tensor'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            #FIXME: not sure which kind of data is used by
            #"moment tensor" algorithm.
            data_used.wave_type = 'unknown'
        elif computation_type == 'B':
            res_id = '/'.join((res_id_prefix, 'methodID=broadband_data'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            #FIXME: is 'combined' correct here?
            data_used.wave_type = 'combined'
        elif computation_type == 'F':
            res_id = '/'.join((res_id_prefix, 'methodID=P-wave_first_motion'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            data_used.wave_type = 'P waves'
        elif computation_type == 'S':
            res_id = '/'.join((res_id_prefix, 'methodID=scalar_moment'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            #FIXME: not sure which kind of data is used
            #for scalar moment determination.
            data_used.wave_type = 'unknown'
        moment_tensor.data_used = data_used
        focal_mechanism.moment_tensor = moment_tensor
        event.focal_mechanisms.append(focal_mechanism)
        return focal_mechanism
Example #12
from obspy import UTCDateTime
from obspy.core.event import Catalog, Event, Origin, Magnitude
from obspy.geodetics import FlinnEngdahl

cat = Catalog()
cat.description = "Just a fictitious toy example catalog built from scratch"

e = Event()
e.event_type = "not existing"

o = Origin()
o.time = UTCDateTime(2014, 2, 23, 18, 0, 0)
o.latitude = 47.6
o.longitude = 12.0
o.depth = 10000
o.depth_type = "operator assigned"
o.evaluation_mode = "manual"
o.evaluation_status = "preliminary"
o.region = FlinnEngdahl().get_region(o.longitude, o.latitude)

m = Magnitude()
m.mag = 7.2
m.magnitude_type = "Mw"

m2 = Magnitude()
m2.mag = 7.4
m2.magnitude_type = "Ms"

# also included could be: custom picks, amplitude measurements, station magnitudes,
# focal mechanisms, moment tensors, ...
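
The snippet stops before wiring the objects together; a plausible completion in the same tutorial style (the preferred-ID links are an assumption about intent):

cat.append(e)
e.origins.append(o)
e.magnitudes.append(m)
e.magnitudes.append(m2)
m.origin_id = o.resource_id
m2.origin_id = o.resource_id
e.preferred_origin_id = o.resource_id
e.preferred_magnitude_id = m.resource_id

cat.write("my_custom_events.xml", format="QUAKEML")
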
Example #13
    def get_results(self):
        cids = []
        clusters = []
        results_file = "{}/{}".format(self.hypoDD_control.control_directory,
                              self.hypoDD_control.relocated_hypocenters_output
                              )
        residuals_file = "{}/{}".format(self.hypoDD_control.control_directory,
                                        self.hypoDD_control.data_residual_output
                                        )
        with open(results_file, "r") as f:
            for line in f:
                num = line.split()
                evid = num[0]
                lat = float(num[1])
                lon = float(num[2])
                dep = 1000 * float(num[3])  # km to m
                errx = num[7]
                erry = num[8]
                errz = num[9]
                yr = int(num[10])
                mo = int(num[11])
                dy = int(num[12])
                hr = int(num[13])
                mi = int(num[14])
                sc = float(num[15])
                mag = num[16]
                nccp = num[17]
                nccs = num[18]
                nctp = num[19]
                ncts = num[20]
                rcc = num[21]
                rct = num[22]
                cid = num[23]
                if cid not in cids:
                    cids.append(cid)
                    clusters.append(Cluster())
                    clusters[-1].hypoDD_id = cid
                    clusters[-1].successful_relocation = True
                    clusters[-1].catalog = Catalog()
                    clusters[-1].event_ids = []
                origin = Origin()
                isec = int(math.floor(sc))
                micsec = int((sc - isec) * 1000000)
                origin.time = UTCDateTime(yr, mo, dy, hr, mi, isec, micsec)
                origin.longitude = lon
                origin.latitude = lat
                origin.depth = dep
                origin.method_id = "hypoDD"
                # TODO (@ogalanis): Add time/location errors (when
                # appropriate. Add quality and origin_uncertainty. Add arrivals.
                event = Event()
                event.creation_info = CreationInfo()
                event.creation_info.author = __package__
                event.creation_info.version = info.__version__
                event.origins = [origin]
                # Event has a 'magnitudes' list, not a 'magnitude' attribute;
                # the magnitude column was parsed as a string, so convert it
                event.magnitudes = [Magnitude(mag=float(mag))]
                idx = cids.index(cid)
                clusters[idx].catalog.events.append(event)
                clusters[idx].event_ids.append(evid)

        if self.hypoDD_control.cid != 0:
            my_list = []
            clusters[0].connectedness = Connectedness()
            with open(residuals_file, "r") as f:
                for line in f:
                    num = line.split()
                    evid_1 = num[2]
                    evid_2 = num[3]
                    obs_type = num[4]
                    if obs_type == "1":
                        my_list = clusters[0].connectedness.cross_corr_P
                    elif obs_type == "2":
                        my_list = clusters[0].connectedness.cross_corr_S
                    elif obs_type == "3":
                        my_list = clusters[0].connectedness.catalog_P
                    elif obs_type == "4":
                        my_list = clusters[0].connectedness.catalog_S
                    else:
                        continue
                    in_list = [x for x in my_list if (( x[0] == evid_1 and
                                                        x[1] == evid_2
                                                        ) or
                                                      ( x[0] == evid_2 and
                                                        x[1] == evid_1
                                                        ))]
                    if in_list:
                        for x in my_list:
                            if (( x[0] == evid_1 and
                                  x[1] == evid_2
                                  ) or
                                ( x[0] == evid_2 and
                                  x[1] == evid_1
                                  )):
                                x[2] += 1
                    else:
                        my_list.append([evid_1,evid_2,1])

        return clusters
Example #14
def outputOBSPY(hp, event=None, only_fm_picks=False):
    """
    Make an Event which includes the current focal mechanism information from HASH
    
    Use the 'only_fm_picks' flag to only include the picks HASH used for the FocalMechanism.
    This flag will replace the 'picks' and 'arrivals' lists of existing events with new ones.
    
    Inputs
    -------
    hp    : hashpy.HashPype instance
    
    event : obspy.core.event.Event
    
    only_fm_picks : bool of whether to overwrite the picks/arrivals lists
    
    
    Returns
    -------
    obspy.core.event.Event
    
    Event will be new if no event was input, FocalMech added to existing event
    """
    # Returns new (or updates existing) Event with HASH solution
    n = hp.npol
    if event is None:
        event = Event(focal_mechanisms=[], picks=[], origins=[])
        origin = Origin(arrivals=[])
        origin.time = UTCDateTime(hp.tstamp)
        origin.latitude = hp.qlat
        origin.longitude = hp.qlon
        origin.depth = hp.qdep
        origin.creation_info = CreationInfo(version=hp.icusp)
        origin.resource_id = ResourceIdentifier('smi:hash/Origin/{0}'.format(
            hp.icusp))
        for _i in range(n):
            p = Pick()
            p.creation_info = CreationInfo(version=hp.arid[_i])
            p.resource_id = ResourceIdentifier('smi:nsl/Pick/{0}'.format(
                p.creation_info.version))
            p.waveform_id = WaveformStreamID(network_code=hp.snet[_i],
                                             station_code=hp.sname[_i],
                                             channel_code=hp.scomp[_i])
            if hp.p_pol[_i] > 0:
                p.polarity = 'positive'
            else:
                p.polarity = 'negative'
            a = Arrival()
            a.creation_info = CreationInfo(version=hp.arid[_i])
            a.resource_id = ResourceIdentifier('smi:nsl/Arrival/{0}'.format(
                p.creation_info.version))
            a.azimuth = hp.p_azi_mc[_i, 0]
            a.takeoff_angle = 180. - hp.p_the_mc[_i, 0]
            a.pick_id = p.resource_id
            origin.arrivals.append(a)
            event.picks.append(p)
        event.origins.append(origin)
        event.preferred_origin_id = origin.resource_id.resource_id
    else:  # just update the changes
        origin = event.preferred_origin()
        picks = []
        arrivals = []
        for _i in range(n):
            ind = hp.p_index[_i]
            a = origin.arrivals[ind]
            p = a.pick_id.getReferredObject()
            a.takeoff_angle = hp.p_the_mc[_i, 0]
            picks.append(p)
            arrivals.append(a)
        if only_fm_picks:
            origin.arrivals = arrivals
            event.picks = picks
    # Use the double couple calculator and populate planes/axes etc
    x = hp._best_quality_index
    # Put all the mechanisms into the 'focal_mechanisms' list, mark "best" as preferred
    for s in range(hp.nmult):
        dc = DoubleCouple([hp.str_avg[s], hp.dip_avg[s], hp.rak_avg[s]])
        ax = dc.axis
        focal_mech = FocalMechanism()
        focal_mech.creation_info = CreationInfo(creation_time=UTCDateTime(),
                                                author=hp.author)
        focal_mech.triggering_origin_id = origin.resource_id
        focal_mech.resource_id = ResourceIdentifier(
            'smi:hash/FocalMechanism/{0}/{1}'.format(hp.icusp, s + 1))
        focal_mech.method_id = ResourceIdentifier('HASH')
        focal_mech.nodal_planes = NodalPlanes()
        focal_mech.nodal_planes.nodal_plane_1 = NodalPlane(*dc.plane1)
        focal_mech.nodal_planes.nodal_plane_2 = NodalPlane(*dc.plane2)
        focal_mech.principal_axes = PrincipalAxes()
        focal_mech.principal_axes.t_axis = Axis(azimuth=ax['T']['azimuth'],
                                                plunge=ax['T']['dip'])
        focal_mech.principal_axes.p_axis = Axis(azimuth=ax['P']['azimuth'],
                                                plunge=ax['P']['dip'])
        focal_mech.station_polarity_count = n
        focal_mech.azimuthal_gap = hp.magap
        focal_mech.misfit = hp.mfrac[s]
        focal_mech.station_distribution_ratio = hp.stdr[s]
        focal_mech.comments.append(
            Comment(
                hp.qual[s],
                resource_id=ResourceIdentifier(
                    focal_mech.resource_id.resource_id + '/comment/quality')))
        #----------------------------------------
        event.focal_mechanisms.append(focal_mech)
        if s == x:
            event.preferred_focal_mechanism_id = focal_mech.resource_id.resource_id
    return event
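
A usage sketch, assuming a hashpy.HashPype instance that has already been driven through a HASH run (the import path and the elided steps are assumptions):

from hashpy import HashPype  # assumed import path

hp = HashPype()
# ... load polarity data and run the HASH driver here ...
event = outputOBSPY(hp)  # new Event with origin, picks and focal mechanisms
print(event.preferred_focal_mechanism_id)
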
Example #15
def read_nlloc_hyp(filename, coordinate_converter=None, picks=None, **kwargs):
    """
    Reads a NonLinLoc Hypocenter-Phase file to a
    :class:`~obspy.core.event.Catalog` object.

    .. note::

        Coordinate conversion from coordinate frame of NonLinLoc model files /
        location run to WGS84 has to be specified explicitly by the user if
        necessary.

    .. note::

        An example can be found on the :mod:`~obspy.io.nlloc` submodule front
        page in the documentation pages.

    :param filename: File or file-like object in text mode.
    :type coordinate_converter: func
    :param coordinate_converter: Function to convert (x, y, z)
        coordinates of NonLinLoc output to geographical coordinates and depth
        in meters (longitude, latitude, depth in kilometers).
        If left ``None``, NonLinLoc (x, y, z) output is left unchanged (e.g. if
        it is in geographical coordinates already like for NonLinLoc in
        global mode).
        The function should accept three arguments x, y, z (each of type
        :class:`numpy.ndarray`) and return a tuple of three
        :class:`numpy.ndarray` (lon, lat, depth in kilometers).
    :type picks: list of :class:`~obspy.core.event.Pick`
    :param picks: Original picks used to generate the NonLinLoc location.
        If provided, the output event will include the original picks and the
        arrivals in the output origin will link to them correctly (with their
        ``pick_id`` attribute). If not provided, the output event will include
        (the rather basic) pick information that can be reconstructed from the
        NonLinLoc hypocenter-phase file.
    :rtype: :class:`~obspy.core.event.Catalog`
    """
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    lines = data.splitlines()

    # remember picks originally used in location, if provided
    original_picks = picks
    if original_picks is None:
        original_picks = []

    # determine indices of block start/end of the NLLOC output file
    indices_hyp = [None, None]
    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("NLLOC "):
            indices_hyp[0] = i
        elif line.startswith("END_NLLOC"):
            indices_hyp[1] = i
        elif line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i
    if any([i is None for i in indices_hyp]):
        msg = ("NLLOC HYP file seems corrupt,"
               " could not detect 'NLLOC' and 'END_NLLOC' lines.")
        raise RuntimeError(msg)
    # strip any other lines around NLLOC block
    lines = lines[indices_hyp[0]:indices_hyp[1]]

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = ("NLLOC HYP file seems corrupt, 'PHASE' block is corrupt.")
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    lines = dict([line.split(None, 1) for line in lines])
    line = lines["SIGNATURE"]

    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    creation_time = UTCDateTime.strptime(date + time, str("%d%b%Y%Hh%Mm%S"))

    # maximum likelihood origin location info line
    line = lines["HYPOCENTER"]

    x, y, z = map(float, line.split()[1:7:2])

    if coordinate_converter:
        x, y, z = coordinate_converter(x, y, z)

    # origin time info line
    line = lines["GEOGRAPHIC"]

    year, month, day, hour, minute = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, month, day, hour, minute, seconds)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_xx = float(line.split()[7])
    covariance_yy = float(line.split()[13])
    covariance_zz = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from covariance "
        "matrix as 1D marginal (Lon/Lat errors as great circle degrees) "
        "while OriginUncertainty min/max horizontal errors are calculated "
        "from 2D error ellipsoid and are therefore seemingly higher compared "
        "to 1D errors. Error estimates can be reconstructed from the "
        "following original NonLinLoc error statistics line:\nSTATISTICS " +
        lines["STATISTICS"])

    # goto location quality info line
    line = lines["QML_OriginQuality"].split()

    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # goto location quality info line
    line = lines["QML_OriginUncertainty"]

    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    cat = Catalog(events=[event])
    o = Origin()
    event.origins = [o]
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string))

    cat.creation_info.creation_time = UTCDateTime()
    cat.creation_info.version = "ObsPy %s" % __version__
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version)
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates are
    # large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_xx))
    except ValueError:
        if covariance_xx < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_yy))
    except ValueError:
        if covariance_yy < 0:
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_zz) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        wid = WaveformStreamID(station_code=station)
        date, hourmin, sec = map(str, line[6:9])
        t = UTCDateTime.strptime(date + hourmin, "%Y%m%d%H%M") + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not associate
            # the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick for "
                       "arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    return cat
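
read_nlloc_hyp is also registered with ObsPy's plugin system, so the typical entry point is read_events (the file name is hypothetical):

from obspy import read_events

cat = read_events("loc.20060816.063245.grid0.loc.hyp", format="NLLOC_HYP")
print(cat[0].origins[0].quality.standard_error)
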
Example #16
def _read_single_hypocenter(lines, coordinate_converter, original_picks):
    """
    Given a list of lines (starting with a 'NLLOC' line and ending with a
    'END_NLLOC' line), parse them into an Event.
    """
    try:
        # some paranoid checks..
        assert lines[0].startswith("NLLOC ")
        assert lines[-1].startswith("END_NLLOC")
        for line in lines[1:-1]:
            assert not line.startswith("NLLOC ")
            assert not line.startswith("END_NLLOC")
    except Exception:
        msg = ("This should not have happened, please report this as a bug at "
               "https://github.com/obspy/obspy/issues.")
        raise Exception(msg)

    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = ("NLLOC HYP file seems corrupt, 'PHASE' block is corrupt.")
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    lines = dict([line.split(None, 1) for line in lines[:-1]])
    line = lines["SIGNATURE"]

    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    # new NLLoc > 6.0 seems to add prefix 'run:' before date
    if date.startswith('run:'):
        date = date[4:]
    signature = signature.strip()
    creation_time = UTCDateTime.strptime(date + time, str("%d%b%Y%Hh%Mm%S"))

    if coordinate_converter:
        # maximum likelihood origin location in km info line
        line = lines["HYPOCENTER"]
        x, y, z = coordinate_converter(*map(float, line.split()[1:7:2]))
    else:
        # maximum likelihood origin location lon lat info line
        line = lines["GEOGRAPHIC"]
        y, x, z = map(float, line.split()[8:13:2])

    # maximum likelihood origin time info line
    line = lines["GEOGRAPHIC"]

    year, mon, day, hour, minute = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, mon, day, hour, minute, seconds, strict=False)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_xx = float(line.split()[7])
    covariance_yy = float(line.split()[13])
    covariance_zz = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from covariance "
        "matrix as 1D marginal (Lon/Lat errors as great circle degrees) "
        "while OriginUncertainty min/max horizontal errors are calculated "
        "from 2D error ellipsoid and are therefore seemingly higher compared "
        "to 1D errors. Error estimates can be reconstructed from the "
        "following original NonLinLoc error statistics line:\nSTATISTICS " +
        lines["STATISTICS"])

    # goto location quality info line
    line = lines["QML_OriginQuality"].split()

    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # goto location uncertainty info line
    line = lines["QML_OriginUncertainty"]

    # initialize so the Comment below is valid even without a COMMENT line
    comment = None
    if "COMMENT" in lines:
        comment = lines["COMMENT"].strip().strip('\'"').strip()

    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    o = Origin()
    event.origins = [o]
    event.preferred_origin_id = o.resource_id
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string, force_resource_id=False))
    event.comments.append(Comment(text=comment, force_resource_id=False))

    # SIGNATURE field's first item is LOCSIG, which is supposed to be
    # 'Identification of an individual, institution or other entity'
    # according to
    # http://alomax.free.fr/nlloc/soft6.00/control.html#_NLLoc_locsig_
    # so use it as author in creation info
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version,
                                       author=signature)
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version,
                                   author=signature)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates are
    # large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_xx))
    except ValueError:
        if covariance_xx < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_yy))
    except ValueError:
        if covariance_yy < 0:
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_zz) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        # network codes are not used by NonLinLoc, so they can not be known
        # when reading the .hyp file.. to conform with QuakeML standard set an
        # empty network code
        wid = WaveformStreamID(network_code="", station_code=station)
        # have to split this into ints for overflow to work correctly
        date, hourmin, sec = map(str, line[6:9])
        ymd = [int(date[:4]), int(date[4:6]), int(date[6:8])]
        hm = [int(hourmin[:2]), int(hourmin[2:4])]
        t = UTCDateTime(*(ymd + hm), strict=False) + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not associate
            # the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick for "
                       "arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    event.scope_resource_ids()

    return event
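
A usage note: in practice this parser is reached through ObsPy's plugin mechanism rather than called directly. A minimal sketch, assuming a local NonLinLoc hypocenter-phase file at the placeholder path "loc.hyp":

from obspy import read_events

# "loc.hyp" is a placeholder path; the NLLOC_HYP format plugin dispatches
# to the reader shown above.
cat = read_events("loc.hyp", format="NLLOC_HYP")
event = cat[0]
print(event.preferred_origin().time)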
Example No. 19
def getwf_iris_ncedc_llnl(origin0, client_pick):

    print("Running function getwf_iris_ncedc_llnl")

    # parameters for waveform request
    tbefore_sec = 100
    tafter_sec = 600

    # DEFAULT SETTINGS (see getwaveform_iris.py)
    rotateRTZ = True
    rotateUVW = False  # works only if 'rotateRTZ = True'
    output_cap_weight_file = True
    detrend = True
    demean = True
    output_event_info = True
    taper = False
    ifplot_spectrogram = False

    # for CAP all waveforms need to have the same sample rate
    resample_TF = True
    resample_freq = 20.0  # 0 for no resampling
    scale_factor = 10**2  # for CAP use 10**2  (to convert m/s to cm/s)

    # event parameters
    sec_before_after_event = 10  # time window to search for a target event in a catalog
    min_dist = 0
    max_dist = 1200

    # station parameters
    # 20170321 [email protected] -- I disabled retrieving data for the
    # networks listed below
    # reason 1: some are dense/local and don't improve station coverage
    # reason 2: many have poor-quality (noisy) data
    # reason 3: some have very large amplitudes, possibly a bad response
    network = '*,-XK,-XM,-XS,-XC,-XU,-XT,-XE'
    station = '*,-PURD,-NV33,-GPO'  # all stations
    channel = 'BH?,LH?'

    overwrite_ddir = 1  # 1 = delete data directory if it already exists
    icreateNull = 0  # create null traces so that rotation can work (ObsPy's stream.rotate requires 3 traces)

    # filter
    # set ipre_filt = 0 to prevent extra filtering
    ifFilter = False
    filter_type = 'bandpass'
    # LLNL filter 10-50 sec is most stable. Some waveforms >= 100 sec show
    # oscillations. Use factor of 2 limit = 200 sec for prefilter
    f1 = 1 / 200
    f2 = 1 / 10
    zerophase = True  # False = causal, True = acausal
    corners = 4  # Is corner in Obspy same as Pole in SAC?
    remove_response = True
    iplot_response = False
    ipre_filt = 2  # 0 No pre_filter
    # 1 default pre_filter (see getwaveform_iris.py)
    # 2 user-defined pre_filter
    f0 = 0.5 * f1
    f3 = 2.0 * f2
    # The following override the defaults above for the FMTU paper
    # (note: f4 is defined but unused; pre_filt only uses f0-f3)
    f0 = 0.005
    f1 = 0.006
    f3 = 10
    f4 = 15
    pre_filt = (f0, f1, f2, f3)  # applies for ipre_filt = 2 only

    # NOTE event data from user-defined catalog!
    # initialize objects
    ev = Event()
    org = Origin()
    mag = Magnitude()

    # build objects
    org.time = UTCDateTime(origin0[0])
    org.longitude = origin0[1]
    org.latitude = origin0[2]
    org.depth = origin0[3]
    mag.mag = origin0[5]
    mag.magnitude_type = origin0[6]  # Mw, ml, mb, ...

    ev.origins.append(org)
    ev.magnitudes.append(mag)

    # Delete existing data directory
    eid = util_helpers.otime2eid(ev.origins[0].time)
    ddir = './' + eid
    if os.path.exists('RAW'):
        print("WARNING. RAW directory already exists. Deleting ...")
        shutil.rmtree('RAW')
    if overwrite_ddir and os.path.exists(ddir):
        print("WARNING. %s already exists. Deleting ..." % ddir)
        shutil.rmtree(ddir)

    if client_pick == "IRIS":
        print('Using client %s' % client_pick)
        idb = 1
        client_list = ["IRIS", "NCEDC"]
        print("WARNING using event data from user-defined catalog")

    # LLNL
    if client_pick == "LLNL":
        print('Using client %s' % client_pick)
        idb = 3
        client_list = ["LLNL"]

        client = llnl_db_client.LLNLDBClient(
            "/store/raw/LLNL/UCRL-MI-222502/westernus.wfdisc")
        # get event time and event ID
        otime = obspy.UTCDateTime(origin0[0])
        cat = client.get_catalog()
        mintime_str = "time > %s" % (otime - sec_before_after_event)
        maxtime_str = "time < %s" % (otime + sec_before_after_event)
        print(mintime_str + "\n" + maxtime_str)
        ev = cat.filter(mintime_str, maxtime_str)
        nev = len(ev)
        if nev == 1:
            ev = ev[0]  # doesn't have magnitude (ATRISCO)
        elif nev > 1:
            ev = ev[1]  # [0] may not include magnitude. [1] may (LLNL)
        else:
            print("No events in the catalog for the given time period. Stop.")
            return

    # The IRIS requests include BK data, but it has to be requested through
    # the NCEDC client
    for iclient in client_list:
        if iclient == "IRIS":
            client = Client(iclient)
        elif iclient == "NCEDC":
            network = 'BK'
            station = '*'  # the NCEDC client doesn't accept "-staX" exclusions
            client = Client(iclient)

        try:
            gw.run_get_waveform(c=client,
                                event=ev,
                                idb=idb,
                                ref_time_place=ev,
                                min_dist=min_dist,
                                max_dist=max_dist,
                                before=tbefore_sec,
                                after=tafter_sec,
                                network=network,
                                station=station,
                                channel=channel,
                                resample_freq=resample_freq,
                                ifrotateRTZ=rotateRTZ,
                                ifrotateUVW=rotateUVW,
                                ifCapInp=output_cap_weight_file,
                                ifRemoveResponse=remove_response,
                                ifDetrend=detrend,
                                ifDemean=demean,
                                Taper=taper,
                                ifEvInfo=output_event_info,
                                scale_factor=scale_factor,
                                icreateNull=icreateNull,
                                ipre_filt=ipre_filt,
                                pre_filt=pre_filt,
                                ifFilter=ifFilter,
                                fmin=f1,
                                fmax=f2,
                                filter_type=filter_type,
                                zerophase=zerophase,
                                corners=corners,
                                iplot_response=iplot_response,
                                ifplot_spectrogram=ifplot_spectrogram)
        except Exception:
            print("~~~~~ SOMETHING HAPPENED ~~~~~~~~~~")
            print(ev, client)
            print("Continuing")
            continue
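
A note on the pre_filt tuple used above: its four corner frequencies define a frequency-domain cosine taper that ObsPy applies before deconvolving the instrument response, which stabilizes the division at the low- and high-frequency ends of the spectrum. A minimal sketch using ObsPy's bundled demo data (the corner values here are illustrative, not the FMTU settings above):

from obspy import read, read_inventory

st = read()             # ObsPy demo waveform
inv = read_inventory()  # matching demo station metadata
# taper ramps up over f0-f1 and back down over f2-f3 (all in Hz)
pre_filt = (0.01, 0.02, 8.0, 10.0)
st.remove_response(inventory=inv, pre_filt=pre_filt, output="VEL")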
Example No. 20
def par2quakeml(Par_filename, QuakeML_filename, rotation_axis=[0.0, 1.0, 0.0],
                rotation_angle=-57.5, origin_time="2000-01-01 00:00:00.0",
                event_type="other event"):
    # initialise event
    ev = Event()

    # open and read Par file
    fid = open(Par_filename, 'r')

    fid.readline()
    fid.readline()
    fid.readline()
    fid.readline()

    lat_old = 90.0 - float(fid.readline().strip().split()[0])
    lon_old = float(fid.readline().strip().split()[0])
    depth = float(fid.readline().strip().split()[0])

    fid.readline()

    Mtt_old = float(fid.readline().strip().split()[0])
    Mpp_old = float(fid.readline().strip().split()[0])
    Mrr_old = float(fid.readline().strip().split()[0])
    Mtp_old = float(fid.readline().strip().split()[0])
    Mtr_old = float(fid.readline().strip().split()[0])
    Mpr_old = float(fid.readline().strip().split()[0])

    # rotate event into physical domain

    lat, lon = rot.rotate_lat_lon(lat_old, lon_old, rotation_axis,
                                  rotation_angle)
    Mrr, Mtt, Mpp, Mtr, Mpr, Mtp = rot.rotate_moment_tensor(
        Mrr_old, Mtt_old, Mpp_old, Mtr_old, Mpr_old, Mtp_old, lat_old, lon_old,
        rotation_axis, rotation_angle)

    # populate event origin data
    ev.event_type = event_type

    ev_origin = Origin()
    ev_origin.time = UTCDateTime(origin_time)
    ev_origin.latitude = lat
    ev_origin.longitude = lon
    ev_origin.depth = depth
    ev.origins.append(ev_origin)

    # populate event moment tensor

    ev_tensor = Tensor()
    ev_tensor.m_rr = Mrr
    ev_tensor.m_tt = Mtt
    ev_tensor.m_pp = Mpp
    ev_tensor.m_rt = Mtr
    ev_tensor.m_rp = Mpr
    ev_tensor.m_tp = Mtp

    ev_momenttensor = MomentTensor()
    ev_momenttensor.tensor = ev_tensor
    ev_momenttensor.scalar_moment = np.sqrt(Mrr ** 2 + Mtt ** 2 + Mpp ** 2 +
                                            Mtr ** 2 + Mpr ** 2 + Mtp ** 2)

    ev_focalmechanism = FocalMechanism()
    ev_focalmechanism.moment_tensor = ev_momenttensor
    ev_focalmechanism.nodal_planes = NodalPlanes().setdefault(0, 0)

    ev.focal_mechanisms.append(ev_focalmechanism)

    # populate event magnitude
    ev_magnitude = Magnitude()
    ev_magnitude.mag = 0.667 * (np.log10(ev_momenttensor.scalar_moment) - 9.1)
    ev_magnitude.magnitude_type = 'Mw'
    ev.magnitudes.append(ev_magnitude)

    # write QuakeML file
    cat = Catalog()
    cat.append(ev)
    cat.write(QuakeML_filename, format="quakeml")

    # clean up
    fid.close()
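
The magnitude line above encodes the standard moment-magnitude relation Mw = (2/3) * (log10(M0) - 9.1) for M0 in N·m, with 2/3 truncated to 0.667. A quick numerical check with an illustrative moment value:

import numpy as np

M0 = 1.1e17  # scalar moment in N*m (illustrative value)
Mw = 2.0 / 3.0 * (np.log10(M0) - 9.1)
print(round(Mw, 2))  # -> 5.29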
Example No. 21
    def build(self):
        """
        Build an obspy moment tensor focal mech event

        This makes the tensor output into an Event containing:
        1) a FocalMechanism with a MomentTensor, NodalPlanes, and PrincipalAxes
        2) a Magnitude of the Mw from the Tensor

        Which is what we want for outputting QuakeML using
        the (slightly modified) obspy code.

        Input
        -----
        filehandle => open file OR str from filehandle.read()

        Output
        ------
        event => instance of Event() class as described above
        """
        p = self.parser
        event         = Event(event_type='earthquake')
        origin        = Origin()
        focal_mech    = FocalMechanism()
        nodal_planes  = NodalPlanes()
        moment_tensor = MomentTensor()
        principal_ax  = PrincipalAxes()
        magnitude     = Magnitude()
        data_used     = DataUsed()
        creation_info = CreationInfo(agency_id='NN')
        ev_mode = 'automatic'
        ev_stat = 'preliminary'
        evid = None
        orid = None
        # Parse the entire file line by line.
        for n,l in enumerate(p.line):
            if 'REVIEWED BY NSL STAFF' in l:
                ev_mode = 'manual'
                ev_stat = 'reviewed'
            if 'Event ID' in l:
                evid = p._id(n)
            if 'Origin ID' in l:
                orid = p._id(n)
            if 'Ichinose' in l:
                moment_tensor.category = 'regional'
            if re.match(r'^\d{4}\/\d{2}\/\d{2}', l):
                ev = p._event_info(n)
            if 'Depth' in l:
                derived_depth = p._depth(n)
            if 'Mw' in l:
                magnitude.mag = p._mw(n) 
                magnitude.magnitude_type = 'Mw'
            if 'Mo' in l and 'dyne' in l:
                moment_tensor.scalar_moment = p._mo(n)
            if 'Percent Double Couple' in l:
                moment_tensor.double_couple = p._percent(n)
            if 'Percent CLVD' in l:
                moment_tensor.clvd = p._percent(n)
            if 'Epsilon' in l:
                moment_tensor.variance = p._epsilon(n)
            if 'Percent Variance Reduction' in l:
                moment_tensor.variance_reduction = p._percent(n)
            if 'Major Double Couple' in l and 'strike' in p.line[n+1]:
                planes = p._double_couple(n)  # renamed from "np" to avoid shadowing numpy
                nodal_planes.nodal_plane_1 = NodalPlane(*planes[0])
                nodal_planes.nodal_plane_2 = NodalPlane(*planes[1])
                nodal_planes.preferred_plane = 1
            if 'Spherical Coordinates' in l:
                mt = p._mt_sphere(n)
                moment_tensor.tensor = Tensor(
                    m_rr = mt['Mrr'],
                    m_tt = mt['Mtt'],
                    m_pp = mt['Mff'],
                    m_rt = mt['Mrt'],
                    m_rp = mt['Mrf'],
                    m_tp = mt['Mtf'],
                    )
            if 'Eigenvalues and eigenvectors of the Major Double Couple' in l:
                ax = p._vectors(n)
                principal_ax.t_axis = Axis(ax['T']['trend'], ax['T']['plunge'], ax['T']['ev'])
                principal_ax.p_axis = Axis(ax['P']['trend'], ax['P']['plunge'], ax['P']['ev'])
                principal_ax.n_axis = Axis(ax['N']['trend'], ax['N']['plunge'], ax['N']['ev'])
            if 'Number of Stations' in l:
                data_used.station_count = p._number_of_stations(n)
            if 'Maximum' in l and 'Gap' in l:
                focal_mech.azimuthal_gap = p._gap(n)
            if re.match(r'^Date', l):
                creation_info.creation_time = p._creation_time(n)
        # Creation Time
        creation_info.version = orid
        # Fill in magnitude values
        magnitude.evaluation_mode = ev_mode
        magnitude.evaluation_status = ev_stat
        magnitude.creation_info = creation_info.copy()
        magnitude.resource_id = self._rid(magnitude)
        # Stub origin
        origin.time = ev.get('time')
        origin.latitude = ev.get('lat')
        origin.longitude = ev.get('lon')
        origin.depth = derived_depth * 1000.
        origin.depth_type = "from moment tensor inversion"
        origin.creation_info = creation_info.copy()
        # Unique from true origin ID
        _oid = self._rid(origin)
        origin.resource_id = ResourceIdentifier(str(_oid) + '/mt')
        del _oid
        # Make an id for the MT that references this origin
        ogid = str(origin.resource_id)
        doid = ResourceIdentifier(ogid, referred_object=origin)
        # Make an id for the moment tensor mag which references this mag
        mrid = str(magnitude.resource_id)
        mmid = ResourceIdentifier(mrid, referred_object=magnitude)
        # MT todo: could check/use URL for RID if parsing the php file
        moment_tensor.evaluation_mode = ev_mode
        moment_tensor.evaluation_status = ev_stat
        moment_tensor.data_used = data_used
        moment_tensor.moment_magnitude_id = mmid
        moment_tensor.derived_origin_id = doid
        moment_tensor.creation_info = creation_info.copy()
        moment_tensor.resource_id = self._rid(moment_tensor)
        # Fill in focal_mech values
        focal_mech.nodal_planes  = nodal_planes
        focal_mech.moment_tensor = moment_tensor
        focal_mech.principal_axes = principal_ax
        focal_mech.creation_info = creation_info.copy()
        focal_mech.resource_id = self._rid(focal_mech)
        # add mech and new magnitude to event
        event.focal_mechanisms = [focal_mech]
        event.magnitudes = [magnitude]
        event.origins = [origin]
        event.creation_info = creation_info.copy()
        # If an MT was done, that's the preferred mag/mech
        event.preferred_magnitude_id = str(magnitude.resource_id)
        event.preferred_focal_mechanism_id = str(focal_mech.resource_id)
        if evid:
            event.creation_info.version = evid
        event.resource_id = self._rid(event)
        self.event = event
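
A side note on the cross-referencing pattern used here: QuakeML links a MomentTensor to its derived origin and moment magnitude purely by resource identifier, and ObsPy resolves those links via referred_object. A minimal sketch of the pattern (values are illustrative):

from obspy.core.event import Magnitude, MomentTensor, Origin, ResourceIdentifier

origin = Origin()
magnitude = Magnitude(mag=4.8, magnitude_type='Mw')  # illustrative values
mt = MomentTensor()
# point the tensor at its derived origin and magnitude by resource id
mt.derived_origin_id = ResourceIdentifier(
    str(origin.resource_id), referred_object=origin)
mt.moment_magnitude_id = ResourceIdentifier(
    str(magnitude.resource_id), referred_object=magnitude)
assert mt.derived_origin_id.get_referred_object() is origin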
Example No. 22

def main(phases):
    from obspy.io.xseed import Parser
    from obspy.core.event import Event, Origin, Magnitude, NodalPlane
    from obspy.core import read, UTCDateTime
    from obspy.clients.fdsn import Client
    from obspy import read_events

    stas_templates = {
        'ALL': [
            'POHA', 'PET', 'MAJO', 'BILL', 'YSS', 'MA2', 'MDJ', 'YAK', 'INCN',
            'TIXI', 'AIS', 'CASY', 'QSPA', 'DRV', 'SBA', 'DZM', 'RAR', 'MSVF',
            'AFI', 'HNR', 'WMQ', 'BRVK', 'KMI', 'AAK', 'CHTO', 'ABKT', 'FURI',
            'KMBO', 'MSEY'
        ]
    }

    sta_adjtime = {}
    stas = {}
    for phase in phases:
        stas[phase] = stas_templates['ALL']

    #stalocs = {'KDAK':'10'}
    stalocs = {}
    mseed_filename = '../data/2004-11-11-mw75-timor-region-2.miniseed'
    client = Client('IRIS')
    #client = Client('http://localhost:8080')
    #client = Client('http://compute2.rses.anu.edu.au:8080')
    DO_SYNTH = True
    if DO_SYNTH and ('R' in phases or 'L' in phases):
        print('Initialize surface wave calculations')
        surfw_excite('./java ')
        readhrv(source_dir + '/surfw/ETL-JGR97 ')
        read_gdm52(source_dir + '/surfw/GDM52/')
        read_de06(source_dir + '/surfw/DE06a/')

    event_name = 'Alor_20041111'

    FAULT_FROM_PKL = False
    if os.path.isfile('event.pkl'):
        event = pickle.load(open('event.pkl', 'rb'))  # binary mode for pickle
        print("I AM THERE!")
    else:
        event = read_events('./quakeml.xml')[0]
        pickle.dump(event, open('event.pkl', 'wb'))  # binary mode for pickle
        print("I AM HERE!")

    org = event.preferred_origin()
    if org is None:
        org = event.origins[0]

    if FAULT_FROM_PKL:
        fault = pickle.load(open('fault_neic.pkl', 'rb'))
    else:
        fault = neic2fault('p000d85g.param',
                           org,
                           xdr=-0.375,
                           velmod=read_velmod('./velmod.txt'),
                           NEIC_potency=True)

    # Set parameters for deconvolution and time windowing
    windows = {
        'R': (2.75, 4.5),
        'L': (3.25, 5.5),  # Group velocity windows for surface waves
        'P': (5, 75.),
        'SH': (5, 85.)
    }  # Times before and after body wave phases
    frq4 = {}
    for key in ['R', 'L']:
        frq4[key] = [0.002, 0.004, 0.008, 0.0125]  # Surface wave filter
    for key in ['P', 'SH']:
        frq4[key] = [0.004, 0.01, .25, 0.5]  # Body wave filter

    origin = Origin()
    origin.time = UTCDateTime(org.time)
    origin.latitude = fault.hypo.y
    origin.depth = fault.hypo.z
    origin.longitude = fault.hypo.x
    fault.ntw = 1
    #stobs = getobs_surf(client,org,phases,frq4,gvel_win,stas,stalocs)
    print('TIME=', origin.time)

    stobs, inv = getobs(mseed_filename,
                        client,
                        event,
                        phases,
                        frq4,
                        windows,
                        stas,
                        stalocs,
                        picks=get_isctimes('isc.txt', org.time),
                        taper=0.2,
                        adjtime=sta_adjtime)
    #return stobs,inv
    # write out the station inventory
    inv.write('%s.inv' % event_name, format="STATIONXML")

    # Calculate the subfault bodywave Green's functions
    for phase in ['P', 'SH']:
        if phase not in phases:
            continue
        for sbf in fault.subfaults:
            sft1, sft2 = np.diff(sbf.ruptimes()[0:3])
            bodywave(sbf,
                     stobs[phase],
                     phase,
                     sft1,
                     sft2,
                     frq4=frq4[phase])

    if not FAULT_FROM_PKL:
        pickle.dump(fault, open('fault_neic.pkl', 'wb'))

    figs = {}
    ntrc = {'P': 0, 'SH': 0, 'R': 0, 'L': 0}
    if 'R' in stobs.keys() or 'L' in stobs.keys():
        stobs_path = './stobs_surf_%s.pkl' % event_name
        print('Pickling stream to %s\n' % stobs_path)
        stobs_surf = {'params': stobs['params']}
        for key in stobs.keys():
            if key == 'R' or key == 'L':
                ntrc[key] = len(stobs[key])
                stobs_surf[key] = stobs[key]
        pickle.dump(stobs_surf, open(stobs_path, "wb"))
        figs['Surf'] = plt.figure(figsize=(12., 12.))
    if 'P' in stobs.keys() or 'SH' in stobs.keys():
        stobs_path = './stobs_body_%s.pkl' % event_name
        print('Pickling stream to %s\n' % stobs_path)
        stobs_body = {'params': stobs['params']}
        for key in stobs.keys():
            if key == 'P' or key == 'SH':
                ntrc[key] = len(stobs[key])
                stobs_body[key] = stobs[key]
        pickle.dump(stobs_body, open(stobs_path, "wb"))
        figs['Body'] = plt.figure(figsize=(12., 12.))

#    """
### Plot the data
    npl_col = 2  # One for body and one for surface waves
    ax_wvf = {}
    th = 10.
    print(windows['P'])
    if 'P' in stobs.keys() or 'SH' in stobs.keys():
        print "WINDOWS[P][0]", windows['P'][0]
        stref = bodysynth_fault(fault, stobs, windows['P'][0])
    else:
        stref = {}
    ##stref,d5 = surfsynth_fault(fault,th,org.time,stobs['R'],frq4,gvel_win['R'])
    mxtrc = max(ntrc['R'], ntrc['L'])
    ncol = mxtrc // 11 + 1  # integer division
    nrow = min(11, mxtrc)
    print(mxtrc, nrow, ncol)
    if 'R' in stobs.keys():
        plt.figure(figs['Surf'].number)
        phs = 'R'
        #.return stobs
        stref[phs], d5 = surfsynth_fault(fault, th, org.time, stobs[phs],
                                         frq4[phs], windows[phs])
        return stobs, stref  # early debug return: the plotting below is skipped
        ax_wvf['R'] = plt.subplot2grid((2, npl_col), (0, 0),
                                       rowspan=2,
                                       colspan=1)
        plot_datacomp(ax_wvf['R'],
                      stobs, ['R'],
                      stref,
                      origin,
                      rowcol=(nrow, ncol),
                      yscale=0.25,
                      ttick=500.)
        plt.show()
    if 'L' in stobs.keys():
        plt.figure(figs['Surf'].number)
        phs = 'L'
        stref[phs], d5 = surfsynth_fault(fault, th, org.time, stobs[phs],
                                         frq4[phs], windows[phs])
        ax_wvf['L'] = plt.subplot2grid((2, npl_col), (0, 1),
                                       rowspan=2,
                                       colspan=1)
        plot_datacomp(ax_wvf['L'],
                      stobs, ['L'],
                      stref,
                      origin,
                      rowcol=(nrow, ncol, 5),
                      yscale=0.25,
                      ttick=500.)

    mxtrc = max(ntrc['P'], ntrc['SH'])
    ncol = mxtrc // 11 + 1  # integer division
    nrow = min(11, mxtrc)
    print(mxtrc, nrow, ncol)
    sys.exit(0)  # debug exit: skips the body-wave plots below
    if 'P' in stobs.keys():
        plt.figure(figs['Body'].number)
        #stref = None
        ax_wvf['P'] = plt.subplot2grid((2, npl_col), (0, 0),
                                       rowspan=2,
                                       colspan=1)
        plot_datacomp(ax_wvf['P'],
                      stobs, ['P'],
                      stref,
                      origin,
                      rowcol=(nrow, ncol),
                      yscale=0.25,
                      ttick=100.)
    if 'SH' in stobs.keys():
        plt.figure(figs['Body'].number)
        ax_wvf['SH'] = plt.subplot2grid((2, npl_col), (0, 1),
                                        rowspan=2,
                                        colspan=1)
        plot_datacomp(ax_wvf['SH'],
                      stobs, ['SH'],
                      stref,
                      origin,
                      rowcol=(nrow, ncol),
                      yscale=0.25,
                      ttick=100.)

    if 'P' in stobs.keys() or 'SH' in stobs.keys():
        plt.figure(figs['Body'].number)
        plt.tight_layout(pad=5., w_pad=0.5, h_pad=.0)
        plt.savefig('getdata_PSH.pdf')
    if 'R' in stobs.keys() or 'L' in stobs.keys():
        plt.figure(figs['Surf'].number)
        plt.tight_layout(pad=5., w_pad=0.5, h_pad=.0)
        plt.savefig('getdata_RL.pdf')

    plt.show()
    #    """
    return (fault, stref, stobs)  #,d0,M
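
The caching above relies on pickle round-trips; note that under Python 3 the file handles must be opened in binary mode, as fixed in the listing. A minimal sketch of the pattern with a stand-in object:

import pickle

obj = {'params': {'taper': 0.2}}  # stand-in for the stobs dictionary
with open('stobs_demo.pkl', 'wb') as f:  # binary mode for pickle
    pickle.dump(obj, f)
with open('stobs_demo.pkl', 'rb') as f:
    assert pickle.load(f) == obj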
Example No. 23
    def _parse_first_line_origin(self, line, event, magnitudes):
        """
        Parse the first line of origin data.

        :type line: str
        :param line: Line to parse.
        :type event: :class:`~obspy.core.event.event.Event`
        :param event: Event of the origin.
        :type magnitudes: list of
            :class:`~obspy.core.event.magnitude.Magnitude`
        :param magnitudes: Store magnitudes in a list to keep
            their positions.
        :rtype: :class:`~obspy.core.event.origin.Origin`,
            :class:`~obspy.core.event.resourceid.ResourceIdentifier`
        :returns: Parsed origin or None, resource identifier of the
            origin.
        """
        magnitude_types = []
        magnitude_values = []
        magnitude_station_counts = []

        fields = self.fields['line_1']

        time_origin = line[fields['time']].strip()
        time_fixed_flag = line[fields['time_fixf']].strip()
        latitude = line[fields['lat']].strip()
        longitude = line[fields['lon']].strip()
        epicenter_fixed_flag = line[fields['epicenter_fixf']].strip()
        depth = line[fields['depth']].strip()
        depth_fixed_flag = line[fields['depth_fixf']].strip()
        phase_count = line[fields['n_def']].strip()
        station_count = line[fields['n_sta']].strip()
        azimuthal_gap = line[fields['gap']].strip()
        magnitude_types.append(line[fields['mag_type_1']].strip())
        magnitude_values.append(line[fields['mag_1']].strip())
        magnitude_station_counts.append(line[fields['mag_n_sta_1']].strip())
        magnitude_types.append(line[fields['mag_type_2']].strip())
        magnitude_values.append(line[fields['mag_2']].strip())
        magnitude_station_counts.append(line[fields['mag_n_sta_2']].strip())
        magnitude_types.append(line[fields['mag_type_3']].strip())
        magnitude_values.append(line[fields['mag_3']].strip())
        magnitude_station_counts.append(line[fields['mag_n_sta_3']].strip())
        author = line[fields['author']].strip()
        origin_id = line[fields['id']].strip()

        origin = Origin()
        origin.quality = OriginQuality()

        try:
            origin.time = UTCDateTime(time_origin.replace('/', '-'))
            origin.latitude = float(latitude)
            origin.longitude = float(longitude)
        except (TypeError, ValueError):
            self._warn('Missing origin data, skipping event')
            return None, None

        origin.time_fixed = time_fixed_flag.lower() == 'f'
        origin.epicenter_fixed = epicenter_fixed_flag.lower() == 'f'

        try:
            # Convert value from km to m
            origin.depth = float(depth) * 1000
        except ValueError:
            pass
        try:
            origin.depth_type = DEPTH_TYPES[depth_fixed_flag]
        except KeyError:
            origin.depth_type = OriginDepthType('from location')
        try:
            origin.quality.used_phase_count = int(phase_count)
            origin.quality.associated_phase_count = int(phase_count)
        except ValueError:
            pass
        try:
            origin.quality.used_station_count = int(station_count)
            origin.quality.associated_station_count = int(station_count)
        except ValueError:
            pass
        try:
            origin.quality.azimuthal_gap = float(azimuthal_gap)
        except ValueError:
            pass

        self.author = author
        origin.creation_info = self._get_creation_info()

        public_id = "origin/%s" % origin_id
        origin_res_id = self._get_res_id(public_id)

        for i in range(3):
            try:
                magnitude = Magnitude()
                magnitude.creation_info = self._get_creation_info()
                magnitude.magnitude_type = magnitude_types[i]
                magnitude.mag = float(magnitude_values[i])
                magnitude.station_count = int(magnitude_station_counts[i])
                magnitude.origin_id = origin_res_id
                magnitudes.append(magnitude)
                event.magnitudes.append(magnitude)
            except ValueError:
                # Magnitude can be empty but we need to keep the
                # position between mag1, mag2 or mag3.
                magnitudes.append(None)

        return origin, origin_res_id
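
Because later bulletin lines refer to magnitudes by their column position, the parser appends None placeholders instead of skipping empty slots. A minimal sketch of why the positions matter (the parsed values are hypothetical):

from obspy.core.event import Magnitude

# hypothetical parse result where the second magnitude column was blank
magnitudes = [Magnitude(mag=4.5, magnitude_type='mb'),
              None,
              Magnitude(mag=4.2, magnitude_type='MS')]
# a later line referencing the third magnitude still finds the right object
assert magnitudes[2].magnitude_type == 'MS'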
Example No. 24

def __toOrigin(parser, origin_el):
    """
    Parses a given origin etree element.

    :type parser: :class:`~obspy.core.util.xmlwrapper.XMLParser`
    :param parser: Open XMLParser object.
    :type origin_el: etree.element
    :param origin_el: origin element to be parsed.
    :return: A ObsPy :class:`~obspy.core.event.Origin` object.
    """
    global CURRENT_TYPE

    origin = Origin()
    origin.resource_id = ResourceIdentifier(prefix="/".join([RESOURCE_ROOT, "origin"]))

    # I guess setting the program used as the method id is fine.
    origin.method_id = "%s/location_method/%s/1" % (RESOURCE_ROOT,
        parser.xpath2obj('program', origin_el))
    if str(origin.method_id).lower().endswith("none"):
        origin.method_id = None

    # Standard parameters.
    origin.time, origin.time_errors = \
        __toTimeQuantity(parser, origin_el, "time")
    origin.latitude, origin_latitude_error = \
        __toFloatQuantity(parser, origin_el, "latitude")
    origin.longitude, origin_longitude_error = \
        __toFloatQuantity(parser, origin_el, "longitude")
    origin.depth, origin.depth_errors = \
        __toFloatQuantity(parser, origin_el, "depth")

    if origin_longitude_error:
        origin_longitude_error = origin_longitude_error["uncertainty"]
    if origin_latitude_error:
        origin_latitude_error = origin_latitude_error["uncertainty"]

    # Figure out the depth type.
    depth_type = parser.xpath2obj("depth_type", origin_el)

    # Map Seishub specific depth type to the QuakeML depth type.
    if depth_type == "from location program":
        depth_type = "from location"
    if depth_type is not None:
        origin.depth_type = depth_type

    # XXX: CHECK DEPTH ORIENTATION!!

    if CURRENT_TYPE == "seiscomp3":
        origin.depth *= 1000
        if origin.depth_errors.uncertainty:
            origin.depth_errors.uncertainty *= 1000
    else:
        # Convert to m.
        origin.depth *= -1000
        if origin.depth_errors.uncertainty:
            origin.depth_errors.uncertainty *= 1000

    # Earth model.
    earth_mod = parser.xpath2obj('earth_mod', origin_el, str)
    if earth_mod:
        earth_mod = earth_mod.split()
        earth_mod = ",".join(earth_mod)
        origin.earth_model_id = "%s/earth_model/%s/1" % (RESOURCE_ROOT,
            earth_mod)

    if (origin_latitude_error is None or origin_longitude_error is None) and \
            CURRENT_TYPE not in ["seiscomp3", "toni"]:
        raise Exception("Missing latitude/longitude uncertainty for type %s"
                        % CURRENT_TYPE)

    if origin_latitude_error and origin_longitude_error:
        if CURRENT_TYPE in ["baynet", "obspyck"]:
            uncert = OriginUncertainty()
            if origin_latitude_error > origin_longitude_error:
                uncert.azimuth_max_horizontal_uncertainty = 0
            else:
                uncert.azimuth_max_horizontal_uncertainty = 90
            uncert.min_horizontal_uncertainty, \
                uncert.max_horizontal_uncertainty = \
                sorted([origin_longitude_error, origin_latitude_error])
            uncert.min_horizontal_uncertainty *= 1000.0
            uncert.max_horizontal_uncertainty *= 1000.0
            uncert.preferred_description = "uncertainty ellipse"
            origin.origin_uncertainty = uncert
        elif CURRENT_TYPE == "earthworm":
            uncert = OriginUncertainty()
            uncert.horizontal_uncertainty = origin_latitude_error
            uncert.horizontal_uncertainty *= 1000.0
            uncert.preferred_description = "horizontal uncertainty"
            origin.origin_uncertainty = uncert
        elif CURRENT_TYPE in ["seiscomp3", "toni"]:
            pass
        else:
            raise Exception

    # Parse the OriginQuality if applicable.
    if not origin_el.xpath("originQuality"):
        return origin

    origin_quality_el = origin_el.xpath("originQuality")[0]
    origin.quality = OriginQuality()
    origin.quality.associated_phase_count = \
        parser.xpath2obj("associatedPhaseCount", origin_quality_el, int)
    # QuakeML does apparently not distinguish between P and S wave phase
    # count. Some Seishub event files do.
    p_phase_count = parser.xpath2obj("P_usedPhaseCount", origin_quality_el,
                                     int)
    s_phase_count = parser.xpath2obj("S_usedPhaseCount", origin_quality_el,
                                     int)
    # Use both in case they are set.
    if p_phase_count is not None and s_phase_count is not None:
        phase_count = p_phase_count + s_phase_count
        # Also add two Seishub element file specific elements.
        origin.quality.p_used_phase_count = p_phase_count
        origin.quality.s_used_phase_count = s_phase_count
    # Otherwise the total usedPhaseCount should be specified.
    else:
        phase_count = parser.xpath2obj("usedPhaseCount",
                                       origin_quality_el, int)
    if p_phase_count is not None:
        origin.quality.setdefault("extra", AttribDict())
        origin.quality.extra.usedPhaseCountP = {'value': p_phase_count,
                                                'namespace': NAMESPACE}
    if s_phase_count is not None:
        origin.quality.setdefault("extra", AttribDict())
        origin.quality.extra.usedPhaseCountS = {'value': s_phase_count,
                                                'namespace': NAMESPACE}
    origin.quality.used_phase_count = phase_count

    associated_station_count = \
        parser.xpath2obj("associatedStationCount", origin_quality_el, int)
    used_station_count = parser.xpath2obj("usedStationCount",
        origin_quality_el, int)
    depth_phase_count = parser.xpath2obj("depthPhaseCount", origin_quality_el,
        int)
    standard_error = parser.xpath2obj("standardError", origin_quality_el,
        float)
    azimuthal_gap = parser.xpath2obj("azimuthalGap", origin_quality_el, float)
    secondary_azimuthal_gap = \
        parser.xpath2obj("secondaryAzimuthalGap", origin_quality_el, float)
    ground_truth_level = parser.xpath2obj("groundTruthLevel",
        origin_quality_el, str)
    minimum_distance = parser.xpath2obj("minimumDistance", origin_quality_el,
        float)
    maximum_distance = parser.xpath2obj("maximumDistance", origin_quality_el,
        float)
    median_distance = parser.xpath2obj("medianDistance", origin_quality_el,
        float)
    if minimum_distance is not None:
        minimum_distance = kilometer2degrees(minimum_distance)
    if maximum_distance is not None:
        maximum_distance = kilometer2degrees(maximum_distance)
    if median_distance is not None:
        median_distance = kilometer2degrees(median_distance)

    if associated_station_count is not None:
        origin.quality.associated_station_count = associated_station_count
    if used_station_count is not None:
        origin.quality.used_station_count = used_station_count
    if depth_phase_count is not None:
        origin.quality.depth_phase_count = depth_phase_count
    if standard_error is not None and not math.isnan(standard_error):
        origin.quality.standard_error = standard_error
    if azimuthal_gap is not None:
        origin.quality.azimuthal_gap = azimuthal_gap
    if secondary_azimuthal_gap is not None:
        origin.quality.secondary_azimuthal_gap = secondary_azimuthal_gap
    if ground_truth_level is not None:
        origin.quality.ground_truth_level = ground_truth_level
    if minimum_distance is not None:
        origin.quality.minimum_distance = minimum_distance
    if maximum_distance is not None:
        origin.quality.maximum_distance = maximum_distance
    if median_distance is not None and not math.isnan(median_distance):
        origin.quality.median_distance = median_distance

    return origin
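
QuakeML stores the OriginQuality distance fields in great-circle degrees, hence the repeated kilometer2degrees conversions above. A minimal sketch of the conversion, which assumes ObsPy's default spherical Earth radius of 6371 km:

from obspy.geodetics import kilometer2degrees

for km in (10.0, 111.19, 500.0):
    print(km, 'km ->', round(kilometer2degrees(km), 3), 'deg')
# 111.19 km is roughly one degree of arc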
Example No. 25
def outputOBSPY(hp, event=None, only_fm_picks=False):
    """
    Make an Event which includes the current focal mechanism information from HASH
    
    Use the 'only_fm_picks' flag to only include the picks HASH used for the FocalMechanism.
    This flag will replace the 'picks' and 'arrivals' lists of existing events with new ones.
    
    Inputs
    -------
    hp    : hashpy.HashPype instance
    
    event : obspy.core.event.Event
    
    only_fm_picks : bool of whether to overwrite the picks/arrivals lists
    
    
    Returns
    -------
    obspy.core.event.Event
    
    Event will be new if no event was input, FocalMech added to existing event
    """
    # Returns new (or updates existing) Event with HASH solution
    n = hp.npol
    if event is None:
        event = Event(focal_mechanisms=[], picks=[], origins=[])
        origin = Origin(arrivals=[])
        origin.time = UTCDateTime(hp.tstamp)
        origin.latitude = hp.qlat
        origin.longitude = hp.qlon
        origin.depth = hp.qdep
        origin.creation_info = CreationInfo(version=hp.icusp)
        origin.resource_id = ResourceIdentifier('smi:hash/Origin/{0}'.format(hp.icusp))
        for _i in range(n):
            p = Pick()
            p.creation_info = CreationInfo(version=hp.arid[_i])
            p.resource_id = ResourceIdentifier('smi:hash/Pick/{0}'.format(p.creation_info.version))
            p.waveform_id = WaveformStreamID(network_code=hp.snet[_i], station_code=hp.sname[_i], channel_code=hp.scomp[_i])
            if hp.p_pol[_i] > 0:
                p.polarity = 'positive'
            else:
                p.polarity = 'negative'
            a = Arrival()
            a.creation_info = CreationInfo(version=hp.arid[_i])
            a.resource_id = ResourceIdentifier('smi:hash/Arrival/{0}'.format(p.creation_info.version))
            a.azimuth = hp.p_azi_mc[_i, 0]
            a.takeoff_angle = 180. - hp.p_the_mc[_i, 0]
            a.pick_id = p.resource_id
            origin.arrivals.append(a)
            event.picks.append(p)
        event.origins.append(origin)
        event.preferred_origin_id = str(origin.resource_id)
    else:  # just update the changes
        origin = event.preferred_origin()
        picks = []
        arrivals = []
        for _i in range(n):
            ind = hp.p_index[_i]
            a = origin.arrivals[ind]
            p = a.pick_id.get_referred_object()  # modern ObsPy name
            a.takeoff_angle = hp.p_the_mc[_i, 0]
            picks.append(p)
            arrivals.append(a)
        if only_fm_picks:
            origin.arrivals = arrivals
            event.picks = picks
    # Use the double-couple calculator and populate planes/axes etc.
    x = hp._best_quality_index
    # Put all the mechanisms into the 'focal_mechanisms' list, mark "best" as preferred
    for s in range(hp.nmult):
        dc = DoubleCouple([hp.str_avg[s], hp.dip_avg[s], hp.rak_avg[s]])
        ax = dc.axis
        focal_mech = FocalMechanism()
        focal_mech.creation_info = CreationInfo(creation_time=UTCDateTime(), author=hp.author)
        focal_mech.triggering_origin_id = origin.resource_id
        focal_mech.resource_id = ResourceIdentifier('smi:hash/FocalMechanism/{0}/{1}'.format(hp.icusp, s+1))
        focal_mech.method_id = ResourceIdentifier('HASH')
        focal_mech.nodal_planes = NodalPlanes()
        focal_mech.nodal_planes.nodal_plane_1 = NodalPlane(*dc.plane1)
        focal_mech.nodal_planes.nodal_plane_2 = NodalPlane(*dc.plane2)
        focal_mech.principal_axes = PrincipalAxes()
        focal_mech.principal_axes.t_axis = Axis(azimuth=ax['T']['azimuth'], plunge=ax['T']['dip'])
        focal_mech.principal_axes.p_axis = Axis(azimuth=ax['P']['azimuth'], plunge=ax['P']['dip'])
        focal_mech.station_polarity_count = n
        focal_mech.azimuthal_gap = hp.magap
        focal_mech.misfit = hp.mfrac[s]
        focal_mech.station_distribution_ratio = hp.stdr[s]
        focal_mech.comments.append(
            Comment(hp.qual[s], resource_id=ResourceIdentifier(str(focal_mech.resource_id) + '/comment/quality'))
            )
        #----------------------------------------
        event.focal_mechanisms.append(focal_mech)
        if s == x:
            event.preferred_focal_mechanism_id = str(focal_mech.resource_id)
    return event
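
The DoubleCouple helper above is HashPy-specific, but the surrounding scaffolding is plain ObsPy. A minimal sketch of assembling the same FocalMechanism structure from a known strike/dip/rake pair (the angles are illustrative):

from obspy.core.event import Event, FocalMechanism, NodalPlane, NodalPlanes

fm = FocalMechanism()
fm.nodal_planes = NodalPlanes(
    nodal_plane_1=NodalPlane(strike=40.0, dip=60.0, rake=-90.0),
    nodal_plane_2=NodalPlane(strike=220.0, dip=30.0, rake=-90.0),
    preferred_plane=1)
ev = Event(focal_mechanisms=[fm])
ev.preferred_focal_mechanism_id = str(fm.resource_id)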
Example No. 26
        print(evnum, ev['datetime'])

        # convert datetime object to UTCdatetime
        dt = utcdatetime.UTCDateTime(ev['datetime'])
        #start_time=utcdatetime.UTCDateTime(yr,mon,day,hr,mn,int(sec),int((sec-int(sec))*100000))

        # Build event object
        evnt = Event(resource_id='GG_cat_' + str(evnum + 1),
                     creation_info='AU')

        origin = Origin()
        origin.time = ev['datetime']
        origin.longitude = ev['lon']
        origin.latitude = ev['lat']
        origin.depth = ev['dep']

        evnt.origins.append(origin)

        mag = Magnitude(creation_info='GG_cat')
        mag.mag = ev['prefmag']
        mag.magnitude_type = ev['prefmagtype']

        evnt.magnitudes.append(mag)
        ''' the time window to request the data will be 20 minutes, check maximum travel time and increase this value accordingly '''
        #end_time=start_time+960 # 16 minutes
        start_time = dt - datetime.timedelta(seconds=60)
        end_time = dt + datetime.timedelta(seconds=960)  # 16 minutes
        #end_time   = dt + datetime.timedelta(seconds=600) # 5 minutes
        ''' get all waveform data available, use wildcards to reduce the data volume and speedup the process,
        unfortunately we need to request few times for every number of characters that forms the station name '''
Example No. 27
        cat = client.get_events(starttime = otime - sec_before_after_event,\
                                endtime = otime + sec_before_after_event)
        ev = cat[0]
        
        ref_time_place = ev
        if rlat != dummyval:
            ref_time_place.origins[0].latitude = rlat
            ref_time_place.origins[0].longitude = rlon
            ref_time_place.origins[0].time = rtime 
    else:
        print("WARNING using event data from user-defined catalog")
        ev = Event()
        org = Origin()
        org.latitude = elat
        org.longitude = elon
        org.depth = edep
        org.time = otime
        mag = Magnitude()
        mag.mag = emag
        mag.magnitude_type = "Mw"
        ev.origins.append(org)
        ev.magnitudes.append(mag)

        if rlat == dummyval:
            # By default this should be the event time and location unless we want to grab stations centered at another location
            rlat = elat
            rlon = elon
            rtime = otime
        
        ref_time_place = Event()
        ref_org = Origin()
Example No. 28
        #        ''' correct the time if not UTC '''
        # Is this really needed??? Looks like may already be converted??
        #        if code=="AEST" :
        #            start_time-=36000
        #        if code=="WITA" or code=="AWST":
        #            start_time-=28800

        # Build event object
        event = Event(resource_id='GG_cat_' + str(event_num),
                      creation_info='JG')
        event_num += 1
        origin = Origin()
        origin.time = start_time
        origin.longitude = lon
        origin.latitude = lat
        origin.depth = dep
        event.origins.append(origin)
        mag = Magnitude(creation_info='GG_cat')
        mag.mag = mag_pref
        mag.magnitude_type = mag_type
        event.magnitudes.append(mag)
        ''' the time window to request the data will be 20 minutes, check maximum travel time and increase this value accordingly '''
        end_time = start_time + 1200  # 20 minutes - to catch ~Rayleigh waves at 20 degrees distance (assume v>=3km/s)
        ''' get all waveform data available, use wildcards to reduce the data volume and speedup the process,
        unfortunately we need to request few times for every number of characters that forms the station name '''
        st_3 = client.get_waveforms("AU", "???", "", "[BS]?[ENZ]", start_time,
                                    end_time)
        st_4 = client.get_waveforms("AU", "????", "", "[BS]?[ENZ]", start_time,
                                    end_time)
        if len(st_4) > 0:
            st = st_3 + st_4
Exemplo n.º 30
0
def par2quakeml(Par_filename,
                QuakeML_filename,
                rotation_axis=[0.0, 1.0, 0.0],
                rotation_angle=-57.5,
                origin_time="2000-01-01 00:00:00.0",
                event_type="other event"):
    # initialise event
    ev = Event()

    # open and read Par file
    fid = open(Par_filename, 'r')

    fid.readline()
    fid.readline()
    fid.readline()
    fid.readline()

    lat_old = 90.0 - float(fid.readline().strip().split()[0])
    lon_old = float(fid.readline().strip().split()[0])
    depth = float(fid.readline().strip().split()[0])

    fid.readline()

    Mtt_old = float(fid.readline().strip().split()[0])
    Mpp_old = float(fid.readline().strip().split()[0])
    Mrr_old = float(fid.readline().strip().split()[0])
    Mtp_old = float(fid.readline().strip().split()[0])
    Mtr_old = float(fid.readline().strip().split()[0])
    Mpr_old = float(fid.readline().strip().split()[0])

    # rotate event into physical domain

    lat, lon = rot.rotate_lat_lon(lat_old, lon_old, rotation_axis,
                                  rotation_angle)
    Mrr, Mtt, Mpp, Mtr, Mpr, Mtp = rot.rotate_moment_tensor(
        Mrr_old, Mtt_old, Mpp_old, Mtr_old, Mpr_old, Mtp_old, lat_old, lon_old,
        rotation_axis, rotation_angle)

    # populate event origin data
    ev.event_type = event_type

    ev_origin = Origin()
    ev_origin.time = UTCDateTime(origin_time)
    ev_origin.latitude = lat
    ev_origin.longitude = lon
    ev_origin.depth = depth
    ev.origins.append(ev_origin)

    # populate event moment tensor

    ev_tensor = Tensor()
    ev_tensor.m_rr = Mrr
    ev_tensor.m_tt = Mtt
    ev_tensor.m_pp = Mpp
    ev_tensor.m_rt = Mtr
    ev_tensor.m_rp = Mpr
    ev_tensor.m_tp = Mtp

    ev_momenttensor = MomentTensor()
    ev_momenttensor.tensor = ev_tensor
    ev_momenttensor.scalar_moment = np.sqrt(Mrr**2 + Mtt**2 + Mpp**2 + Mtr**2 +
                                            Mpr**2 + Mtp**2)

    ev_focalmechanism = FocalMechanism()
    ev_focalmechanism.moment_tensor = ev_momenttensor
    ev_focalmechanism.nodal_planes = NodalPlanes().setdefault(0, 0)

    ev.focal_mechanisms.append(ev_focalmechanism)

    # populate event magnitude
    ev_magnitude = Magnitude()
    ev_magnitude.mag = 0.667 * (np.log10(ev_momenttensor.scalar_moment) - 9.1)
    ev_magnitude.magnitude_type = 'Mw'
    ev.magnitudes.append(ev_magnitude)

    # write QuakeML file
    cat = Catalog()
    cat.append(ev)
    cat.write(QuakeML_filename, format="quakeml")

    # clean up
    fid.close()
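A quick numeric check of the moment-magnitude formula used above (it assumes
the scalar moment is in N m): for M0 = 1.1e18 N m, the illustrative value
below gives Mw close to 6.

import numpy as np

M0 = 1.1e18  # scalar moment in N m (illustrative value)
Mw = 0.667 * (np.log10(M0) - 9.1)
print(round(Mw, 2))  # -> 5.96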
Exemplo n.º 31
0
    eid = line_elements[-1]   # Fix to take microseconds
    otime = uh.eid2otime(eid)
    elat = line_elements[7]
    elon = line_elements[6]
    edep = float(line_elements[8]) * 1000.0 # in meters
    emag = line_elements[16]
    print(otime, elat, elon, edep, emag)

    # Create event object
    client = Client("IRIS")
    ev = Event()
    org = Origin()
    org.latitude = elat
    org.longitude = elon
    org.depth = edep
    org.time = otime
    mag = Magnitude()
    mag.mag = emag
    mag.magnitude_type = "Mw"
    ev.origins.append(org)
    ev.magnitudes.append(mag)

    # Extract waveforms 
    getwaveform_iris.run_get_waveform(c = client, event = ev, 
                                  min_dist = min_dist, max_dist = max_dist, 
                                  before = tbefore_sec, after = tafter_sec, 
                                  network = network, station = station, channel = channel, 
                                  resample_freq = resample_freq, ifrotate = rotate,
                                  ifCapInp = output_cap_weight_file, 
                                  ifRemoveResponse = remove_response,
Exemplo n.º 32
0
Arquivo: core.py Projeto: bmorg/obspy
def read_nlloc_hyp(filename, coordinate_converter=None, picks=None, **kwargs):
    """
    Reads a NonLinLoc Hypocenter-Phase file to a
    :class:`~obspy.core.event.Catalog` object.

    .. note::

        Coordinate conversion from coordinate frame of NonLinLoc model files /
        location run to WGS84 has to be specified explicitly by the user if
        necessary.

    .. note::

        An example can be found on the :mod:`~obspy.nlloc` submodule front
        page in the documentation pages.

    :type filename: str or file
    :param filename: File or file-like object in text mode.
    :type coordinate_converter: func
    :param coordinate_converter: Function to convert (x, y, z)
        coordinates of NonLinLoc output to geographical coordinates
        (longitude, latitude, depth in kilometers).
        If left `None`, NonLinLoc (x, y, z) output is left unchanged (e.g. if
        it is in geographical coordinates already, as for NonLinLoc in
        global mode).
        The function should accept three arguments x, y, z and return a
        tuple of three values (lon, lat, depth in kilometers).
    :type picks: list of :class:`~obspy.core.event.Pick`
    :param picks: Original picks used to generate the NonLinLoc location.
        If provided, the output event will include the original picks and the
        arrivals in the output origin will link to them correctly (with their
        `pick_id` attribute). If not provided, the output event will include
        (the rather basic) pick information that can be reconstructed from the
        NonLinLoc hypocenter-phase file.
    :rtype: :class:`~obspy.core.event.Catalog`
    """
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's the file content passed
        # in as a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    lines = data.splitlines()

    # remember picks originally used in location, if provided
    original_picks = picks
    if original_picks is None:
        original_picks = []

    # determine indices of block start/end of the NLLOC output file
    indices_hyp = [None, None]
    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("NLLOC "):
            indices_hyp[0] = i
        elif line.startswith("END_NLLOC"):
            indices_hyp[1] = i
        elif line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i
    if any([i is None for i in indices_hyp]):
        msg = ("NLLOC HYP file seems corrupt,"
               " could not detect 'NLLOC' and 'END_NLLOC' lines.")
        raise RuntimeError(msg)
    # strip any other lines around NLLOC block
    lines = lines[indices_hyp[0]:indices_hyp[1]]

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = ("NLLOC HYP file seems corrupt, 'PHASE' block is corrupt.")
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    lines = dict([line.split(None, 1) for line in lines])
    line = lines["SIGNATURE"]

    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    creation_time = UTCDateTime.strptime(date + time, str("%d%b%Y%Hh%Mm%S"))

    # maximum likelihood origin location info line
    line = lines["HYPOCENTER"]

    x, y, z = map(float, line.split()[1:7:2])

    if coordinate_converter:
        x, y, z = coordinate_converter(x, y, z)

    # origin time info line
    line = lines["GEOGRAPHIC"]

    year, month, day, hour, minute = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, month, day, hour, minute, seconds)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_XX = float(line.split()[7])
    covariance_YY = float(line.split()[13])
    covariance_ZZ = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from covariance "
        "matrix as 1D marginal (Lon/Lat errors as great circle degrees) "
        "while OriginUncertainty min/max horizontal errors are calculated "
        "from 2D error ellipsoid and are therefore seemingly higher compared "
        "to 1D errors. Error estimates can be reconstructed from the "
        "following original NonLinLoc error statistics line:\nSTATISTICS " +
        lines["STATISTICS"])

    # goto location quality info line
    line = lines["QML_OriginQuality"].split()

    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # goto origin uncertainty info line
    line = lines["QML_OriginUncertainty"]

    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    cat = Catalog(events=[event])
    o = Origin()
    event.origins = [o]
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string))

    cat.creation_info.creation_time = UTCDateTime()
    cat.creation_info.version = "ObsPy %s" % __version__
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version)
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates are
    # large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_XX))
    except ValueError:
        if covariance_XX < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_YY))
    except ValueError:
        if covariance_YY < 0:
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_ZZ) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        wid = WaveformStreamID(station_code=station)
        date, hourmin, sec = map(str, line[6:9])
        t = UTCDateTime.strptime(date + hourmin, "%Y%m%d%H%M") + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not associate
            # the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick for "
                       "arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    return cat
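A hedged usage sketch for read_nlloc_hyp. The file name and reference
coordinates are hypothetical; the converter follows the contract stated in
the docstring (accept x, y, z and return lon, lat and depth in kilometers).

import math

def simple_converter(x, y, z):
    # Toy conversion from a local Cartesian frame (km east/north of a
    # reference point) to geographical coordinates; depth passes through.
    lon0, lat0 = 21.0, 38.0  # hypothetical reference point
    lat = lat0 + y / 111.2
    lon = lon0 + x / (111.2 * math.cos(math.radians(lat0)))
    return lon, lat, z

# cat = read_nlloc_hyp("loc.hyp", coordinate_converter=simple_converter)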
Exemplo n.º 33
0
# NOTE this loop sends a request for each line in the catalog (63 events)
for line in lines:
    line_elements = line.split()
    eid = line_elements[-1]
    otime = uh.eid2otime(eid)
    elon = line_elements[6]
    elat = line_elements[7]
    edep = float(line_elements[8]) * 1000.0  # meters
    emag = line_elements[16]

    # create event object
    orig = Origin()
    orig.longitude = elon
    orig.latitude = elat
    orig.depth = edep
    orig.time = otime
    mag = Magnitude()
    mag.mag = emag
    mag.magnitude_type = "Mw"
    ev = Event()
    ev.origins.append(orig)
    ev.magnitudes.append(mag)

    if send_request:
        # get waveforms
        client = Client("IRIS")
        getwaveform_iris.run_get_waveform(c=client,
                                          event=ev,
                                          min_dist=min_dist,
                                          max_dist=max_dist,
Exemplo n.º 34
0
def __toOrigin(parser, origin_el):
    """
    Parses a given origin etree element.

    :type parser: :class:`~obspy.core.util.xmlwrapper.XMLParser`
    :param parser: Open XMLParser object.
    :type origin_el: etree.element
    :param origin_el: origin element to be parsed.
    :return: A ObsPy :class:`~obspy.core.event.Origin` object.
    """
    origin = Origin()

    # I guess setting the program used as the method id is fine.
    origin.method_id = parser.xpath2obj('program', origin_el)

    # Standard parameters.
    origin.time, origin.time_errors = \
        __toTimeQuantity(parser, origin_el, "time")
    origin.latitude, origin.latitude_errors = \
        __toFloatQuantity(parser, origin_el, "latitude")
    origin.longitude, origin.longitude_errors = \
        __toFloatQuantity(parser, origin_el, "longitude")
    origin.depth, origin.depth_errors = \
        __toFloatQuantity(parser, origin_el, "depth")

    # Figure out the depth type.
    depth_type = parser.xpath2obj("depth_type", origin_el, str)
    # Map Seishub specific depth type to the QuakeML depth type.
    if depth_type == "from location program":
        depth_type = "from location"
    origin.depth_type = depth_type

    # Earth model.
    origin.earth_model_id = parser.xpath2obj("earth_mod", origin_el, str)

    # Parse the origin uncertainty. Rather verbose, but should cover all
    # cases.
    pref_desc = parser.xpath2obj("originUncertainty/preferredDescription",
                                 origin_el, str)
    hor_uncert = parser.xpath2obj("originUncertainty/horizontalUncertainty",
                                  origin_el, float)
    min_hor_uncert = parser.xpath2obj(\
        "originUncertainty/minHorizontalUncertainty", origin_el, float)
    max_hor_uncert = parser.xpath2obj(\
        "originUncertainty/maxHorizontalUncertainty", origin_el, float)
    azi_max_hor_uncert = parser.xpath2obj(\
        "originUncertainty/azimuthMaxHorizontalUncertainty", origin_el, float)
    origin_uncert = {}
    if pref_desc:
        origin_uncert["preferred_description"] = pref_desc
    if hor_uncert:
        origin_uncert["horizontal_uncertainty"] = hor_uncert
    if min_hor_uncert:
        origin_uncert["min_horizontal_uncertainty"] = min_hor_uncert
    if max_hor_uncert:
        origin_uncert["max_horizontal_uncertainty"] = max_hor_uncert
    if azi_max_hor_uncert:
        origin_uncert["azimuth_max_horizontal_uncertainty"] = \
        azi_max_hor_uncert

    if origin_uncert:
        origin.origin_uncertainty = origin_uncert

    # Parse the OriginQuality if applicable.
    if not origin_el.xpath("originQuality"):
        return origin

    origin_quality_el = origin_el.xpath("originQuality")[0]
    origin.quality = OriginQuality()
    origin.quality.associated_phase_count = \
        parser.xpath2obj("associatedPhaseCount", origin_quality_el, int)
    # QuakeML apparently does not distinguish between P and S wave phase
    # counts. Some Seishub event files do.
    p_phase_count = parser.xpath2obj("P_usedPhaseCount", origin_quality_el,
                                     int)
    s_phase_count = parser.xpath2obj("S_usedPhaseCount", origin_quality_el,
                                     int)
    # Use both in case they are set.
    if p_phase_count and s_phase_count:
        phase_count = p_phase_count + s_phase_count
        # Also add two Seishub element file specific elements.
        origin.quality.p_used_phase_count = p_phase_count
        origin.quality.s_used_phase_count = s_phase_count
    # Otherwise the total usedPhaseCount should be specified.
    else:
        phase_count = parser.xpath2obj("usedPhaseCount",
                                       origin_quality_el, int)
    origin.quality.used_phase_count = phase_count

    origin.quality.associated_station_count = \
        parser.xpath2obj("associatedStationCount", origin_quality_el, int)
    origin.quality.used_station_count = \
        parser.xpath2obj("usedStationCount", origin_quality_el, int)
    origin.quality.depth_phase_count = \
        parser.xpath2obj("depthPhaseCount", origin_quality_el, int)
    origin.quality.standard_error = \
        parser.xpath2obj("standardError", origin_quality_el, float)
    origin.quality.azimuthal_gap = \
        parser.xpath2obj("azimuthalGap", origin_quality_el, float)
    origin.quality.secondary_azimuthal_gap = \
        parser.xpath2obj("secondaryAzimuthalGap", origin_quality_el, float)
    origin.quality.ground_truth_level = \
        parser.xpath2obj("groundTruthLevel", origin_quality_el, float)
    origin.quality.minimum_distance = \
        parser.xpath2obj("minimumDistance", origin_quality_el, float)
    origin.quality.maximum_distance = \
        parser.xpath2obj("maximumDistance", origin_quality_el, float)
    origin.quality.median_distance = \
        parser.xpath2obj("medianDistance", origin_quality_el, float)

    return origin
Exemplo n.º 35
0
def _read_single_event(event_file, locate_dir, units, local_mag_ph):
    """
    Parse an event file from QuakeMigrate into an obspy Event object.

    Parameters
    ----------
    event_file : `pathlib.Path` object
        Path to .event file to read.
    locate_dir : `pathlib.Path` object
        Path to locate directory (contains "events", "picks" etc. directories).
    units : {"km", "m"}
        Grid projection coordinates for QM LUT (determines units of depths and
        uncertainties in the .event files).
    local_mag_ph : {"S", "P"}
        Amplitude measurement used to calculate local magnitudes.

    Returns
    -------
    event : `obspy.Event` object
        Event object populated with all available information output by
        :class:`~quakemigrate.signal.scan.locate()`, including event locations
        and uncertainties, picks, and amplitudes and magnitudes if available.

    """

    # Parse information from event file
    event_info = pd.read_csv(event_file).iloc[0]
    event_uid = str(event_info["EventID"])

    # Set distance conversion factor (from units of QM LUT projection units).
    if units == "km":
        factor = 1e3
    elif units == "m":
        factor = 1
    else:
        raise AttributeError(f"units must be 'km' or 'm'; not {units}")

    # Create event object to store origin and pick information
    event = Event()
    event.extra = AttribDict()
    event.resource_id = str(event_info["EventID"])
    event.creation_info = CreationInfo(author="QuakeMigrate",
                                       version=quakemigrate.__version__)

    # Add COA info to extra
    event.extra.coa = {"value": event_info["COA"], "namespace": ns}
    event.extra.coa_norm = {"value": event_info["COA_NORM"], "namespace": ns}
    event.extra.trig_coa = {"value": event_info["TRIG_COA"], "namespace": ns}
    event.extra.dec_coa = {"value": event_info["DEC_COA"], "namespace": ns}
    event.extra.dec_coa_norm = {
        "value": event_info["DEC_COA_NORM"],
        "namespace": ns
    }

    # Determine location of cut waveform data - add to event object as a
    # custom extra attribute.
    mseed = locate_dir / "raw_cut_waveforms" / event_uid
    event.extra.cut_waveforms_file = {
        "value": str(mseed.with_suffix(".m").resolve()),
        "namespace": ns
    }
    if (locate_dir / "real_cut_waveforms").exists():
        mseed = locate_dir / "real_cut_waveforms" / event_uid
        event.extra.real_cut_waveforms_file = {
            "value": str(mseed.with_suffix(".m").resolve()),
            "namespace": ns
        }
    if (locate_dir / "wa_cut_waveforms").exists():
        mseed = locate_dir / "wa_cut_waveforms" / event_uid
        event.extra.wa_cut_waveforms_file = {
            "value": str(mseed.with_suffix(".m").resolve()),
            "namespace": ns
        }

    # Create origin with spline location and set to preferred event origin.
    origin = Origin()
    origin.method_id = "spline"
    origin.longitude = event_info["X"]
    origin.latitude = event_info["Y"]
    origin.depth = event_info["Z"] * factor
    origin.time = UTCDateTime(event_info["DT"])
    event.origins = [origin]
    event.preferred_origin_id = origin.resource_id

    # Create origin with gaussian location and associate with event
    origin = Origin()
    origin.method_id = "gaussian"
    origin.longitude = event_info["GAU_X"]
    origin.latitude = event_info["GAU_Y"]
    origin.depth = event_info["GAU_Z"] * factor
    origin.time = UTCDateTime(event_info["DT"])
    event.origins.append(origin)

    ouc = OriginUncertainty()
    ce = ConfidenceEllipsoid()
    ce.semi_major_axis_length = event_info["COV_ErrY"] * factor
    ce.semi_intermediate_axis_length = event_info["COV_ErrX"] * factor
    ce.semi_minor_axis_length = event_info["COV_ErrZ"] * factor
    ce.major_axis_plunge = 0
    ce.major_axis_azimuth = 0
    ce.major_axis_rotation = 0
    ouc.confidence_ellipsoid = ce
    ouc.preferred_description = "confidence ellipsoid"

    # Set uncertainties for both as the gaussian uncertainties
    for origin in event.origins:
        origin.longitude_errors.uncertainty = kilometer2degrees(
            event_info["GAU_ErrX"] * factor / 1e3)
        origin.latitude_errors.uncertainty = kilometer2degrees(
            event_info["GAU_ErrY"] * factor / 1e3)
        origin.depth_errors.uncertainty = event_info["GAU_ErrZ"] * factor
        origin.origin_uncertainty = ouc

    # Add OriginQuality info to each origin?
    for origin in event.origins:
        origin.origin_type = "hypocenter"
        origin.evaluation_mode = "automatic"

    # --- Handle picks file ---
    pick_file = locate_dir / "picks" / event_uid
    if pick_file.with_suffix(".picks").is_file():
        picks = pd.read_csv(pick_file.with_suffix(".picks"))
    else:
        return None

    for _, pickline in picks.iterrows():
        station = str(pickline["Station"])
        phase = str(pickline["Phase"])
        wid = WaveformStreamID(network_code="", station_code=station)

        for method in ["modelled", "autopick"]:
            pick = Pick()
            pick.extra = AttribDict()
            pick.waveform_id = wid
            pick.method_id = method
            pick.phase_hint = phase
            if method == "autopick" and str(pickline["PickTime"]) != "-1":
                pick.time = UTCDateTime(pickline["PickTime"])
                pick.time_errors.uncertainty = float(pickline["PickError"])
                pick.extra.snr = {
                    "value": float(pickline["SNR"]),
                    "namespace": ns
                }
            elif method == "modelled":
                pick.time = UTCDateTime(pickline["ModelledTime"])
            else:
                continue
            event.picks.append(pick)

    # --- Handle amplitudes file ---
    amps_file = locate_dir / "amplitudes" / event_uid
    if amps_file.with_suffix(".amps").is_file():
        amps = pd.read_csv(amps_file.with_suffix(".amps"))

        i = 0
        for _, ampsline in amps.iterrows():
            wid = WaveformStreamID(seed_string=ampsline["id"])
            noise_amp = ampsline["Noise_amp"] / 1000  # mm to m
            for phase in ["P_amp", "S_amp"]:
                amp = Amplitude()
                if pd.isna(ampsline[phase]):
                    continue
                amp.generic_amplitude = ampsline[phase] / 1000  # mm to m
                amp.generic_amplitude_errors.uncertainty = noise_amp
                amp.unit = "m"
                amp.type = "AML"
                amp.method_id = phase
                amp.period = 1 / ampsline[f"{phase[0]}_freq"]
                amp.time_window = TimeWindow(
                    reference=UTCDateTime(ampsline[f"{phase[0]}_time"]))
                # amp.pick_id = ?
                amp.waveform_id = wid
                # amp.filter_id = ?
                amp.magnitude_hint = "ML"
                amp.evaluation_mode = "automatic"
                amp.extra = AttribDict()
                try:
                    amp.extra.filter_gain = {
                        "value": ampsline[f"{phase[0]}_filter_gain"],
                        "namespace": ns
                    }
                    amp.extra.avg_amp = {
                        "value": ampsline[f"{phase[0]}_avg_amp"] / 1000,  # m
                        "namespace": ns
                    }
                except KeyError:
                    pass

                if phase[0] == local_mag_ph and not pd.isna(ampsline["ML"]):
                    i += 1
                    stat_mag = StationMagnitude()
                    stat_mag.extra = AttribDict()
                    # stat_mag.origin_id = ? local_mag_loc
                    stat_mag.mag = ampsline["ML"]
                    stat_mag.mag_errors.uncertainty = ampsline["ML_Err"]
                    stat_mag.station_magnitude_type = "ML"
                    stat_mag.amplitude_id = amp.resource_id
                    stat_mag.extra.picked = {
                        "value": ampsline["is_picked"],
                        "namespace": ns
                    }
                    stat_mag.extra.epi_dist = {
                        "value": ampsline["epi_dist"],
                        "namespace": ns
                    }
                    stat_mag.extra.z_dist = {
                        "value": ampsline["z_dist"],
                        "namespace": ns
                    }

                    event.station_magnitudes.append(stat_mag)

                event.amplitudes.append(amp)

        mag = Magnitude()
        mag.extra = AttribDict()
        mag.mag = event_info["ML"]
        mag.mag_errors.uncertainty = event_info["ML_Err"]
        mag.magnitude_type = "ML"
        # mag.origin_id = ?
        mag.station_count = i
        mag.evaluation_mode = "automatic"
        mag.extra.r2 = {"value": event_info["ML_r2"], "namespace": ns}

        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id

    return event
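A hedged usage sketch (the directory layout and file names are hypothetical;
the reader assumes the standard QuakeMigrate locate output structure
described in the docstring).

from pathlib import Path

locate_dir = Path("runs/example_run/locate")  # hypothetical run directory
event_file = locate_dir / "events" / "20140101000000000.event"
# event = _read_single_event(event_file, locate_dir, units="km",
#                            local_mag_ph="S")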
Exemplo n.º 36
0
def request_gcmt(starttime,
                 endtime,
                 minmagnitude=None,
                 mindepth=None,
                 maxdepth=None,
                 minlatitude=None,
                 maxlatitude=None,
                 minlongitude=None,
                 maxlongitude=None):
    """
    Description
    I am using mechanize. My attempt is just preliminary, for the current
    globalcmt.org site. It is possible to store Moment Tensor information
    in the catalog file.
    """
    from mechanize import Browser
    import re

    # Split numbers and text
    r = re.compile("([a-zA-Z]+)([0-9]+)")
    br = Browser()
    br.open('http://www.globalcmt.org/CMTsearch.html')
    # Site has just one form
    br.select_form(nr=0)

    br.form['yr'] = str(starttime.year)
    br.form['mo'] = str(starttime.month)
    br.form['day'] = str(starttime.day)
    br.form['oyr'] = str(endtime.year)
    br.form['omo'] = str(endtime.month)
    br.form['oday'] = str(endtime.day)
    br.form['list'] = ['4']
    br.form['itype'] = ['ymd']
    br.form['otype'] = ['ymd']

    if minmagnitude:
        br.form['lmw'] = str(minmagnitude)
    if minlatitude:
        br.form['llat'] = str(minlatitude)
    if maxlatitude:
        br.form['ulat'] = str(maxlatitude)
    if minlongitude:
        br.form['llon'] = str(minlongitude)
    if maxlongitude:
        br.form['ulon'] = str(maxlongitude)
    if mindepth:
        br.form['lhd'] = str(mindepth)
    if maxdepth:
        br.form['uhd'] = str(maxdepth)

    print("Submitting parameters to globalcmt.org ")
    req = br.submit()
    print("Retrieving data, creating catalog.")

    data = []
    for line in req:
        data.append(line)

    data_chunked = _chunking_list(keyword='\n', list=data)
    origins = []
    magnitudes = []
    tensor = []

    for line in data_chunked:
        for element in line:
            if 'event name' in element:
                try:
                    org = line[1].split()
                    year = int(r.match(org[0]).groups()[1])
                    mon = int(org[1])
                    day = int(org[2])
                    hour = int(org[3])
                    minute = int(org[4])
                    sec_temp = int(org[5].split('.')[0])
                    msec_temp = int(org[5].split('.')[1])

                except:
                    org = line[1].split()
                    year = int(org[1])
                    mon = int(org[2])
                    day = int(org[3])
                    hour = int(org[4])
                    minute = int(org[5])
                    sec_temp = int(org[6].split('.')[0])
                    msec_temp = int(org[6].split('.')[1])

                origins_temp = UTCDateTime(year, mon, day, hour, minute,
                                           sec_temp, msec_temp)
                # adding time shift located in line[3]
                origin = origins_temp + float(line[3].split()[2])
                magnitude = float(line[1].split()[10])
                latitude = float(line[5].split()[1])
                longitude = float(line[6].split()[1])
                depth = 1000. * float(line[7].split()[1])
                m_rr = float(line[8].split()[1])
                m_tt = float(line[9].split()[1])
                m_pp = float(line[10].split()[1])
                m_rt = float(line[11].split()[1])
                m_rp = float(line[12].split()[1])
                m_tp = float(line[13].split()[1])

                magnitudes.append(("Mw", magnitude))
                origins.append((latitude, longitude, depth, origin))
                tensor.append((m_rr, m_tt, m_pp, m_rt, m_rp, m_tp))

    cat = Catalog()

    for mag, org, ten in zip(magnitudes, origins, tensor):
        # Create magnitude object.
        magnitude = Magnitude()
        magnitude.magnitude_type = mag[0]
        magnitude.mag = mag[1]
        # Write origin object.
        origin = Origin()
        origin.latitude = org[0]
        origin.longitude = org[1]
        origin.depth = org[2]
        origin.time = org[3]
        # Create event object and append to catalog object.
        event = Event()
        event.magnitudes.append(magnitude)
        event.origins.append(origin)

        event.MomentTensor = MomentTensor()
        event.MomentTensor.m_rr = ten[0]
        event.MomentTensor.m_tt = ten[1]
        event.MomentTensor.m_pp = ten[2]
        event.MomentTensor.m_rt = ten[3]
        event.MomentTensor.m_rp = ten[4]
        event.MomentTensor.m_tp = ten[5]

        cat.append(event)

    return cat
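A hedged usage sketch (requires the mechanize package and network access, and
assumes the globalcmt.org form field names have not changed since this code
was written).

from obspy import UTCDateTime

# cat = request_gcmt(UTCDateTime(2010, 1, 1), UTCDateTime(2010, 2, 1),
#                    minmagnitude=6.5)
# print(cat)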
Exemplo n.º 37
0
    def _parseRecordDp(self, line, event):
        """
        Parses the 'source parameter data - primary' record Dp
        """
        source_contributor = line[2:6].strip()
        computation_type = line[6]
        exponent = self._intZero(line[7])
        scale = math.pow(10, exponent)
        centroid_origin_time = line[8:14] + "." + line[14]
        orig_time_stderr = line[15:17]
        if orig_time_stderr == "FX":
            orig_time_stderr = "Fixed"
        else:
            orig_time_stderr = self._floatWithFormat(orig_time_stderr, "2.1", scale)
        centroid_latitude = self._floatWithFormat(line[17:21], "4.2")
        lat_type = line[21]
        if centroid_latitude is not None:
            centroid_latitude *= self._coordinateSign(lat_type)
        lat_stderr = line[22:25]
        if lat_stderr == "FX":
            lat_stderr = "Fixed"
        else:
            lat_stderr = self._floatWithFormat(lat_stderr, "3.2", scale)
        centroid_longitude = self._floatWithFormat(line[25:30], "5.2")
        lon_type = line[30]
        if centroid_longitude is not None:
            centroid_longitude *= self._coordinateSign(lon_type)
        lon_stderr = line[31:34]
        if lon_stderr == "FX":
            lon_stderr = "Fixed"
        else:
            lon_stderr = self._floatWithFormat(lon_stderr, "3.2", scale)
        centroid_depth = self._floatWithFormat(line[34:38], "4.1")
        depth_stderr = line[38:40]
        if depth_stderr == "FX" or depth_stderr == "BD":
            depth_stderr = "Fixed"
        else:
            depth_stderr = self._floatWithFormat(depth_stderr, "2.1", scale)
        station_number = self._intZero(line[40:43])
        component_number = self._intZero(line[43:46])
        station_number2 = self._intZero(line[46:48])
        component_number2 = self._intZero(line[48:51])
        # unused: half_duration = self._floatWithFormat(line[51:54], '3.1')
        moment = self._floatWithFormat(line[54:56], "2.1")
        moment_stderr = self._floatWithFormat(line[56:58], "2.1")
        moment_exponent = self._int(line[58:60])
        if (moment is not None) and (moment_exponent is not None):
            moment *= math.pow(10, moment_exponent)
        if (moment_stderr is not None) and (moment_exponent is not None):
            moment_stderr *= math.pow(10, moment_exponent)

        evid = event.resource_id.id.split("/")[-1]
        # Create a new origin only if centroid time is defined:
        origin = None
        if centroid_origin_time.strip() != ".":
            origin = Origin()
            res_id = "/".join(
                (res_id_prefix, "origin", evid, source_contributor.lower(), "mw" + computation_type.lower())
            )
            origin.resource_id = ResourceIdentifier(id=res_id)
            origin.creation_info = CreationInfo(agency_id=source_contributor)
            date = event.origins[0].time.strftime("%Y%m%d")
            origin.time = UTCDateTime(date + centroid_origin_time)
            # Check if centroid time is on the next day:
            if origin.time < event.origins[0].time:
                origin.time += timedelta(days=1)
            self._storeUncertainty(origin.time_errors, orig_time_stderr)
            origin.latitude = centroid_latitude
            origin.longitude = centroid_longitude
            origin.depth = centroid_depth * 1000
            if lat_stderr == "Fixed" and lon_stderr == "Fixed":
                origin.epicenter_fixed = True
            else:
                self._storeUncertainty(origin.latitude_errors, self._latErrToDeg(lat_stderr))
                self._storeUncertainty(origin.longitude_errors, self._lonErrToDeg(lon_stderr, origin.latitude))
            if depth_stderr == "Fixed":
                origin.depth_type = "operator assigned"
            else:
                origin.depth_type = "from location"
                self._storeUncertainty(origin.depth_errors, depth_stderr, scale=1000)
            quality = OriginQuality()
            quality.used_station_count = station_number + station_number2
            quality.used_phase_count = component_number + component_number2
            origin.quality = quality
            origin.type = "centroid"
            event.origins.append(origin)
        focal_mechanism = FocalMechanism()
        res_id = "/".join(
            (res_id_prefix, "focalmechanism", evid, source_contributor.lower(), "mw" + computation_type.lower())
        )
        focal_mechanism.resource_id = ResourceIdentifier(id=res_id)
        focal_mechanism.creation_info = CreationInfo(agency_id=source_contributor)
        moment_tensor = MomentTensor()
        if origin is not None:
            moment_tensor.derived_origin_id = origin.resource_id
        else:
            # this is required for QuakeML validation:
            res_id = "/".join((res_id_prefix, "no-origin"))
            moment_tensor.derived_origin_id = ResourceIdentifier(id=res_id)
        for mag in event.magnitudes:
            if mag.creation_info.agency_id == source_contributor:
                moment_tensor.moment_magnitude_id = mag.resource_id
        res_id = "/".join(
            (res_id_prefix, "momenttensor", evid, source_contributor.lower(), "mw" + computation_type.lower())
        )
        moment_tensor.resource_id = ResourceIdentifier(id=res_id)
        moment_tensor.scalar_moment = moment
        self._storeUncertainty(moment_tensor.scalar_moment_errors, moment_stderr)
        data_used = DataUsed()
        data_used.station_count = station_number + station_number2
        data_used.component_count = component_number + component_number2
        if computation_type == "C":
            res_id = "/".join((res_id_prefix, "methodID=CMT"))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # CMT algorithm uses long-period body waves,
            # very-long-period surface waves and
            # intermediate period surface waves (since 2004
            # for shallow and intermediate-depth earthquakes
            # --Ekstrom et al., 2012)
            data_used.wave_type = "combined"
        if computation_type == "M":
            res_id = "/".join((res_id_prefix, "methodID=moment_tensor"))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # FIXME: not sure which kind of data is used by
            # "moment tensor" algorithm.
            data_used.wave_type = "unknown"
        elif computation_type == "B":
            res_id = "/".join((res_id_prefix, "methodID=broadband_data"))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # FIXME: is 'combined' correct here?
            data_used.wave_type = "combined"
        elif computation_type == "F":
            res_id = "/".join((res_id_prefix, "methodID=P-wave_first_motion"))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            data_used.wave_type = "P waves"
        elif computation_type == "S":
            res_id = "/".join((res_id_prefix, "methodID=scalar_moment"))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # FIXME: not sure which kind of data is used
            # for scalar moment determination.
            data_used.wave_type = "unknown"
        moment_tensor.data_used = data_used
        focal_mechanism.moment_tensor = moment_tensor
        event.focal_mechanisms.append(focal_mechanism)
        return focal_mechanism
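A sketch of the implied-decimal convention that _floatWithFormat appears to
follow above (a "2.1" format reads a 2-character field as a number with one
implied decimal place, optionally multiplied by the 10**exponent scale). The
helper below is a hypothetical stand-in, not the parser's actual
implementation.

def float_with_format(value, decimals, scale=1.0):
    # e.g. "12" with decimals=1 -> 1.2; empty fields -> None
    value = value.strip()
    if not value:
        return None
    return float(value) / 10 ** decimals * scale

print(float_with_format("12", 1))            # -> 1.2
print(float_with_format("12", 1, scale=10))  # -> 12.0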
Exemplo n.º 38
0
def _read_single_hypocenter(lines, coordinate_converter, original_picks):
    """
    Given a list of lines (starting with a 'NLLOC' line and ending with a
    'END_NLLOC' line), parse them into an Event.
    """
    try:
        # some paranoid checks..
        assert lines[0].startswith("NLLOC ")
        assert lines[-1].startswith("END_NLLOC")
        for line in lines[1:-1]:
            assert not line.startswith("NLLOC ")
            assert not line.startswith("END_NLLOC")
    except Exception:
        msg = ("This should not have happened, please report this as a bug at "
               "https://github.com/obspy/obspy/issues.")
        raise Exception(msg)

    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = ("NLLOC HYP file seems corrupt, 'PHASE' block is corrupt.")
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    lines = dict([line.split(None, 1) for line in lines[:-1]])
    line = lines["SIGNATURE"]

    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    # new NLLoc > 6.0 seems to add prefix 'run:' before date
    if date.startswith('run:'):
        date = date[4:]
    signature = signature.strip()
    creation_time = UTCDateTime.strptime(date + time, str("%d%b%Y%Hh%Mm%S"))

    if coordinate_converter:
        # maximum likelihood origin location in km info line
        line = lines["HYPOCENTER"]
        x, y, z = coordinate_converter(*map(float, line.split()[1:7:2]))
    else:
        # maximum likelihood origin location lon lat info line
        line = lines["GEOGRAPHIC"]
        y, x, z = map(float, line.split()[8:13:2])

    # maximum likelihood origin time info line
    line = lines["GEOGRAPHIC"]

    year, mon, day, hour, minute = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, mon, day, hour, minute, seconds, strict=False)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_xx = float(line.split()[7])
    covariance_yy = float(line.split()[13])
    covariance_zz = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from covariance "
        "matrix as 1D marginal (Lon/Lat errors as great circle degrees) "
        "while OriginUncertainty min/max horizontal errors are calculated "
        "from 2D error ellipsoid and are therefore seemingly higher compared "
        "to 1D errors. Error estimates can be reconstructed from the "
        "following original NonLinLoc error statistics line:\nSTATISTICS " +
        lines["STATISTICS"])

    # goto location quality info line
    line = lines["QML_OriginQuality"].split()

    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # goto origin uncertainty info line
    line = lines["QML_OriginUncertainty"]

    if "COMMENT" in lines:
        comment = lines["COMMENT"].strip()
        comment = comment.strip('\'"')
        comment = comment.strip()

    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    o = Origin()
    event.origins = [o]
    event.preferred_origin_id = o.resource_id
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string, force_resource_id=False))
    if comment is not None:
        event.comments.append(Comment(text=comment, force_resource_id=False))

    # SIGNATURE field's first item is LOCSIG, which is supposed to be
    # 'Identification of an individual, institiution or other entity'
    # according to
    # http://alomax.free.fr/nlloc/soft6.00/control.html#_NLLoc_locsig_
    # so use it as author in creation info
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version,
                                       author=signature)
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version,
                                   author=signature)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates are
    # large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_xx))
    except ValueError:
        if covariance_xx < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_yy))
    except ValueError:
        if covariance_yy < 0:
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_zz) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        # network codes are not used by NonLinLoc, so they can not be known
        # when reading the .hyp file.. to conform with QuakeML standard set an
        # empty network code
        wid = WaveformStreamID(network_code="", station_code=station)
        # have to split this into ints for overflow to work correctly
        date, hourmin, sec = map(str, line[6:9])
        ymd = [int(date[:4]), int(date[4:6]), int(date[6:8])]
        hm = [int(hourmin[:2]), int(hourmin[2:4])]
        t = UTCDateTime(*(ymd + hm), strict=False) + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not associate
            # the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick for "
                       "arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    event.scope_resource_ids()

    return event
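A minimal sketch of how a caller might hand one hypocenter block to this
helper (the file name is hypothetical; a real .hyp file can contain several
NLLOC/END_NLLOC blocks).

with open("loc.hyp") as fh:
    all_lines = fh.read().splitlines()

start = next(i for i, l in enumerate(all_lines) if l.startswith("NLLOC "))
end = next(i for i, l in enumerate(all_lines) if l.startswith("END_NLLOC"))
event = _read_single_hypocenter(all_lines[start:end + 1],
                                coordinate_converter=None,
                                original_picks=[])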
Exemplo n.º 39
0
    def _map_join2origin(self, db):
        """
        Return an Origin instance from a dict of CSS key/values
        
        Inputs
        ======
        db : dict of key/values of CSS fields related to the origin (see Join)

        Returns
        =======
        obspy.core.event.Origin

        Notes
        =====
        Any object that supports the dict 'get' method can be passed as
        input, e.g. OrderedDict, custom classes, etc.
        
        Join
        ----
        origin <- origerr (outer)

        """ 
        #-- Basic location ------------------------------------------
        origin = Origin()
        origin.latitude = db.get('lat')
        origin.longitude = db.get('lon')
        origin.depth = _km2m(db.get('depth'))
        origin.time = _utc(db.get('time'))
        origin.extra = {}
        
        #-- Quality -------------------------------------------------
        quality = OriginQuality(
            associated_phase_count = db.get('nass'),
            used_phase_count = db.get('ndef'),
            standard_error = db.get('sdobs'),
            )
        origin.quality = quality

        #-- Solution Uncertainties ----------------------------------
        # in CSS the ellipse is projected onto the horizontal plane
        # using the covariance matrix
        uncertainty = OriginUncertainty()
        a = _km2m(db.get('smajax'))
        b = _km2m(db.get('sminax'))
        s = db.get('strike')
        dep_u = _km2m(db.get('sdepth'))
        time_u = db.get('stime')

        uncertainty.max_horizontal_uncertainty = a
        uncertainty.min_horizontal_uncertainty = b
        uncertainty.azimuth_max_horizontal_uncertainty = s
        uncertainty.horizontal_uncertainty = a
        uncertainty.preferred_description = "horizontal uncertainty"

        if db.get('conf') is not None:
            uncertainty.confidence_level = db.get('conf') * 100.  

        if uncertainty.horizontal_uncertainty is not None:
            origin.origin_uncertainty = uncertainty

        #-- Parameter Uncertainties ---------------------------------
        if all([a, b, s]):
            n, e = _get_NE_on_ellipse(a, b, s)
            lat_u = _m2deg_lat(n)
            lon_u = _m2deg_lon(e, lat=origin.latitude)
            origin.latitude_errors = {'uncertainty': lat_u} 
            origin.longitude_errors = {'uncertainty': lon_u}
        if dep_u:
            origin.depth_errors = {'uncertainty': dep_u}
        if time_u:
            origin.time_errors = {'uncertainty': time_u}

        #-- Analyst-determined Status -------------------------------
        posted_author = _str(db.get('auth'))
        mode, status = self.get_event_status(posted_author)
        origin.evaluation_mode = mode
        origin.evaluation_status = status
        
        # Save etype per origin due to schema differences...
        css_etype = _str(db.get('etype'))
        # Compatible with future patch rename "_namespace" -> "namespace"
        origin.extra['etype'] = {
            'value': css_etype, 
            'namespace': CSS_NAMESPACE
            }

        origin.creation_info = CreationInfo(
            creation_time = _utc(db.get('lddate')),
            agency_id = self.agency, 
            version = db.get('orid'),
            author = posted_author,
            )
        origin.resource_id = self._rid(origin)
        return origin
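A sketch of the kind of mapping this method expects, using CSS3.0 field names
from the docstring join (the values are illustrative, and the call is shown
on a hypothetical converter instance).

db = {
    'lat': 61.52, 'lon': -149.95, 'depth': 35.0, 'time': 1262304000.0,
    'nass': 12, 'ndef': 10, 'sdobs': 0.45,
    'smajax': 2.1, 'sminax': 1.3, 'strike': 95.0,
    'sdepth': 3.2, 'stime': 0.25, 'conf': 0.9,
    'auth': 'analyst1', 'etype': 'eq', 'orid': 1001, 'lddate': 1262305000.0,
}
# origin = converter._map_join2origin(db)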
Exemplo n.º 40
0
def request_gcmt(starttime, endtime, minmagnitude=None, mindepth=None, maxdepth=None, minlatitude=None, maxlatitude=None, minlongitude=None, maxlongitude=None):
	"""
	Description
	I am using mechanize. My attempt is just preliminary, for the current
	globalcmt.org site.
	"""
	import mechanize
	from mechanize import Browser
	import re

	#Split numbers and text
	r = re.compile("([a-zA-Z]+)([0-9]+)")


	br = Browser()
	br.open('http://www.globalcmt.org/CMTsearch.html')
	#Site has just one form
	br.select_form(nr=0)

	br.form['yr']    = str(starttime.year)
	br.form['mo']    = str(starttime.month)
	br.form['day']   = str(starttime.day)
	br.form['oyr']   = str(endtime.year)
	br.form['omo']   = str(endtime.month)
	br.form['oday']  = str(endtime.day)
	br.form['list']  = ['4']
	br.form['itype'] = ['ymd']
	br.form['otype'] = ['ymd']

	if minmagnitude: br.form['lmw']   = str(minmagnitude)
	if minlatitude : br.form['llat']  = str(minlatitude)
	if maxlatitude : br.form['ulat']  = str(maxlatitude)
	if minlongitude: br.form['llon']  = str(minlongitude)
	if maxlongitude: br.form['ulon']  = str(maxlongitude)
	if mindepth    : br.form['lhd']   = str(mindepth)
	if maxdepth    : br.form['uhd']   = str(maxdepth)

	print("Submitting parameters to globalcmt.")
	req = br.submit()
	print("Retrieving data, creating catalog.")

	data = []
	for line in req:
		data.append(line) 

	data_chunked = _chunking_list(keyword='\n', list=data)
	origins = []
	magnitudes = []
	tensor = []

	for line in data_chunked:
		for element in line:
			if 'event name' in element:
				org       = line[1].split()
				year      = int(r.match(org[0]).groups()[1])
				mon       = int(org[1])
				day       = int(org[2])
				hour      = int(org[3])
				minute    = int(org[4])
				sec_temp  = int(org[5].split('.')[0])
				msec_temp = int(org[5].split('.')[1])

				origins_temp = UTCDateTime(year, mon, day, hour, minute, sec_temp, msec_temp)
				#adding time shift located in line[3]
				origin       = origins_temp + float(line[3].split()[2])
				magnitude    = float(line[1].split()[10])
				latitude     = float(line[5].split()[1])
				longitude    = float(line[6].split()[1])
				depth        = 1000. * float(line[7].split()[1])
				m_rr         = float(line[8].split()[1])
				m_tt         = float(line[9].split()[1])
				m_pp         = float(line[10].split()[1])
				m_rt         = float(line[11].split()[1])
				m_rp         = float(line[12].split()[1])
				m_tp         = float(line[13].split()[1])

				magnitudes.append( ("Mw", magnitude) )
				origins.append( (latitude, longitude, depth, origin) )
				tensor.append( (m_rr, m_tt, m_pp, m_rt, m_rp, m_tp) )

	cat = Catalog()

	for mag, org, ten in zip(magnitudes, origins, tensor):
		# Create magnitude object.
		magnitude = Magnitude()
		magnitude.magnitude_type = mag[0]
		magnitude.mag = mag[1]
		# Write origin object.
		origin = Origin()
		origin.latitude = org[0]
		origin.longitude = org[1]
		origin.depth = org[2]
		origin.time = org[3]
		# Create event object and append to catalog object.
		event = Event()
		event.magnitudes.append(magnitude)
		event.origins.append(origin)

		event.MomentTensor = MomentTensor()
		event.MomentTensor.m_rr = ten[0]
		event.MomentTensor.m_tt = ten[1]
		event.MomentTensor.m_pp = ten[2]
		event.MomentTensor.m_rt = ten[3]
		event.MomentTensor.m_rp = ten[4]
		event.MomentTensor.m_tp = ten[5]

		cat.append(event)

	return cat
Exemplo n.º 41
0
    def _parse_record_dp(self, line, event):
        """
        Parses the 'source parameter data - primary' record Dp
        """
        source_contributor = line[2:6].strip()
        computation_type = line[6]
        exponent = self._int_zero(line[7])
        scale = math.pow(10, exponent)
        centroid_origin_time = line[8:14] + '.' + line[14]
        orig_time_stderr = line[15:17]
        if orig_time_stderr == 'FX':
            orig_time_stderr = 'Fixed'
        else:
            orig_time_stderr = \
                self._float_with_format(orig_time_stderr, '2.1', scale)
        centroid_latitude = self._float_with_format(line[17:21], '4.2')
        lat_type = line[21]
        if centroid_latitude is not None:
            centroid_latitude *= self._coordinate_sign(lat_type)
        lat_stderr = line[22:25]
        if lat_stderr == 'FX':
            lat_stderr = 'Fixed'
        else:
            lat_stderr = self._float_with_format(lat_stderr, '3.2', scale)
        centroid_longitude = self._float_with_format(line[25:30], '5.2')
        lon_type = line[30]
        if centroid_longitude is not None:
            centroid_longitude *= self._coordinate_sign(lon_type)
        lon_stderr = line[31:34]
        if lon_stderr == 'FX':
            lon_stderr = 'Fixed'
        else:
            lon_stderr = self._float_with_format(lon_stderr, '3.2', scale)
        centroid_depth = self._float_with_format(line[34:38], '4.1')
        depth_stderr = line[38:40]
        if depth_stderr == 'FX' or depth_stderr == 'BD':
            depth_stderr = 'Fixed'
        else:
            depth_stderr = self._float_with_format(depth_stderr, '2.1', scale)
        station_number = self._int_zero(line[40:43])
        component_number = self._int_zero(line[43:46])
        station_number2 = self._int_zero(line[46:48])
        component_number2 = self._int_zero(line[48:51])
        # unused: half_duration = self._float_with_format(line[51:54], '3.1')
        moment = self._float_with_format(line[54:56], '2.1')
        moment_stderr = self._float_with_format(line[56:58], '2.1')
        moment_exponent = self._int(line[58:60])
        if (moment is not None) and (moment_exponent is not None):
            moment *= math.pow(10, moment_exponent)
        if (moment_stderr is not None) and (moment_exponent is not None):
            moment_stderr *= math.pow(10, moment_exponent)

        evid = event.resource_id.id.split('/')[-1]
        # Create a new origin only if centroid time is defined:
        origin = None
        if centroid_origin_time.strip() != '.':
            origin = Origin()
            res_id = '/'.join((res_id_prefix, 'origin',
                               evid, source_contributor.lower(),
                               'mw' + computation_type.lower()))
            origin.resource_id = ResourceIdentifier(id=res_id)
            origin.creation_info = \
                CreationInfo(agency_id=source_contributor)
            date = event.origins[0].time.strftime('%Y%m%d')
            origin.time = UTCDateTime(date + centroid_origin_time)
            # Check if centroid time is on the next day:
            if origin.time < event.origins[0].time:
                origin.time += timedelta(days=1)
            self._store_uncertainty(origin.time_errors, orig_time_stderr)
            origin.latitude = centroid_latitude
            origin.longitude = centroid_longitude
            origin.depth = centroid_depth * 1000
            if lat_stderr == 'Fixed' and lon_stderr == 'Fixed':
                origin.epicenter_fixed = True
            else:
                self._store_uncertainty(origin.latitude_errors,
                                        self._lat_err_to_deg(lat_stderr))
                self._store_uncertainty(origin.longitude_errors,
                                        self._lon_err_to_deg(lon_stderr,
                                                             origin.latitude))
            if depth_stderr == 'Fixed':
                origin.depth_type = 'operator assigned'
            else:
                origin.depth_type = 'from location'
                self._store_uncertainty(origin.depth_errors,
                                        depth_stderr, scale=1000)
            quality = OriginQuality()
            quality.used_station_count = \
                station_number + station_number2
            quality.used_phase_count = \
                component_number + component_number2
            origin.quality = quality
            origin.origin_type = 'centroid'
            event.origins.append(origin)
        focal_mechanism = FocalMechanism()
        res_id = '/'.join((res_id_prefix, 'focalmechanism',
                           evid, source_contributor.lower(),
                           'mw' + computation_type.lower()))
        focal_mechanism.resource_id = ResourceIdentifier(id=res_id)
        focal_mechanism.creation_info = \
            CreationInfo(agency_id=source_contributor)
        moment_tensor = MomentTensor()
        if origin is not None:
            moment_tensor.derived_origin_id = origin.resource_id
        else:
            # this is required for QuakeML validation:
            res_id = '/'.join((res_id_prefix, 'no-origin'))
            moment_tensor.derived_origin_id = \
                ResourceIdentifier(id=res_id)
        for mag in event.magnitudes:
            if mag.creation_info.agency_id == source_contributor:
                moment_tensor.moment_magnitude_id = mag.resource_id
        res_id = '/'.join((res_id_prefix, 'momenttensor',
                           evid, source_contributor.lower(),
                           'mw' + computation_type.lower()))
        moment_tensor.resource_id = ResourceIdentifier(id=res_id)
        moment_tensor.scalar_moment = moment
        self._store_uncertainty(moment_tensor.scalar_moment_errors,
                                moment_stderr)
        data_used = DataUsed()
        data_used.station_count = station_number + station_number2
        data_used.component_count = component_number + component_number2
        if computation_type == 'C':
            res_id = '/'.join((res_id_prefix, 'methodID=CMT'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # CMT algorithm uses long-period body waves,
            # very-long-period surface waves and
            # intermediate period surface waves (since 2004
            # for shallow and intermediate-depth earthquakes
            # --Ekstrom et al., 2012)
            data_used.wave_type = 'combined'
        elif computation_type == 'M':
            res_id = '/'.join((res_id_prefix, 'methodID=moment_tensor'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # FIXME: not sure which kind of data is used by
            # "moment tensor" algorithm.
            data_used.wave_type = 'unknown'
        elif computation_type == 'B':
            res_id = '/'.join((res_id_prefix, 'methodID=broadband_data'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # FIXME: is 'combined' correct here?
            data_used.wave_type = 'combined'
        elif computation_type == 'F':
            res_id = '/'.join((res_id_prefix, 'methodID=P-wave_first_motion'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            data_used.wave_type = 'P waves'
        elif computation_type == 'S':
            res_id = '/'.join((res_id_prefix, 'methodID=scalar_moment'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            # FIXME: not sure which kind of data is used
            # for scalar moment determination.
            data_used.wave_type = 'unknown'
        moment_tensor.data_used = [data_used]
        focal_mechanism.moment_tensor = moment_tensor
        event.focal_mechanisms.append(focal_mechanism)
        return focal_mechanism
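
A small numeric sketch of the fixed-format/exponent convention this parser relies on (illustrative values only; _float_with_format is assumed to read fields with an implied decimal point, as the calls above suggest):

    import math

    # A '2.1'-formatted field such as '25' parses to 2.5; the separate
    # moment exponent field (columns 58-60) supplies the power of ten:
    moment, moment_exponent = 2.5, 19
    scalar_moment = moment * math.pow(10, moment_exponent)  # 2.5e+19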