Example #1
def readSeishubEventFile(filename):
    """
    Reads a Seishub event file and returns an ObsPy Catalog object.

    .. warning::
        This function should NOT be called directly; it registers via the
        ObsPy :func:`~obspy.core.event.readEvents` function, which should be
        called instead.

    :type filename: str
    :param filename: Seishub event file to be read.
    :rtype: :class:`~obspy.core.event.Catalog`
    :return: An ObsPy Catalog object.

    .. rubric:: Example
    """
    global CURRENT_TYPE

    base_name = os.path.basename(filename)

    if base_name.lower().startswith("baynet"):
        CURRENT_TYPE = "baynet"
    elif base_name.lower().startswith("earthworm"):
        CURRENT_TYPE = "earthworm"
    elif base_name.lower().startswith("gof"):
        CURRENT_TYPE = "seiscomp3"
    elif base_name.lower().startswith("obspyck") or base_name == "5622":
        CURRENT_TYPE = "obspyck"
    elif base_name.lower().startswith("toni"):
        CURRENT_TYPE = "toni"
    else:
        msg = "Unknown event file type for file '%s'." % base_name
        raise Exception(msg)

    # Just init the parser, the SeisHub event file format has no namespaces.
    parser = XMLParser(filename)
    # Create new Event object.
    public_id = parser.xpath('event_id/value')[0].text

    # A Seishub event just specifies a single event so Catalog information is
    # not really given.
    catalog = Catalog()
    catalog.resource_id = "/".join([RESOURCE_ROOT, "catalog", public_id])

    # Read the event_type tag.
    account = parser.xpath2obj('event_type/account', parser, str)
    user = parser.xpath2obj('event_type/user', parser, str)
    global_evaluation_mode = parser.xpath2obj('event_type/value', parser, str)
    public = parser.xpath2obj('event_type/public', parser, str)
    public = {
        "True": True,
        "False": False,
        "true": True,
        "false": False
    }.get(public, None)
    if account is not None and account.lower() != "sysop":
        public = False
    # The author will be stored in the CreationInfo object. This will be the
    # creation info of the event as well as on all picks.
    author = user
    if CURRENT_TYPE in ["seiscomp3", "earthworm"]:
        public = False
        author = CURRENT_TYPE
        global_evaluation_mode = "automatic"
    elif CURRENT_TYPE in ["baynet", "toni"]:
        public = True
        author = CURRENT_TYPE
        global_evaluation_mode = "manual"
    creation_info = {
        "author": author,
        "agency_id": "Erdbebendienst Bayern",
        "agency_uri": "%s/agency" % RESOURCE_ROOT,
        "creation_time": NOW
    }

    # Create the event object.
    event = Event(resource_id="/".join([RESOURCE_ROOT, "event", public_id]),
                  creation_info=creation_info)
    # If account is None or 'sysop' and public is true, write 'public' in the
    # comment, 'private' otherwise.
    event.extra = AttribDict()
    event.extra.public = {'value': public, 'namespace': NAMESPACE}
    event.extra.evaluationMode = {
        'value': global_evaluation_mode,
        'namespace': NAMESPACE
    }

    event_type = parser.xpath2obj('type', parser, str)
    if event_type is not None:
        if event_type == "induced earthquake":
            event_type = "induced or triggered event"
        if event_type != "null":
            event.event_type = event_type

    # Parse the origins.
    origins = parser.xpath("origin")
    if len(origins) > 1:
        msg = "Only files with a single origin are currently supported"
        raise Exception(msg)
    for origin_el in parser.xpath("origin"):
        origin = __toOrigin(parser, origin_el)
        event.origins.append(origin)
    # Parse the magnitudes.
    for magnitude_el in parser.xpath("magnitude"):
        magnitude = __toMagnitude(parser, magnitude_el, origin)
        if magnitude.mag is None:
            continue
        event.magnitudes.append(magnitude)
    # Parse the picks. Pass the global evaluation mode (automatic, manual)
    for pick_el in parser.xpath("pick"):
        pick = __toPick(parser, pick_el, global_evaluation_mode)
        if pick is None:
            continue
        event.picks.append(pick)
        # The arrival object gets the following things from the Seishub.pick
        # objects
        # arrival.time_weight = pick.phase_weight
        # arrival.time_residual = pick.phase_res
        # arrival.azimuth = pick.azimuth
        # arrival.take_off_angle = pick.incident
        # arrival.distance = hyp_dist
        arrival = __toArrival(parser, pick_el, global_evaluation_mode, pick)
        if event.origins:
            event.origins[0].arrivals.append(arrival)

    # Parse the station magnitudes.
    for stat_magnitude_el in parser.xpath("stationMagnitude"):
        stat_magnitude = __toStationMagnitude(parser, stat_magnitude_el)
        event.station_magnitudes.append(stat_magnitude)

    # Parse the amplitudes
    # we don't reference their id in the corresponding station magnitude,
    # because we use one amplitude measurement for each component
    for el in parser.xpath("stationMagnitude/amplitude"):
        event.amplitudes.append(__toAmplitude(parser, el))

    for mag in event.station_magnitudes:
        mag.origin_id = event.origins[0].resource_id

    for _i, stat_mag in enumerate(event.station_magnitudes):
        contrib = StationMagnitudeContribution()
        weight = None
        # The order of station magnitude objects is the same as in the xml
        # file.
        weight = parser.xpath2obj("weight",
                                  parser.xpath("stationMagnitude")[_i], float)
        if weight is not None:
            contrib.weight = weight
        contrib.station_magnitude_id = stat_mag.resource_id
        event.magnitudes[0].station_magnitude_contributions.append(contrib)

    for foc_mec_el in parser.xpath("focalMechanism"):
        foc_mec = __toFocalMechanism(parser, foc_mec_el)
        if foc_mec is not None:
            event.focal_mechanisms.append(foc_mec)

    # Set the origin id for the focal mechanisms. There is only one origin per
    # SeisHub event file.
    for focmec in event.focal_mechanisms:
        focmec.triggering_origin_id = event.origins[0].resource_id

    # Add the event to the catalog
    catalog.append(event)

    return catalog
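
A minimal usage sketch for the reader above. The docstring notes that it is normally invoked through ObsPy's readEvents machinery, but calling it directly is the simplest way to show what comes back; the file name is hypothetical and only needs a prefix matching one of the recognised types, and the module-level constants (RESOURCE_ROOT, NAMESPACE, NOW) are assumed to be defined as in the surrounding plugin code.

cat = readSeishubEventFile("obspyck_1234.xml")   # hypothetical input file
print(cat)                        # Catalog containing exactly one Event
event = cat.events[0]
print(event.origins[0].time)      # origin parsed from the <origin> element
print(event.extra.public)         # custom attribute stored under NAMESPACE
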
Example #2
def readSeishubEventFile(filename):
    """
    Reads a Seishub event file and returns an ObsPy Catalog object.

    .. warning::
        This function should NOT be called directly; it registers via the
        ObsPy :func:`~obspy.core.event.readEvents` function, which should be
        called instead.

    :type filename: str
    :param filename: Seishub event file to be read.
    :rtype: :class:`~obspy.core.event.Catalog`
    :return: An ObsPy Catalog object.

    .. rubric:: Example
    """
    global CURRENT_TYPE

    base_name = os.path.basename(filename)

    if base_name.lower().startswith("baynet"):
        CURRENT_TYPE = "baynet"
    elif base_name.lower().startswith("earthworm"):
        CURRENT_TYPE = "earthworm"
    elif base_name.lower().startswith("gof"):
        CURRENT_TYPE = "seiscomp3"
    elif base_name.lower().startswith("obspyck") or base_name == "5622":
        CURRENT_TYPE = "obspyck"
    elif base_name.lower().startswith("toni"):
        CURRENT_TYPE = "toni"
    else:
        msg = "Unknown event file type for file '%s'." % base_name
        raise Exception(msg)

    # Just init the parser, the SeisHub event file format has no namespaces.
    parser = XMLParser(filename)
    # Create new Event object.
    public_id = parser.xpath('event_id/value')[0].text

    # A Seishub event just specifies a single event so Catalog information is
    # not really given.
    catalog = Catalog()
    catalog.resource_id = "/".join([RESOURCE_ROOT, "catalog", public_id])

    # Read the event_type tag.
    account = parser.xpath2obj('event_type/account', parser, str)
    user = parser.xpath2obj('event_type/user', parser, str)
    global_evaluation_mode = parser.xpath2obj('event_type/value', parser, str)
    public = parser.xpath2obj('event_type/public', parser, str)
    public = {"True": True, "False": False}.get(public, None)
    if account is not None and account.lower() != "sysop":
        public = False
    # The author will be stored in the CreationInfo object. This will be the
    # creation info of the event as well as on all picks.
    author = user
    if CURRENT_TYPE in ["seiscomp3", "earthworm"]:
        author = CURRENT_TYPE
    creation_info = {"author": author,
        "agency_id": "Erdbebendienst Bayern",
        "agency_uri": "%s/agency" % RESOURCE_ROOT,
        "creation_time": NOW}

    # Create the event object.
    event = Event(resource_id="/".join([RESOURCE_ROOT, "event", public_id]),
        creation_info=creation_info)
    # If account is None or 'sysop' and public is true, write 'public' in the
    # comment, 'private' otherwise.
    event.extra = AttribDict()
    event.extra.public = {'value': public, 'namespace': NAMESPACE}
    event.extra.evaluationMode = {'value': global_evaluation_mode, 'namespace': NAMESPACE}

    event_type = parser.xpath2obj('type', parser, str)
    if event_type is not None:
        if event_type == "induced earthquake":
            event_type = "induced or triggered event"
        if event_type != "null":
            event.event_type = event_type

    # Parse the origins.
    origins = parser.xpath("origin")
    if len(origins) > 1:
        msg = "Only files with a single origin are currently supported"
        raise Exception(msg)
    for origin_el in parser.xpath("origin"):
        origin = __toOrigin(parser, origin_el)
        event.origins.append(origin)
    # Parse the magnitudes.
    for magnitude_el in parser.xpath("magnitude"):
        magnitude = __toMagnitude(parser, magnitude_el, origin)
        if magnitude.mag is None:
            continue
        event.magnitudes.append(magnitude)
    # Parse the picks. Pass the global evaluation mode (automatic, manual)
    for pick_el in parser.xpath("pick"):
        pick = __toPick(parser, pick_el, global_evaluation_mode)
        if pick is None:
            continue
        event.picks.append(pick)
        # The arrival object gets the following things from the Seishub.pick
        # objects
        # arrival.time_weight = pick.phase_weight
        # arrival.time_residual = pick.phase_res
        # arrival.azimuth = pick.azimuth
        # arrival.take_off_angle = pick.incident
        # arrival.distance = hyp_dist
        arrival = __toArrival(parser, pick_el, global_evaluation_mode, pick)
        if event.origins:
            event.origins[0].arrivals.append(arrival)

    # Parse the station magnitudes.
    for stat_magnitude_el in parser.xpath("stationMagnitude"):
        stat_magnitude = __toStationMagnitude(parser, stat_magnitude_el)
        event.station_magnitudes.append(stat_magnitude)

    # Parse the amplitudes
    # we don't reference their id in the corresponding station magnitude,
    # because we use one amplitude measurement for each component
    for el in parser.xpath("stationMagnitude/amplitude"):
        event.amplitudes.append(__toAmplitude(parser, el))

    for mag in event.station_magnitudes:
        mag.origin_id = event.origins[0].resource_id

    for _i, stat_mag in enumerate(event.station_magnitudes):
        contrib = StationMagnitudeContribution()
        weight = None
        # The order of station magnitude objects is the same as in the xml
        # file.
        weight = parser.xpath2obj("weight",
            parser.xpath("stationMagnitude")[_i], float)
        if weight is not None:
            contrib.weight = weight
        contrib.station_magnitude_id = stat_mag.resource_id
        event.magnitudes[0].station_magnitude_contributions.append(contrib)

    for foc_mec_el in parser.xpath("focalMechanism"):
        foc_mec = __toFocalMechanism(parser, foc_mec_el)
        if foc_mec is not None:
            event.focal_mechanisms.append(foc_mec)

    # Set the origin id for the focal mechanisms. There is only one origin per
    # SeisHub event file.
    for focmec in event.focal_mechanisms:
        focmec.triggering_origin_id = event.origins[0].resource_id

    # Add the event to the catalog
    catalog.append(event)

    return catalog
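
The custom attributes attached to event.extra above (public, evaluationMode) live in their own XML namespace. A small, hedged sketch of writing the parsed catalog back out as QuakeML, assuming an ObsPy version whose QuakeML writer accepts the nsmap argument and using hypothetical file names:

cat = readSeishubEventFile("toni_5678.xml")      # hypothetical input file
# nsmap maps a prefix of our choice onto the custom namespace used for the
# event.extra fields, so they are written out with a readable prefix.
cat.write("event_quakeml.xml", format="QUAKEML", nsmap={"seishub": NAMESPACE})
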
Example #3
def _read_single_event(event_file, locate_dir, units, local_mag_ph):
    """
    Parse an event file from QuakeMigrate into an obspy Event object.

    Parameters
    ----------
    event_file : `pathlib.Path` object
        Path to .event file to read.
    locate_dir : `pathlib.Path` object
        Path to locate directory (contains "events", "picks" etc. directories).
    units : {"km", "m"}
        Grid projection coordinates for QM LUT (determines units of depths and
        uncertainties in the .event files).
    local_mag_ph : {"S", "P"}
        Amplitude measurement used to calculate local magnitudes.

    Returns
    -------
    event : `obspy.Event` object
        Event object populated with all available information output by
        :class:`~quakemigrate.signal.scan.locate()`, including event locations
        and uncertainties, picks, and amplitudes and magnitudes if available.

    """

    # Parse information from event file
    event_info = pd.read_csv(event_file).iloc[0]
    event_uid = str(event_info["EventID"])

    # Set distance conversion factor (from units of QM LUT projection units).
    if units == "km":
        factor = 1e3
    elif units == "m":
        factor = 1
    else:
        raise AttributeError(f"units must be 'km' or 'm'; not {units}")

    # Create event object to store origin and pick information
    event = Event()
    event.extra = AttribDict()
    event.resource_id = str(event_info["EventID"])
    event.creation_info = CreationInfo(author="QuakeMigrate",
                                       version=quakemigrate.__version__)

    # Add COA info to extra
    event.extra.coa = {"value": event_info["COA"], "namespace": ns}
    event.extra.coa_norm = {"value": event_info["COA_NORM"], "namespace": ns}
    event.extra.trig_coa = {"value": event_info["TRIG_COA"], "namespace": ns}
    event.extra.dec_coa = {"value": event_info["DEC_COA"], "namespace": ns}
    event.extra.dec_coa_norm = {
        "value": event_info["DEC_COA_NORM"],
        "namespace": ns
    }

    # Determine location of cut waveform data - add to event object as a
    # custom extra attribute.
    mseed = locate_dir / "raw_cut_waveforms" / event_uid
    event.extra.cut_waveforms_file = {
        "value": str(mseed.with_suffix(".m").resolve()),
        "namespace": ns
    }
    if (locate_dir / "real_cut_waveforms").exists():
        mseed = locate_dir / "real_cut_waveforms" / event_uid
        event.extra.real_cut_waveforms_file = {
            "value": str(mseed.with_suffix(".m").resolve()),
            "namespace": ns
        }
    if (locate_dir / "wa_cut_waveforms").exists():
        mseed = locate_dir / "wa_cut_waveforms" / event_uid
        event.extra.wa_cut_waveforms_file = {
            "value": str(mseed.with_suffix(".m").resolve()),
            "namespace": ns
        }

    # Create origin with spline location and set to preferred event origin.
    origin = Origin()
    origin.method_id = "spline"
    origin.longitude = event_info["X"]
    origin.latitude = event_info["Y"]
    origin.depth = event_info["Z"] * factor
    origin.time = UTCDateTime(event_info["DT"])
    event.origins = [origin]
    event.preferred_origin_id = origin.resource_id

    # Create origin with gaussian location and associate with event
    origin = Origin()
    origin.method_id = "gaussian"
    origin.longitude = event_info["GAU_X"]
    origin.latitude = event_info["GAU_Y"]
    origin.depth = event_info["GAU_Z"] * factor
    origin.time = UTCDateTime(event_info["DT"])
    event.origins.append(origin)

    ouc = OriginUncertainty()
    ce = ConfidenceEllipsoid()
    ce.semi_major_axis_length = event_info["COV_ErrY"] * factor
    ce.semi_intermediate_axis_length = event_info["COV_ErrX"] * factor
    ce.semi_minor_axis_length = event_info["COV_ErrZ"] * factor
    ce.major_axis_plunge = 0
    ce.major_axis_azimuth = 0
    ce.major_axis_rotation = 0
    ouc.confidence_ellipsoid = ce
    ouc.preferred_description = "confidence ellipsoid"

    # Set uncertainties for both as the gaussian uncertainties
    for origin in event.origins:
        origin.longitude_errors.uncertainty = kilometer2degrees(
            event_info["GAU_ErrX"] * factor / 1e3)
        origin.latitude_errors.uncertainty = kilometer2degrees(
            event_info["GAU_ErrY"] * factor / 1e3)
        origin.depth_errors.uncertainty = event_info["GAU_ErrZ"] * factor
        origin.origin_uncertainty = ouc

    # Add OriginQuality info to each origin?
    for origin in event.origins:
        origin.origin_type = "hypocenter"
        origin.evaluation_mode = "automatic"

    # --- Handle picks file ---
    pick_file = locate_dir / "picks" / event_uid
    if pick_file.with_suffix(".picks").is_file():
        picks = pd.read_csv(pick_file.with_suffix(".picks"))
    else:
        # No picks file found for this event, so it cannot be fully
        # reconstructed; skip it entirely.
        return None

    for _, pickline in picks.iterrows():
        station = str(pickline["Station"])
        phase = str(pickline["Phase"])
        wid = WaveformStreamID(network_code="", station_code=station)

        for method in ["modelled", "autopick"]:
            pick = Pick()
            pick.extra = AttribDict()
            pick.waveform_id = wid
            pick.method_id = method
            pick.phase_hint = phase
            if method == "autopick" and str(pickline["PickTime"]) != "-1":
                pick.time = UTCDateTime(pickline["PickTime"])
                pick.time_errors.uncertainty = float(pickline["PickError"])
                pick.extra.snr = {
                    "value": float(pickline["SNR"]),
                    "namespace": ns
                }
            elif method == "modelled":
                pick.time = UTCDateTime(pickline["ModelledTime"])
            else:
                continue
            event.picks.append(pick)

    # --- Handle amplitudes file ---
    amps_file = locate_dir / "amplitudes" / event_uid
    if amps_file.with_suffix(".amps").is_file():
        amps = pd.read_csv(amps_file.with_suffix(".amps"))

        i = 0
        for _, ampsline in amps.iterrows():
            wid = WaveformStreamID(seed_string=ampsline["id"])
            noise_amp = ampsline["Noise_amp"] / 1000  # mm to m
            for phase in ["P_amp", "S_amp"]:
                amp = Amplitude()
                if pd.isna(ampsline[phase]):
                    continue
                amp.generic_amplitude = ampsline[phase] / 1000  # mm to m
                amp.generic_amplitude_errors.uncertainty = noise_amp
                amp.unit = "m"
                amp.type = "AML"
                amp.method_id = phase
                amp.period = 1 / ampsline[f"{phase[0]}_freq"]
                amp.time_window = TimeWindow(
                    reference=UTCDateTime(ampsline[f"{phase[0]}_time"]))
                # amp.pick_id = ?
                amp.waveform_id = wid
                # amp.filter_id = ?
                amp.magnitude_hint = "ML"
                amp.evaluation_mode = "automatic"
                amp.extra = AttribDict()
                try:
                    amp.extra.filter_gain = {
                        "value": ampsline[f"{phase[0]}_filter_gain"],
                        "namespace": ns
                    }
                    amp.extra.avg_amp = {
                        "value": ampsline[f"{phase[0]}_avg_amp"] / 1000,  # m
                        "namespace": ns
                    }
                except KeyError:
                    pass

                if phase[0] == local_mag_ph and not pd.isna(ampsline["ML"]):
                    i += 1
                    stat_mag = StationMagnitude()
                    stat_mag.extra = AttribDict()
                    # stat_mag.origin_id = ? local_mag_loc
                    stat_mag.mag = ampsline["ML"]
                    stat_mag.mag_errors.uncertainty = ampsline["ML_Err"]
                    stat_mag.station_magnitude_type = "ML"
                    stat_mag.amplitude_id = amp.resource_id
                    stat_mag.extra.picked = {
                        "value": ampsline["is_picked"],
                        "namespace": ns
                    }
                    stat_mag.extra.epi_dist = {
                        "value": ampsline["epi_dist"],
                        "namespace": ns
                    }
                    stat_mag.extra.z_dist = {
                        "value": ampsline["z_dist"],
                        "namespace": ns
                    }

                    event.station_magnitudes.append(stat_mag)

                event.amplitudes.append(amp)

        mag = Magnitude()
        mag.extra = AttribDict()
        mag.mag = event_info["ML"]
        mag.mag_errors.uncertainty = event_info["ML_Err"]
        mag.magnitude_type = "ML"
        # mag.origin_id = ?
        mag.station_count = i
        mag.evaluation_mode = "automatic"
        mag.extra.r2 = {"value": event_info["ML_r2"], "namespace": ns}

        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id

    return event
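
A minimal usage sketch for the reader above; the paths are hypothetical and simply follow the QuakeMigrate locate-directory layout described in the docstring (an "events" subdirectory containing .event files), and the function is assumed to be in scope:

from pathlib import Path

locate_dir = Path("outputs/example_run/locate")
event_file = locate_dir / "events" / "20180101120000000.event"

event = _read_single_event(event_file, locate_dir, units="km", local_mag_ph="S")
if event is not None:
    print(event.preferred_origin().time)      # spline origin (preferred)
    print(len(event.picks), "picks")
    if event.magnitudes:
        print(event.magnitudes[0].mag, "ML")  # network local magnitude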