Example #1
    def point_in_domain(self, latitude, longitude):
        """
        Tests if the point is in the domain. Returns True/False

        :param latitude: The latitude of the point.
        :param longitude: The longitude of the point.
        """
        from lasif.utils import point_in_domain

        domain = self.comm.project.domain

        if domain == "global":
            return True

        return point_in_domain(
            latitude,
            longitude,
            domain=domain["bounds"],
            rotation_axis=domain["rotation_axis"],
            rotation_angle_in_degree=domain["rotation_angle"],
        )
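
For context, a minimal, self-contained stand-in that shows the kind of decision point_in_domain has to make, restricted to an unrotated rectangular domain. This is not the LASIF implementation (which also handles rotated domains via rotation_axis / rotation_angle_in_degree); the bounds keys and the coordinate values below are illustrative assumptions.

def point_in_rectangular_domain(latitude, longitude, bounds):
    """Simplified stand-in: plain latitude/longitude box, no rotation,
    no domains crossing the date line."""
    return (
        bounds["min_latitude"] <= latitude <= bounds["max_latitude"]
        and bounds["min_longitude"] <= longitude <= bounds["max_longitude"]
    )

# Made-up example: a rough box around the Alps.
bounds = {
    "min_latitude": 43.0,
    "max_latitude": 49.0,
    "min_longitude": 4.0,
    "max_longitude": 17.0,
}
print(point_in_rectangular_domain(46.0, 8.0, bounds))   # True
print(point_in_rectangular_domain(52.0, 13.0, bounds))  # False
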
Example #2
    def is_event_station_raypath_within_boundaries(
        self, event_latitude, event_longitude, station_latitude, station_longitude, raypath_steps=25
    ):
        """
        Checks if the full station-event raypath is within the project's domain
        boundaries.

        Returns True if this is the case, False if not.

        :type event_latitude: float
        :param event_latitude: The event latitude.
        :type event_longitude: float
        :param event_longitude: The event longitude.
        :type station_latitude: float
        :param station_latitude: The station latitude.
        :type station_longitude: float
        :param station_longitude: The station longitude.
        :type raypath_steps: int
        :param raypath_steps: The number of discrete points along the raypath
            that will be checked. Optional.
        """
        from lasif.utils import greatcircle_points, Point, point_in_domain

        domain = self.comm.project.domain

        for point in greatcircle_points(
            Point(station_latitude, station_longitude), Point(event_latitude, event_longitude), max_npts=raypath_steps
        ):

            if not point_in_domain(
                point.lat,
                point.lng,
                domain["bounds"],
                rotation_axis=domain["rotation_axis"],
                rotation_angle_in_degree=domain["rotation_angle"],
            ):
                return False
        return True
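
The same idea can be sketched without LASIF's helpers: sample the event-station great circle at a fixed number of points and require every sample to lie inside the domain. The snippet below is a simplified, self-contained stand-in for greatcircle_points / point_in_domain (spherical linear interpolation on the unit sphere, unrotated rectangular bounds); the bounds keys are the same illustrative assumption as in the previous sketch.

import math

def greatcircle_samples(lat1, lon1, lat2, lon2, npts=25):
    """Yield npts (lat, lon) points, end points included, along the great
    circle between two geographic points, via spherical linear interpolation."""
    def to_vec(lat, lon):
        la, lo = math.radians(lat), math.radians(lon)
        return (math.cos(la) * math.cos(lo), math.cos(la) * math.sin(lo), math.sin(la))

    a, b = to_vec(lat1, lon1), to_vec(lat2, lon2)
    # Angle between the two position vectors, clamped against rounding noise.
    omega = math.acos(max(-1.0, min(1.0, sum(i * j for i, j in zip(a, b)))))
    for k in range(npts):
        if omega < 1e-12:  # start and end point coincide
            yield lat1, lon1
            continue
        t = k / float(npts - 1)
        f1 = math.sin((1.0 - t) * omega) / math.sin(omega)
        f2 = math.sin(t * omega) / math.sin(omega)
        x, y, z = (f1 * i + f2 * j for i, j in zip(a, b))
        norm = math.sqrt(x * x + y * y + z * z)
        yield math.degrees(math.asin(z / norm)), math.degrees(math.atan2(y, x))

def raypath_in_rectangular_domain(ev_lat, ev_lon, st_lat, st_lon, bounds, steps=25):
    """Simplified analogue of the method above: every sampled point of the
    event-station great circle must fall inside a plain lat/lon box."""
    return all(
        bounds["min_latitude"] <= lat <= bounds["max_latitude"]
        and bounds["min_longitude"] <= lon <= bounds["max_longitude"]
        for lat, lon in greatcircle_samples(ev_lat, ev_lon, st_lat, st_lon, steps)
    )
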
Example #3
    def _validate_event_files(self):
        """
        Validates all event files in the currently active project.

        The following tasks are performed:
            * Validate against the QuakeML 1.2 schema.
            * Check for duplicate ids amongst all QuakeML files.
            * Make sure they contain at least one origin, magnitude and focal
              mechanism object.
            * Some simple sanity checks to make sure the event depth and the
              moment tensor values are reasonable. This is rather fragile and
              mainly intended to detect values specified in wrong units.
            * Events that are too close in time. Events that are less than one
              hour apart can in general not be used for adjoint tomography.
              This will naturally also detect duplicate events.
        """
        import collections
        import itertools
        import math
        from obspy import readEvents
        from obspy.core.quakeml import _validate as validate_quakeml
        from lasif import utils
        from lxml import etree

        print "Validating %i event files ..." % self.comm.events.count()

        # Start with the schema validation.
        print "\tValidating against QuakeML 1.2 schema ",
        all_valid = True
        for event in self.comm.events.get_all_events().values():
            filename = event["filename"]
            self._flush_point()
            if validate_quakeml(filename) is not True:
                all_valid = False
                msg = (
                    "ERROR: "
                    "The QuakeML file '{basename}' did not validate against "
                    "the QuakeML 1.2 schema. Unfortunately the error messages "
                    "delivered by lxml are not useful at all. To get useful "
                    "error messages make sure jing is installed "
                    "('brew install jing' (OSX) or "
                    "'sudo apt-get install jing' (Debian/Ubuntu)) and "
                    "execute the following command:\n\n"
                    "\tjing http://quake.ethz.ch/schema/rng/QuakeML-1.2.rng "
                    "{filename}\n\n"
                    "Alternatively you could also use the "
                    "'lasif add_spud_event' command to redownload the event "
                    "if it is in the GCMT "
                    "catalog.\n\n"
                ).format(basename=os.path.basename(filename), filename=os.path.relpath(filename))
                self._add_report(msg)
        if all_valid is True:
            self._print_ok_message()
        else:
            self._print_fail_message()

        # Now check for duplicate public IDs.
        print "\tChecking for duplicate public IDs ",
        ids = collections.defaultdict(list)
        for event in self.comm.events.get_all_events().values():
            filename = event["filename"]
            self._flush_point()
            # Now walk all files and collect all public ids. Each should be
            # unique!
            with open(filename, "rt") as fh:
                for event, elem in etree.iterparse(fh, events=("start",)):
                    if "publicID" not in elem.keys() or elem.tag.endswith("eventParameters"):
                        continue
                    ids[elem.get("publicID")].append(filename)
        ids = {key: list(set(value)) for (key, value) in ids.iteritems() if len(value) > 1}
        if not ids:
            self._print_ok_message()
        else:
            self._print_fail_message()
            self._add_report(
                "Found the following duplicate publicIDs:\n"
                + "\n".join(
                    [
                        "\t%s in files: %s" % (id_string, ", ".join([os.path.basename(i) for i in faulty_files]))
                        for id_string, faulty_files in ids.iteritems()
                    ]
                ),
                error_count=len(ids),
            )

        def print_warning(filename, message):
            self._add_report(
                "WARNING: File '{event_name}' "
                "contains {msg}.\n".format(event_name=os.path.basename(filename), msg=message)
            )

        # Performing simple sanity checks.
        print "\tPerforming some basic sanity checks ",
        all_good = True
        for event in self.comm.events.get_all_events().values():
            filename = event["filename"]
            self._flush_point()
            cat = readEvents(filename)
            filename = os.path.basename(filename)
            # Check that all files contain exactly one event!
            if len(cat) != 1:
                all_good = False
                print_warning(filename, "%i events instead of only one." % len(cat))
            event = cat[0]

            # Sanity checks related to the origin.
            if not event.origins:
                all_good = False
                print_warning(filename, "no origin")
                continue
            origin = event.preferred_origin() or event.origins[0]
            if origin.depth % 100.0:
                all_good = False
                print_warning(
                    filename,
                    "a depth of %.1f meters. This kind of accuracy "
                    "seems unrealistic. The depth in the QuakeML "
                    "file has to be specified in meters. Checking "
                    "all other QuakeML files for the correct units "
                    "might be a good idea" % origin.depth,
                )
            if origin.depth > (800.0 * 1000.0):
                all_good = False
                print_warning(filename, "a depth of more than 800 km. This is" " likely wrong.")

            # Sanity checks related to the magnitude.
            if not event.magnitudes:
                all_good = False
                print_warning(filename, "no magnitude")
                continue

            # Sanity checks related to the focal mechanism.
            if not event.focal_mechanisms:
                all_good = False
                print_warning(filename, "no focal mechanism")
                continue

            focmec = event.preferred_focal_mechanism() or event.focal_mechanisms[0]
            if not hasattr(focmec, "moment_tensor") or not focmec.moment_tensor:
                all_good = False
                print_warning(filename, "no moment tensor")
                continue

            mt = focmec.moment_tensor
            if not hasattr(mt, "tensor") or not mt.tensor:
                all_good = False
                print_warning(filename, "no actual moment tensor")
                continue
            tensor = mt.tensor

            # Convert the moment tensor to a magnitude and see if it is
            # reasonable.
            mag_in_file = event.preferred_magnitude() or event.magnitudes[0]
            mag_in_file = mag_in_file.mag
            M_0 = 1.0 / math.sqrt(2.0) * math.sqrt(tensor.m_rr ** 2 + tensor.m_tt ** 2 + tensor.m_pp ** 2)
            magnitude = 2.0 / 3.0 * math.log10(M_0) - 6.0
            # Use some buffer to account for different magnitudes.
            if not (mag_in_file - 1.0) < magnitude < (mag_in_file + 1.0):
                all_good = False
                print_warning(
                    filename,
                    "a moment tensor that would result in a moment "
                    "magnitude of %.2f. The magnitude specified in "
                    "the file is %.2f. Please check that all "
                    "components of the tensor are in Newton * meter" % (magnitude, mag_in_file),
                )

        if all_good is True:
            self._print_ok_message()
        else:
            self._print_fail_message()

        # Collect event times
        event_infos = self.comm.events.get_all_events().values()

        # Now check the time distribution of events.
        print "\tChecking for duplicates and events too close in time %s" % (self.comm.events.count() * "."),
        all_good = True
        # Sort the events by time.
        event_infos = sorted(event_infos, key=lambda x: x["origin_time"])
        # Loop over adjacent indices.
        a, b = itertools.tee(event_infos)
        next(b, None)
        for event_1, event_2 in itertools.izip(a, b):
            time_diff = abs(event_2["origin_time"] - event_1["origin_time"])
            # If time difference is under one hour, it could be either a
            # duplicate event or interfering events.
            if time_diff <= 3600.0:
                all_good = False
                self._add_report(
                    "WARNING: "
                    "The time difference between events '{file_1}' and "
                    "'{file_2}' is only {diff:.1f} minutes. This could "
                    "be either due to a duplicate event or events that have "
                    "interfering waveforms.\n".format(
                        file_1=event_1["filename"], file_2=event_2["filename"], diff=time_diff / 60.0
                    )
                )
        if all_good is True:
            self._print_ok_message()
        else:
            self._print_fail_message()

        # Check that all events fall within the chosen boundaries.
        print "\tAssure all events are in chosen domain %s" % (self.comm.events.count() * "."),
        all_good = True
        domain = self.comm.project.domain
        for event in event_infos:
            if (
                utils.point_in_domain(
                    event["latitude"],
                    event["longitude"],
                    domain["bounds"],
                    domain["rotation_axis"],
                    domain["rotation_angle"],
                )
                is True
            ):
                continue
            all_good = False
            self._add_report(
                "\nWARNING: "
                "Event '{filename}' is out of bounds of the chosen domain."
                "\n".format(filename=event["filename"])
            )
        if all_good is True:
            self._print_ok_message()
        else:
            self._print_fail_message()
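
The magnitude sanity check in the middle of this validator boils down to two lines of arithmetic taken straight from the code above: a rough scalar moment from the diagonal tensor components, M_0 = 1/sqrt(2) * sqrt(m_rr^2 + m_tt^2 + m_pp^2), and the moment-magnitude relation Mw = 2/3 * log10(M_0) - 6.0 with M_0 in Newton * meter. Below is a self-contained sketch of that comparison, with made-up tensor values for illustration.

import math

def magnitude_consistent(m_rr, m_tt, m_pp, catalog_magnitude, tolerance=1.0):
    """Mirror of the check above: derive a rough moment magnitude from the
    diagonal moment tensor components (in Newton * meter) and require it to be
    within +/- tolerance magnitude units of the catalog magnitude."""
    m_0 = 1.0 / math.sqrt(2.0) * math.sqrt(m_rr ** 2 + m_tt ** 2 + m_pp ** 2)
    mw = 2.0 / 3.0 * math.log10(m_0) - 6.0
    return (catalog_magnitude - tolerance) < mw < (catalog_magnitude + tolerance)

# A scalar moment around 7e17 N*m corresponds to Mw ~ 5.9.
print(magnitude_consistent(8.0e17, -3.0e17, -5.0e17, 5.9))  # True
# The same numbers in dyn*cm (a factor of 1e7 too large) trip the check.
print(magnitude_consistent(8.0e24, -3.0e24, -5.0e24, 5.9))  # False
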