Example 1
def test_no_origin():
    # No argument
    with pytest.raises(Exception):
        get_rupture()

    # Wrong type
    with pytest.raises(Exception):
        get_rupture(7.3)
Example 2
    def _event_station_metrics(self, event):
        self.eventid = event.id
        logging.info('Computing station metrics for event %s...' %
                     self.eventid)
        event_dir = os.path.join(self.gmrecords.data_path, self.eventid)
        workname = os.path.join(event_dir, WORKSPACE_NAME)
        if not os.path.isfile(workname):
            logging.info(
                'No workspace file found for event %s. Please run '
                'subcommand \'assemble\' to generate a workspace file.' %
                self.eventid)
            logging.info('Continuing to next event.')
            return event.id

        self.workspace = StreamWorkspace.open(workname)
        self._get_pstreams()

        if not (hasattr(self, 'pstreams') and len(self.pstreams) > 0):
            logging.info('No streams found. Nothing to do. Goodbye.')
            self.workspace.close()
            return event.id

        rupture_file = get_rupture_file(event_dir)
        origin = Origin({
            'id': self.eventid,
            'netid': '',
            'network': '',
            'lat': event.latitude,
            'lon': event.longitude,
            'depth': event.depth_km,
            'locstring': '',
            'mag': event.magnitude,
            'time': event.time
        })
        self.origin = origin
        rupture = get_rupture(origin, rupture_file)

        sta_lats = []
        sta_lons = []
        sta_elev = []
        self.sta_repi = []
        self.sta_rhyp = []
        self.sta_baz = []
        for st in self.pstreams:
            sta_lats.append(st[0].stats.coordinates.latitude)
            sta_lons.append(st[0].stats.coordinates.longitude)
            sta_elev.append(st[0].stats.coordinates.elevation)
            geo_tuple = gps2dist_azimuth(st[0].stats.coordinates.latitude,
                                         st[0].stats.coordinates.longitude,
                                         origin.lat, origin.lon)
            self.sta_repi.append(geo_tuple[0] / M_PER_KM)
            self.sta_baz.append(geo_tuple[1])
            self.sta_rhyp.append(
                distance(st[0].stats.coordinates.longitude,
                         st[0].stats.coordinates.latitude,
                         -st[0].stats.coordinates.elevation / M_PER_KM,
                         origin.lon, origin.lat, origin.depth))

        if isinstance(rupture, PointRupture):
            self._get_ps2ff_splines()
            rjb_hat = self.rjb_spline(self.sta_repi)
            rjb_mean = rjb_hat[0]
            rjb_var = rjb_hat[1]
            rrup_hat = self.rrup_spline(self.sta_repi)
            rrup_mean = rrup_hat[0]
            rrup_var = rrup_hat[1]
            gc2_rx = np.full_like(rjb_mean, np.nan)
            gc2_ry = np.full_like(rjb_mean, np.nan)
            gc2_ry0 = np.full_like(rjb_mean, np.nan)
            gc2_U = np.full_like(rjb_mean, np.nan)
            gc2_T = np.full_like(rjb_mean, np.nan)
        else:
            logging.info('******************************')
            logging.info('* Found rupture              *')
            logging.info('******************************')
            sta_lons = np.array(sta_lons)
            sta_lats = np.array(sta_lats)
            elev = np.full_like(sta_lons, ELEVATION_FOR_DISTANCE_CALCS)
            rrup_mean, rrup_var = rupture.computeRrup(sta_lons, sta_lats, elev)
            rjb_mean, rjb_var = rupture.computeRjb(sta_lons, sta_lats, elev)
            rrup_var = np.full_like(rrup_mean, np.nan)
            rjb_var = np.full_like(rjb_mean, np.nan)
            gc2_dict = rupture.computeGC2(sta_lons, sta_lats, elev)
            gc2_rx = gc2_dict['rx']
            gc2_ry = gc2_dict['ry']
            gc2_ry0 = gc2_dict['ry0']
            gc2_U = gc2_dict['U']
            gc2_T = gc2_dict['T']

            # If we don't have a point rupture, then back azimuth needs
            # to be calculated to the closest point on the rupture
            self.sta_baz = []
            for i in range(len(self.pstreams)):
                dists = []
                bazs = []
                for quad in rupture._quadrilaterals:
                    P0, P1, P2, P3 = quad
                    for point in [P0, P1]:
                        dist, az, baz = gps2dist_azimuth(
                            point.y, point.x, sta_lats[i], sta_lons[i])
                        dists.append(dist)
                        bazs.append(baz)
                self.sta_baz.append(bazs[np.argmin(dists)])

        for i, stream in enumerate(self.pstreams):
            logging.info('Calculating station metrics for %s...' %
                         stream.get_id())
            summary = StationSummary.from_config(stream,
                                                 event=event,
                                                 config=self.gmrecords.conf,
                                                 calc_waveform_metrics=False,
                                                 calc_station_metrics=False,
                                                 rupture=rupture,
                                                 vs30_grids=self.vs30_grids)

            summary._distances = {
                'epicentral': self.sta_repi[i],
                'hypocentral': self.sta_rhyp[i],
                'rupture': rrup_mean[i],
                'rupture_var': rrup_var[i],
                'joyner_boore': rjb_mean[i],
                'joyner_boore_var': rjb_var[i],
                'gc2_rx': gc2_rx[i],
                'gc2_ry': gc2_ry[i],
                'gc2_ry0': gc2_ry0[i],
                'gc2_U': gc2_U[i],
                'gc2_T': gc2_T[i]
            }
            summary._back_azimuth = self.sta_baz[i]
            if self.vs30_grids is not None:
                for vs30_name in self.vs30_grids.keys():
                    tmpgrid = self.vs30_grids[vs30_name]
                    summary._vs30[vs30_name] = {
                        'value':
                        tmpgrid['grid_object'].getValue(
                            float(sta_lats[i]), float(sta_lons[i])),
                        'column_header':
                        tmpgrid['column_header'],
                        'readme_entry':
                        tmpgrid['readme_entry'],
                        'units':
                        tmpgrid['units']
                    }

            xmlstr = summary.get_station_xml()
            metricpath = '/'.join([
                format_netsta(stream[0].stats),
                format_nslit(stream[0].stats, stream.get_inst(), self.eventid)
            ])
            self.workspace.insert_aux(xmlstr,
                                      'StationMetrics',
                                      metricpath,
                                      overwrite=self.gmrecords.args.overwrite)
            logging.info('Added station metrics to workspace files '
                         'with tag \'%s\'.' % self.gmrecords.args.label)

        self.workspace.close()
        return event.id
Example 3
    def calcMetrics(self, eventid, stations=None, labels=None, config=None,
                    streams=None, stream_label=None, rupture_file=None,
                    calc_station_metrics=True, calc_waveform_metrics=True):
        """
        Calculate waveform and/or station metrics for a set of waveforms.

        Args:
            eventid (str):
                ID of event to search for in ASDF file.
            stations (list):
                List of stations to create metrics for.
            labels (list):
                List of processing labels to create metrics for.
            config (dict):
                Configuration dictionary.
            streams (StreamCollection):
                Optional StreamCollection object to create metrics for.
            stream_label (str):
                Label to be used in the metrics path when providing a
                StreamCollection.
            rupture_file (str):
                Path pointing to the rupture file.
            calc_station_metrics (bool):
                Whether to calculate station metrics. Default is True.
            calc_waveform_metrics (bool):
                Whether to calculate waveform metrics. Default is True.
        """
        if not self.hasEvent(eventid):
            fmt = 'No event matching %s found in workspace.'
            raise KeyError(fmt % eventid)

        if streams is None:
            streams = self.getStreams(
                eventid, stations=stations, labels=labels)

        event = self.getEvent(eventid)

        # Load the rupture file
        origin = Origin({
            'id': event.id,
            'netid': '',
            'network': '',
            'lat': event.latitude,
            'lon': event.longitude,
            'depth': event.depth_km,
            'locstring': '',
            'mag': event.magnitude,
            'time': event.time
        })
        rupture = get_rupture(origin, rupture_file)

        vs30_grids = None
        if config is not None:
            if 'vs30' in config['metrics']:
                vs30_grids = config['metrics']['vs30']
                for vs30_name in vs30_grids:
                    vs30_grids[vs30_name]['grid_object'] = GMTGrid.load(
                        vs30_grids[vs30_name]['file'])

        for stream in streams:
            instrument = stream.get_id()
            logging.info('Calculating stream metrics for %s...' % instrument)

            try:
                summary = StationSummary.from_config(
                    stream, event=event, config=config,
                    calc_waveform_metrics=calc_waveform_metrics,
                    calc_station_metrics=calc_station_metrics,
                    rupture=rupture, vs30_grids=vs30_grids)
            except BaseException as pgme:
                fmt = ('Could not create stream metrics for event %s, '
                       'instrument %s: "%s"')
                logging.warning(fmt % (eventid, instrument, str(pgme)))
                continue

            if calc_waveform_metrics and stream.passed:
                xmlstr = summary.get_metric_xml()
                if stream_label is not None:
                    tag = '%s_%s' % (eventid, stream_label)
                else:
                    tag = stream.tag
                metricpath = '/'.join([
                    format_netsta(stream[0].stats),
                    format_nslit(stream[0].stats, stream.get_inst(), tag),
                ])
                self.insert_aux(xmlstr, 'WaveFormMetrics', metricpath)

            if calc_station_metrics:
                xmlstr = summary.get_station_xml()
                metricpath = '/'.join([
                    format_netsta(stream[0].stats),
                    format_nslit(stream[0].stats, stream.get_inst(), eventid)
                ])
                self.insert_aux(xmlstr, 'StationMetrics', metricpath)
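
The docstring in the example above documents the calcMetrics API; the sketch below shows one plausible way to call it on an existing workspace. It is not taken from the project: the workspace file name, event ID, label, and rupture file are hypothetical, and the StreamWorkspace import path is an assumption.

# Minimal usage sketch for calcMetrics (all literals below are assumptions).
from gmprocess.io.asdf.stream_workspace import StreamWorkspace  # assumed module path

workspace = StreamWorkspace.open('workspace.h5')  # hypothetical workspace file
try:
    workspace.calcMetrics(
        'us1000abcd',                  # hypothetical event ID present in the workspace
        labels=['default'],            # processing label(s) to compute metrics for
        rupture_file='rupture.json',   # optional path to a rupture geometry file
        calc_station_metrics=True,
        calc_waveform_metrics=True)
finally:
    workspace.close()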
Example 4
    def _event_station_metrics(self, event):
        self.eventid = event.id
        logging.info('Computing station metrics for event %s...' %
                     self.eventid)
        event_dir = os.path.join(self.gmrecords.data_path, self.eventid)
        workname = os.path.join(event_dir, WORKSPACE_NAME)
        if not os.path.isfile(workname):
            logging.info(
                'No workspace file found for event %s. Please run '
                'subcommand \'assemble\' to generate a workspace file.' %
                self.eventid)
            logging.info('Continuing to next event.')
            return event.id

        self.workspace = StreamWorkspace.open(workname)
        self._get_pstreams()

        rupture_file = get_rupture_file(event_dir)
        origin = Origin({
            'id': self.eventid,
            'netid': '',
            'network': '',
            'lat': event.latitude,
            'lon': event.longitude,
            'depth': event.depth_km,
            'locstring': '',
            'mag': event.magnitude,
            'time': event.time
        })
        rupture = get_rupture(origin, rupture_file)

        if not hasattr(self, 'pstreams'):
            logging.info('No processed waveforms available. No station '
                         'metrics computed.')
            self.workspace.close()
            return

        for stream in self.pstreams:
            logging.info('Calculating station metrics for %s...' %
                         stream.get_id())
            summary = StationSummary.from_config(stream,
                                                 event=event,
                                                 config=self.gmrecords.conf,
                                                 calc_waveform_metrics=False,
                                                 calc_station_metrics=True,
                                                 rupture=rupture,
                                                 vs30_grids=self.vs30_grids)
            xmlstr = summary.get_station_xml()
            metricpath = '/'.join([
                format_netsta(stream[0].stats),
                format_nslit(stream[0].stats, stream.get_inst(), self.eventid)
            ])
            self.workspace.insert_aux(xmlstr,
                                      'StationMetrics',
                                      metricpath,
                                      overwrite=self.gmrecords.args.overwrite)
            logging.info('Added station metrics to workspace files '
                         'with tag \'%s\'.' % self.gmrecords.args.label)

        self.workspace.close()
        return event.id
Example 5
def test_incorrect():
    # Number of points in polygon is even
    rupture_text = """# Source: Ji, C., D. V. Helmberger, D. J. Wald, and \
K.-F. Ma (2003). Slip history and dynamic implications of the 1999 Chi-Chi, \
Taiwan, earthquake, J. Geophys. Res. 108, 2412, doi:10.1029/2002JB001764.
    120.72300 24.27980 	0
    121.00000 24.05000	17
    121.09300 24.07190	17
    121.04300 24.33120	17
    121.04300 24.33120	17
    120.72300 24.27980	0
    >
    120.72300 24.27980	0
    120.68000 23.70000	0
    120.97200 23.60400	17
    121.00000 24.05000	17
    120.72300 24.27980	0
    >
    120.97200 23.60400	17
    120.68000 23.70000	0
    120.58600 23.58850	0
    120.78900 23.40240	17
    120.97200 23.60400	17"""  # noqa

    # Rupture requires an origin even when not used:
    origin = Origin({
        'id': 'test',
        'lon': 0,
        'lat': 0,
        'depth': 5.0,
        'mag': 7.0,
        'netid': 'us',
        'network': '',
        'locstring': '',
        'time': HistoricTime.utcfromtimestamp(time.time())
    })
    cbuf = io.StringIO(rupture_text)
    with pytest.raises(Exception):
        get_rupture(origin, cbuf)

    # Top points must be first
    rupture_text = """# Test
    120.72300 24.27980 	0
    121.00000 24.05000	17
    121.09300 24.07190	17
    121.04300 24.33120	17
    120.72300 24.27980	0"""  # noqa
    cbuf = io.StringIO(rupture_text)
    with pytest.raises(Exception):
        get_rupture(origin, cbuf)

    # Wrong order of lat/lon
    rupture_text = """# Test
    -118.421 34.315  5.000
    -118.587 34.401  5.000
    -118.693 34.261 20.427
    -118.527 34.175 20.427
    -118.421 34.315 5.000
    """  # noqa
    cbuf = io.StringIO(rupture_text)
    with pytest.raises(Exception):
        get_rupture(origin, cbuf, new_format=False)

    # Wrong order of lat/lon
    rupture_text = """# Test
    34.315 -118.421  5.000
    34.401 -118.587  5.000
    34.261 -118.693 20.427
    34.175 -118.527 20.427
    34.315 -118.421  5.000
    """  # noqa
    cbuf = io.StringIO(rupture_text)
    with pytest.raises(Exception):
        get_rupture(origin, cbuf, new_format=True)

    # Unclosed segments
    rupture_text = """# Test
    34.315 -118.421  5.000
    34.401 -118.587  5.000
    34.261 -118.693 20.427
    34.175 -118.527 20.427
    34.315 -118.6    5.000
    """  # noqa
    cbuf = io.StringIO(rupture_text)
    with pytest.raises(Exception):
        get_rupture(origin, cbuf, new_format=False)

    # incorrect delimiter
    rupture_text = """#Test
    34.315;-118.421;5.000
    34.401;-118.587;5.000
    34.261;-118.693;20.427
    34.175;-118.527;20.427
    34.315;-118.421;5.000
    """  # noqa
    cbuf = io.StringIO(rupture_text)
    with pytest.raises(Exception):
        get_rupture(origin, cbuf, new_format=False)

    # incorrect delimiter, new format
    rupture_text = """#Test
    34.315;-118.421;5.000
    34.401;-118.587;5.000
    34.261;-118.693;20.427
    34.175;-118.527;20.427
    34.315;-118.421;5.000
    """  # noqa
    cbuf = io.StringIO(rupture_text)
    with pytest.raises(Exception):
        get_rupture(origin, cbuf, new_format=True)

    # Not 3 columns
    rupture_text = """#Test
    34.315 -118.421;5.000
    34.401 -118.587;5.000
    34.261 -118.693;20.427
    34.175 -118.527;20.427
    34.315 -118.421;5.000
    """  # noqa
    cbuf = io.StringIO(rupture_text)
    with pytest.raises(Exception):
        get_rupture(origin, cbuf, new_format=False)

    # Incorrect JSON
    test = {
        "metadata": {
            "id": "test",
            "mag": 7.0,
            "lon": 0,
            "mech": "ALL",
            "depth": 5.0,
            "time": "2018-07-02T22:50:03Z",
            "netid": "us",
            "rake": 0.0,
            "lat": 0,
            "network": "",
            "locstring": "",
            "reference": "Test"
        },
        "features": [{
            "type": "Feature",
            "geometry": {
                "coordinates": [[[[-118.421, 34.315, 5.0],
                                  [-118.587, 34.401, 5.0],
                                  [-118.693, 34.261, 20.427],
                                  [-118.527, 34.175, 20.427],
                                  [-118.421, 34.315, 5.0]]]],
                "type":
                "MultiPolygon"
            },
            "properties": {
                "rupture type": "rupture extent"
            }
        }],
        "type":
        "FeatureCollection"
    }

    # incorrect type
    test_incorrect = copy.deepcopy(test)
    test_incorrect['type'] = 'Feature'
    with pytest.raises(Exception) as e:
        validate_json(test_incorrect)
    print(str(e))

    # Incorrect number of features
    test_incorrect = copy.deepcopy(test)
    test_incorrect['features'].append(['wrong'])
    with pytest.raises(Exception) as e:
        validate_json(test_incorrect)
    print(str(e))

    # no reference
    test_incorrect = copy.deepcopy(test)
    test_incorrect['metadata'].pop('reference', None)
    with pytest.raises(Exception) as e:
        validate_json(test_incorrect)
    print(str(e))

    # incorrect feature type
    test_incorrect = copy.deepcopy(test)
    test_incorrect['features'][0]['type'] = 'fred'
    with pytest.raises(Exception) as e:
        validate_json(test_incorrect)
    print(str(e))

    # incorrect feature geometry type
    test_incorrect = copy.deepcopy(test)
    test_incorrect['features'][0]['geometry']['type'] = 'fred'
    with pytest.raises(Exception) as e:
        validate_json(test_incorrect)
    print(str(e))

    # no coordinates
    test_incorrect = copy.deepcopy(test)
    test_incorrect['features'][0]['geometry'].pop('coordinates', None)
    with pytest.raises(Exception) as e:
        validate_json(test_incorrect)
    print(str(e))
Example 6
def test_parse_complicated_rupture():
    rupture_text = """# SOURCE: Barka, A., H. S. Akyz, E. Altunel, G. Sunal, \
Z. Akir, A. Dikbas, B. Yerli, R. Armijo, B. Meyer, J. B. d. Chabalier, \
T. Rockwell, J. R. Dolan, R. Hartleb, T. Dawson, S. Christofferson, \
A. Tucker, T. Fumal, R. Langridge, H. Stenner, W. Lettis, J. Bachhuber, \
and W. Page (2002). The Surface Rupture and Slip Distribution of the \
17 August 1999 Izmit Earthquake (M 7.4), North Anatolian Fault, Bull. \
Seism. Soc. Am. 92, 43-60.
    29.33760 40.70985 0
    29.51528 40.72733 0
    29.51528 40.72933 20
    29.33760 40.71185 20
    29.33760 40.70985 0
    >
    29.61152 40.70513 0
    29.87519 40.74903 0
    29.87519 40.75103 20
    29.61152 40.70713 20
    29.61152 40.70513 0
    >
    29.88662 40.72582 0
    30.11126 40.72336 0
    30.19265 40.73432 0
    30.19265 40.73632 20
    30.11126 40.72536 20
    29.88662 40.72782 20
    29.88662 40.72582 0
    >
    30.30494 40.71210 0
    30.46540 40.71081 0
    30.56511 40.70739 0
    30.56511 40.70939 20
    30.46540 40.71281 20
    30.30494 40.71410 20
    30.30494 40.71210 0
    >
    30.57658 40.71621 0
    30.63731 40.70068 0
    30.63731 40.70268 20
    30.57658 40.71821 20
    30.57658 40.71621 0
    >
    30.72900 40.69947 0
    30.93655 40.79654 0
    30.93655 40.79854 20
    30.72900 40.70147 20
    30.72900 40.69947 0
    >
    30.94688 40.80199 0
    31.01799 40.84501 0
    31.01799 40.84701 20
    30.94688 40.80399 20
    30.94688 40.80199 0"""  # noqa

    # Rupture requires an origin even when not used:
    origin = Origin({
        'id': 'test',
        'lon': 0,
        'lat': 0,
        'depth': 5.0,
        'mag': 7.0,
        'netid': 'us',
        'network': '',
        'locstring': '',
        'time': HistoricTime.utcfromtimestamp(time.time())
    })
    cbuf = io.StringIO(rupture_text)
    rupture = get_rupture(origin, cbuf)
    strike = rupture.getStrike()
    np.testing.assert_allclose(strike, -100.46, atol=0.01)
    dip = rupture.getDip()
    np.testing.assert_allclose(dip, 89.40, atol=0.01)
    L = rupture.getLength()
    np.testing.assert_allclose(L, 119.56, atol=0.01)
    W = rupture.getWidth()
    np.testing.assert_allclose(W, 20.0, atol=0.01)
    nq = rupture.getNumQuads()
    np.testing.assert_allclose(nq, 9)
    ng = rupture.getNumGroups()
    np.testing.assert_allclose(ng, 7)
    sind = rupture._getGroupIndex()
    np.testing.assert_allclose(sind, [0, 1, 2, 2, 3, 3, 4, 5, 6])
    ztor = rupture.getDepthToTop()
    np.testing.assert_allclose(ztor, 0, atol=0.01)
    itl = rupture.getIndividualTopLengths()
    itl_d = np.array([
        15.13750778, 22.80237887, 18.98053425, 6.98263853, 13.55978731,
        8.43444811, 5.41399812, 20.57788056, 7.66869463
    ])
    np.testing.assert_allclose(itl, itl_d, atol=0.01)
    iw = rupture.getIndividualWidths()
    iw_d = np.array([
        20.00122876, 20.00122608, 20.00120173, 20.00121028, 20.00121513,
        20.00121568, 20.00107293, 20.00105498, 20.00083348
    ])
    np.testing.assert_allclose(iw, iw_d, atol=0.01)
    lats = rupture.lats
    lats_d = np.array([
        40.72733, 40.70985, 40.71185, 40.72932969, 40.72733, np.nan, 40.74903,
        40.70513, 40.70713, 40.75102924, 40.74903, np.nan, 40.72336, 40.72582,
        40.72336, 40.72536, 40.72782, 40.72536004, 40.72336, np.nan, 40.71081,
        40.7121, 40.71081, 40.71281, 40.7141, 40.71281002, 40.71081, np.nan,
        40.70068, 40.71621, 40.71821, 40.70268025, 40.70068, np.nan, 40.79654,
        40.69947, 40.70147, 40.79853872, 40.79654, np.nan, 40.84501, 40.80199,
        40.80399, 40.84700952, 40.84501, np.nan
    ])
    np.testing.assert_allclose(lats, lats_d, atol=0.001)
    lons = rupture.lons
    lons_d = np.array([
        29.51528, 29.3376, 29.3376, 29.51528005, 29.51528, np.nan, 29.87519,
        29.61152, 29.61152, 29.87519021, 29.87519, np.nan, 30.11126, 29.88662,
        30.11126, 30.11126, 29.88662, 30.11126, 30.11126, np.nan, 30.4654,
        30.30494, 30.4654, 30.4654, 30.30494, 30.4654, 30.4654, np.nan,
        30.63731, 30.57658, 30.57658, 30.63731011, 30.63731, np.nan, 30.93655,
        30.729, 30.729, 30.93655103, 30.93655, np.nan, 31.01799, 30.94688,
        30.94688, 31.0179905, 31.01799, np.nan
    ])

    np.testing.assert_allclose(lons, lons_d, atol=0.001)
Example 7
def test_northridge():
    rupture_text = """# Source: Wald, D. J., T. H. Heaton, and K. W. Hudnut \
(1996). The Slip History of the 1994 Northridge, California, Earthquake \
Determined from Strong-Motion, Teleseismic, GPS, and Leveling Data, Bull. \
Seism. Soc. Am. 86, S49-S70.
    -118.421 34.315  5.000
    -118.587 34.401  5.000
    -118.693 34.261 20.427
    -118.527 34.175 20.427
    -118.421 34.315 5.000
    """  # noqa

    # Rupture requires an origin even when not used:
    origin = Origin({
        'id': 'test',
        'lon': 0,
        'lat': 0,
        'depth': 5.0,
        'mag': 7.0,
        'netid': 'us',
        'network': '',
        'locstring': '',
        'time': HistoricTime.utcfromtimestamp(time.time())
    })

    cbuf = io.StringIO(rupture_text)
    rupture = get_rupture(origin, cbuf)
    strike = rupture.getStrike()
    np.testing.assert_allclose(strike, 122.06, atol=0.01)
    dip = rupture.getDip()
    np.testing.assert_allclose(dip, 40.21, atol=0.01)
    L = rupture.getLength()
    np.testing.assert_allclose(L, 17.99, atol=0.01)
    W = rupture.getWidth()
    np.testing.assert_allclose(W, 23.94, atol=0.01)
    nq = rupture.getNumQuads()
    np.testing.assert_allclose(nq, 1)
    ng = rupture.getNumGroups()
    np.testing.assert_allclose(ng, 1)
    nd = rupture.getDeps()
    np.testing.assert_allclose(nd, [5.0, 5.0, 20.427, 20.427, np.nan])
    sind = rupture._getGroupIndex()
    np.testing.assert_allclose(sind, [0])
    ztor = rupture.getDepthToTop()
    np.testing.assert_allclose(ztor, 5, atol=0.01)
    itl = rupture.getIndividualTopLengths()
    np.testing.assert_allclose(itl, 17.99, atol=0.01)
    iw = rupture.getIndividualWidths()
    np.testing.assert_allclose(iw, 23.94, atol=0.01)
    lats = rupture.lats
    lats_d = np.array([34.401, 34.315, 34.175, 34.261, 34.401, np.nan])
    np.testing.assert_allclose(lats, lats_d, atol=0.01)
    lons = rupture.lons
    lons_d = np.array(
        [-118.587, -118.421, -118.527, -118.693, -118.587, np.nan])
    np.testing.assert_allclose(lons, lons_d, atol=0.01)
    ln, lt, de = rupture.getRuptureAsArrays()
    np.testing.assert_allclose(
        ln,
        np.array([-118.421, -118.587, -118.693, -118.527, np.nan]),
        atol=0.01)
    np.testing.assert_allclose(lt,
                               np.array(
                                   [34.315, 34.401, 34.261, 34.175, np.nan]),
                               atol=0.01)
    np.testing.assert_allclose(de, [5.0, 5.0, 20.427, 20.427, np.nan])
    mesh = rupture.getRuptureAsMesh()
    np.testing.assert_allclose(
        mesh.lons, [-118.421, -118.587, -118.693, -118.527, np.nan])
    np.testing.assert_allclose(mesh.lats,
                               [34.315, 34.401, 34.261, 34.175, np.nan])
    np.testing.assert_allclose(mesh.depths, [5., 5., 20.427, 20.427, np.nan])
Example 8
def test_QuadRupture():

    # Rupture requires an origin even when not used:
    origin = Origin({
        'id': 'test',
        'lon': 0,
        'lat': 0,
        'depth': 5.0,
        'mag': 7.0,
        'netid': 'us',
        'network': '',
        'locstring': '',
        'time': HistoricTime.utcfromtimestamp(time.time())
    })

    # First with json file
    file = os.path.join(homedir, 'rupture_data/izmit.json')
    rupj = get_rupture(origin, file)
    # Then with text file:
    file = os.path.join(homedir, 'rupture_data/Barkaetal02_fault.txt')
    rupt = get_rupture(origin, file)

    np.testing.assert_allclose(rupj.lats, rupt.lats, atol=1e-5)
    np.testing.assert_allclose(rupj.lons, rupt.lons, atol=1e-5)
    np.testing.assert_allclose(rupj._depth, rupt._depth, atol=1e-5)
    np.testing.assert_allclose(rupt.getArea(), 2391.2822653900268, atol=1e-5)

    target = np.array([
        29.51528, 29.3376, 29.3376, 29.51528005, 29.51528, np.nan, 29.87519,
        29.61152, 29.61152, 29.87519021, 29.87519, np.nan, 30.11126, 29.88662,
        30.11126, 30.11126, 29.88662, 30.11126, 30.11126, np.nan, 30.4654,
        30.30494, 30.4654, 30.4654, 30.30494, 30.4654, 30.4654, np.nan,
        30.63731, 30.57658, 30.57658, 30.63731011, 30.63731, np.nan, 30.93655,
        30.729, 30.729, 30.93655103, 30.93655, np.nan, 31.01799, 30.94688,
        30.94688, 31.0179905, 31.01799, np.nan
    ])
    np.testing.assert_allclose(rupj.lons, target, atol=1e-5)
    target = np.array([
        40.72733, 40.70985, 40.71185, 40.72932969, 40.72733, np.nan, 40.74903,
        40.70513, 40.70713, 40.75102924, 40.74903, np.nan, 40.72336, 40.72582,
        40.72336, 40.72536, 40.72782, 40.72536004, 40.72336, np.nan, 40.71081,
        40.7121, 40.71081, 40.71281, 40.7141, 40.71281002, 40.71081, np.nan,
        40.70068, 40.71621, 40.71821, 40.70268025, 40.70068, np.nan, 40.79654,
        40.69947, 40.70147, 40.79853872, 40.79654, np.nan, 40.84501, 40.80199,
        40.80399, 40.84700952, 40.84501, np.nan
    ])
    np.testing.assert_allclose(rupj.lats, target, atol=1e-5)
    target = np.array([
        -0.00000000e+00, -0.00000000e+00, 2.00000000e+01, 1.99999325e+01,
        -0.00000000e+00, np.nan, -9.31322575e-13, -0.00000000e+00,
        2.00000000e+01, 1.99998304e+01, -9.31322575e-13, np.nan,
        9.31322575e-13, -0.00000000e+00, 9.31322575e-13, 2.00000000e+01,
        2.00000000e+01, 2.00000095e+01, 9.31322575e-13, np.nan,
        -0.00000000e+00, -0.00000000e+00, -0.00000000e+00, 2.00000000e+01,
        2.00000000e+01, 2.00000050e+01, -0.00000000e+00, np.nan,
        -0.00000000e+00, -0.00000000e+00, 2.00000000e+01, 2.00000600e+01,
        -0.00000000e+00, np.nan, -0.00000000e+00, -0.00000000e+00,
        2.00000000e+01, 1.99996249e+01, -0.00000000e+00, np.nan,
        -0.00000000e+00, -0.00000000e+00, 2.00000000e+01, 1.99998338e+01,
        -0.00000000e+00, np.nan
    ])
    np.testing.assert_allclose(rupj.depths, target, atol=1e-5)
Example 9
def test_EdgeRupture():

    # Rupture requires an origin even when not used:
    origin = Origin({
        'id': 'test',
        'lon': 0,
        'lat': 0,
        'depth': 5.0,
        'mag': 7.0,
        'netid': 'us',
        'network': '',
        'locstring': '',
        'time': HistoricTime.utcfromtimestamp(time.time())
    })

    file = os.path.join(homedir, 'rupture_data/cascadia.json')
    rup = get_rupture(origin, file)
    np.testing.assert_allclose(rup.getArea(), 105635.92827547337)

    # Force read Northridge as EdgeRupture
    file = os.path.join(homedir, 'rupture_data/northridge_fault.txt')
    d = text_to_json(file, new_format=True)
    rupt = EdgeRupture(d, origin)
    strike = rupt.getStrike()
    np.testing.assert_allclose(strike, 121.97, atol=0.01)
    dip = rupt.getDip()
    np.testing.assert_allclose(dip, 40.12, atol=0.01)
    L = rupt.getLength()
    np.testing.assert_allclose(L, 17.99, atol=0.01)
    W = rupt.getWidth()
    np.testing.assert_allclose(W, 23.92, atol=0.01)
    ztor = rupt.getDepthToTop()
    np.testing.assert_allclose(ztor, 5, atol=0.01)

    # And again for the same vertices but reversed order
    file = os.path.join(homedir, 'rupture_data/northridge_fixed_fault.txt')
    d = text_to_json(file, new_format=True)
    rupt = EdgeRupture(d, origin)
    strike = rupt.getStrike()
    np.testing.assert_allclose(strike, 121.97, atol=0.01)
    dip = rupt.getDip()
    np.testing.assert_allclose(dip, 40.12, atol=0.01)
    L = rupt.getLength()
    np.testing.assert_allclose(L, 17.99, atol=0.01)
    W = rupt.getWidth()
    np.testing.assert_allclose(W, 23.92, atol=0.01)
    ztor = rupt.getDepthToTop()
    np.testing.assert_allclose(ztor, 5, atol=0.01)

    # Test for fromArrays method
    toplats = np.array([37.0, 38.0])
    toplons = np.array([-120.0, -120.0])
    topdeps = np.array([0.0, 0.0])
    botlats = copy.copy(toplats)
    botlons = copy.copy(toplons)
    botdeps = np.array([10.0, 10.0])
    erup = EdgeRupture.fromArrays(toplons, toplats, topdeps, botlons, botlats,
                                  botdeps, origin)
    # Error: array lengths differ
    with pytest.raises(Exception) as e:
        qrup = QuadRupture.fromVertices(
            [toplons[0]], [toplats[0]], [topdeps[0]], [toplons[1]],
            [toplats[1]], [topdeps[1]], [botlons[1]], [botlats[1]],
            [botdeps[1]], [botlons[0]], [botlats[0]], [botdeps[0]][:-1],
            origin)
    print(str(e))

    # Error: group index too long
    with pytest.raises(Exception) as e:
        qrup = QuadRupture.fromVertices([toplons[0]], [toplats[0]],
                                        [topdeps[0]], [toplons[1]],
                                        [toplats[1]], [topdeps[1]],
                                        [botlons[1]], [botlats[1]],
                                        [botdeps[1]], [botlons[0]],
                                        [botlats[0]], [botdeps[0]],
                                        origin,
                                        group_index=[0, 0, 0, 0, 0, 0])
    print(str(e))

    qrup = QuadRupture.fromVertices([toplons[0]], [toplats[0]], [topdeps[0]],
                                    [toplons[1]], [toplats[1]], [topdeps[1]],
                                    [botlons[1]], [botlats[1]], [botdeps[1]],
                                    [botlons[0]], [botlats[0]], [botdeps[0]],
                                    origin)
    np.testing.assert_allclose(erup.getArea(), 1108.9414759967776)
    np.testing.assert_allclose(erup.getDepthToTop(), 0)
    np.testing.assert_allclose(erup.getLength(), 111.19492664455889)
    np.testing.assert_allclose(erup.lats,
                               np.array([37., 38., 38., 37., 37., np.nan]))
    np.testing.assert_allclose(
        erup.lons, np.array([-120., -120., -120., -120., -120., np.nan]))
    np.testing.assert_allclose(erup.depths,
                               np.array([0., 0., 10., 10., 0., np.nan]))
    np.testing.assert_allclose(erup._getGroupIndex(), np.array([0., 0.]))
    quads = erup.getQuadrilaterals()
    np.testing.assert_allclose(quads[0][0].x, -120.0)

    # Need to also test the distances with EdgeRupture
    lons = np.linspace(-120.1, -121.0, 10)
    lats = np.linspace(37.0, 38, 10)
    deps = np.zeros_like(lons)
    rrup1, _ = qrup.computeRrup(lons, lats, deps)
    rrup2, _ = erup.computeRrup(lons, lats, deps)
    np.testing.assert_allclose(rrup1, rrup2, atol=2e-2)
    rjb1, _ = qrup.computeRjb(lons, lats, deps)
    rjb2, _ = erup.computeRjb(lons, lats, deps)
    np.testing.assert_allclose(rjb1, rjb2, atol=2e-2)
    gc2 = erup.computeGC2(lons, lats, deps)
    targetRy0 = np.array([0., 0., 0., 0., 0., 0., 0., 0., 0., 0.67335931])
    targetRx = np.array([
        -8.88024949, -17.73390996, -26.56167797, -35.3634266, -44.13902929,
        -52.88835984, -61.61129242, -70.30770154, -78.97746209, -87.6204493
    ])
    np.testing.assert_allclose(gc2['ry0'], targetRy0)
    np.testing.assert_allclose(gc2['rx'], targetRx)
Example 10
def test_rupture_from_dict():

    # Grab an EdgeRupture
    origin = Origin({
        'id': 'test',
        'lat': 0,
        'lon': 0,
        'depth': 5.0,
        'mag': 7.0,
        'netid': 'us',
        'network': '',
        'locstring': '',
        'time': HistoricTime.utcfromtimestamp(time.time())
    })

    file = os.path.join(homedir, 'rupture_data/cascadia.json')
    rup_original = get_rupture(origin, file)
    d = rup_original._geojson
    rup_from_dict = rupture_from_dict(d)
    assert rup_from_dict._mesh_dx == 0.5

    # Specify mesh_dx
    rup_original = get_rupture(origin, file, mesh_dx=1.0)
    d = rup_original._geojson
    rup_from_dict = rupture_from_dict(d)
    assert rup_from_dict._mesh_dx == 1.0

    # Quad rupture
    file = os.path.join(homedir, 'rupture_data/izmit.json')
    rup_original = get_rupture(origin, file)
    d = rup_original._geojson
    rup_from_dict = rupture_from_dict(d)
    assert rup_from_dict.getArea() == rup_original.getArea()
    # Note: there is a slight inconsistency here, because the magnitude is
    # stored under the key 'magnitude' in the izmit file but under 'mag' in
    # the origin, and both keys are retained.

    # Point rupture from json
    file = os.path.join(homedir, 'rupture_data/point.json')
    rup = get_rupture(origin, file)
    assert rup.lats == 0
    assert rup.lons == 0

    # Point rupture
    origin = Origin({
        'id': 'test',
        'lon': -122.5,
        'lat': 37.3,
        'depth': 5.0,
        'mag': 7.0,
        'netid': 'us',
        'network': '',
        'locstring': '',
        'time': HistoricTime.utcfromtimestamp(time.time())
    })

    rup_original = get_rupture(origin)
    d = rup_original._geojson
    rup_from_dict = rupture_from_dict(d)
    assert rup_from_dict.lats == 37.3
    assert rup_from_dict.lons == -122.5

    assert rup_original.getLength() is None
    assert rup_original.getWidth() == constants.DEFAULT_WIDTH
    assert rup_original.getArea() is None
    assert rup_original.getStrike() == constants.DEFAULT_STRIKE
    assert rup_original.getDip() == constants.DEFAULT_DIP
    assert rup_original.getDepthToTop() == constants.DEFAULT_ZTOR
    assert rup_original.getQuadrilaterals() is None
    assert rup_original.depths == 5.0
    # No mech, no tectonic region
    rjb, _ = rup_original.computeRjb(np.array([-122.0]), np.array([37.0]),
                                     np.array([0.0]))
    rrup, _ = rup_original.computeRrup(np.array([-122.0]), np.array([37.0]),
                                       np.array([0.0]))
    if do_tests is True:
        np.testing.assert_allclose([rjb[0], rrup[0]], [42.757296, 46.614723])
    else:
        print(rjb[0], rrup[0])
    # Various combinations of mech and tectonic region...
    rup_original._origin._tectonic_region = 'Active Shallow Crust'
    rup_original._origin.mech = 'ALL'
    rjb, _ = rup_original.computeRjb(np.array([-122.0]), np.array([37.0]),
                                     np.array([0.0]))
    rrup, _ = rup_original.computeRrup(np.array([-122.0]), np.array([37.0]),
                                       np.array([0.0]))
    if do_tests is True:
        np.testing.assert_allclose([rjb[0], rrup[0]], [42.757296, 46.614723])
    else:
        print(rjb[0], rrup[0])
    rup_original._origin.mech = 'RS'
    rjb, _ = rup_original.computeRjb(np.array([-122.0]), np.array([37.0]),
                                     np.array([0.0]))
    rrup, _ = rup_original.computeRrup(np.array([-122.0]), np.array([37.0]),
                                       np.array([0.0]))
    if do_tests is True:
        np.testing.assert_allclose([rjb[0], rrup[0]], [39.779893, 44.033556])
    else:
        print(rjb[0], rrup[0])
    rup_original._origin.mech = 'NM'
    rjb, _ = rup_original.computeRjb(np.array([-122.0]), np.array([37.0]),
                                     np.array([0.0]))
    rrup, _ = rup_original.computeRrup(np.array([-122.0]), np.array([37.0]),
                                       np.array([0.0]))
    if do_tests is True:
        np.testing.assert_allclose([rjb[0], rrup[0]], [40.937772, 45.254891])
    else:
        print(rjb[0], rrup[0])
    rup_original._origin.mech = 'SS'
    rjb, _ = rup_original.computeRjb(np.array([-122.0]), np.array([37.0]),
                                     np.array([0.0]))
    rrup, _ = rup_original.computeRrup(np.array([-122.0]), np.array([37.0]),
                                       np.array([0.0]))
    if do_tests is True:
        np.testing.assert_allclose([rjb[0], rrup[0]], [46.750567, 48.108934])
    else:
        print(rjb[0], rrup[0])
    rup_original._origin._tectonic_region = 'Stable Shallow Crust'
    rup_original._origin.mech = 'ALL'
    rjb, _ = rup_original.computeRjb(np.array([-122.0]), np.array([37.0]),
                                     np.array([0.0]))
    rrup, _ = rup_original.computeRrup(np.array([-122.0]), np.array([37.0]),
                                       np.array([0.0]))
    if do_tests is True:
        np.testing.assert_allclose([rjb[0], rrup[0]], [43.676648, 48.008276])
    else:
        print(rjb[0], rrup[0])
    rup_original._origin.mech = 'RS'
    rjb, _ = rup_original.computeRjb(np.array([-122.0]), np.array([37.0]),
                                     np.array([0.0]))
    rrup, _ = rup_original.computeRrup(np.array([-122.0]), np.array([37.0]),
                                       np.array([0.0]))
    if do_tests is True:
        np.testing.assert_allclose([rjb[0], rrup[0]], [42.445057, 46.865434])
    else:
        print(rjb[0], rrup[0])
    rup_original._origin.mech = 'NM'
    rjb, _ = rup_original.computeRjb(np.array([-122.0]), np.array([37.0]),
                                     np.array([0.0]))
    rrup, _ = rup_original.computeRrup(np.array([-122.0]), np.array([37.0]),
                                       np.array([0.0]))
    if do_tests is True:
        np.testing.assert_allclose([rjb[0], rrup[0]], [43.233314, 47.563079])
    else:
        print(rjb[0], rrup[0])
    rup_original._origin.mech = 'SS'
    rjb, _ = rup_original.computeRjb(np.array([-122.0]), np.array([37.0]),
                                     np.array([0.0]))
    rrup, _ = rup_original.computeRrup(np.array([-122.0]), np.array([37.0]),
                                       np.array([0.0]))
    if do_tests is True:
        np.testing.assert_allclose([rjb[0], rrup[0]], [47.829729, 50.087485])
    else:
        print(rjb[0], rrup[0])
    rup_original._origin._tectonic_region = 'Somewhere Else'
    rup_original._origin.mech = 'ALL'
    rjb, var = rup_original.computeRjb(np.array([-122.0]), np.array([37.0]),
                                       np.array([0.0]))
    rrup, var = rup_original.computeRrup(np.array([-122.0]), np.array([37.0]),
                                         np.array([0.0]))
    if do_tests is True:
        np.testing.assert_allclose([rjb[0], rrup[0]], [42.757296, 46.614723])
    else:
        print(rjb[0], rrup[0])

    # This is just zeroes now, so there's not much to check
    gc2 = rup_original.computeGC2(np.array([-122.0]), np.array([37.0]),
                                  np.array([0.0]))
    assert gc2['rx'][0] == 0