Example 1
def test_history_data_frame():
    # SMOKE TEST
    cassettes, datadir = get_datadir()
    tape_file = os.path.join(cassettes, 'dataframes_history.yaml')
    with vcr.use_cassette(tape_file):
        nc72852151 = get_event_by_id('nc72852151', includesuperseded=True)
        (history, event) = get_history_data_frame(nc72852151, ['shakemap', 'dyfi',
                                                               'losspager', 'oaf',
                                                               'finite-fault',
                                                               'focal-mechanism',
                                                               'ground-failure',
                                                               'moment-tensor',
                                                               'phase-data',
                                                               'origin'])
        us10008e3k = get_event_by_id('us10008e3k', includesuperseded=True)
        (history, event) = get_history_data_frame(us10008e3k, ['shakemap', 'dyfi',
                                                               'oaf',
                                                               'finite-fault',
                                                               'focal-mechanism',
                                                               'moment-tensor'])
        us10007uph = get_event_by_id('us10007uph', includesuperseded=True)
        (history, event) = get_history_data_frame(us10007uph, ['shakemap', 'dyfi',
                                                               'oaf',
                                                               'finite-fault',
                                                               'focal-mechanism',
                                                               'ground-failure',
                                                               'moment-tensor',
                                                               'phase-data'])
Example 2
def test_geocoded():
    # first, test event with 10k and 1k geojson data
    eventid = 'ci14607652'
    detail = get_event_by_id(eventid)
    df, msg = _get_dyfi_dataframe(detail)
    np.testing.assert_almost_equal(df['intensity'].sum(), 4510.1)

    # next, test event with only geocoded (?) resolution text data
    eventid = 'ci14745580'
    detail = get_event_by_id(eventid)
    df, msg = _get_dyfi_dataframe(detail)
    np.testing.assert_almost_equal(df['intensity'].sum(), 800.4)
Example 3
def main(args):
    detail = get_event_by_id(args.eventid)
    if 'shakemap' not in detail.products:
        print(f'No shakemap exists for event {args.eventid}. Exiting.')
        sys.exit(1)

    outdir_p = pathlib.Path(args.outdir)
    if not outdir_p.exists():
        outdir_p.mkdir(parents=True)
    products = detail.getProducts(args.product, source=args.source)
    if not len(products):
        print(f'No shakemap found for source {args.source}. Exiting.')
        sys.exit(1)

    product = products[0]
    eventdir = outdir_p / args.eventid / args.product
    if not eventdir.exists():
        eventdir.mkdir(parents=True)
    nfiles = 0
    for content in product.contents:
        fname = content.replace('/', pathlib.os.sep)
        outfile = eventdir / fname
        fdir = outfile.parent
        if not fdir.exists():
            fdir.mkdir(parents=True)
        print(f'Downloading {content}...')
        product.getContent(content, outfile)
        nfiles += 1

    print(f'Downloaded {nfiles} files to {eventdir}.')
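A minimal sketch of the command-line wiring that could drive main() above. The argument names (eventid, outdir, product, source) are inferred from the attributes the function reads and are assumptions, not part of the original listing.

import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Download product contents for a ComCat event.')
    parser.add_argument('eventid', help='ComCat event ID.')
    parser.add_argument('outdir', help='Directory for downloaded files.')
    parser.add_argument('--product', default='shakemap',
                        help='Product type to download.')
    parser.add_argument('--source', default='preferred',
                        help='Contributing network (assumed default).')
    main(parser.parse_args())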
Example 4
def test_product():
    cassettes, datadir = get_datadir()
    tape_file = os.path.join(cassettes, 'classes_product.yaml')
    with vcr.use_cassette(tape_file):
        eventid = 'ci3144585'  # northridge
        event = get_event_by_id(eventid)
        product = event.getProducts('shakemap')[0]
        assert product.preferred_weight == 100000000
        assert product.source == 'atlas'
        assert product.update_time >= datetime(2017, 4, 12, 6, 25, 42, 120000)
        pnames = product.getContentsMatching('grid.xml')
        url = product.getContentURL('grid.xml')
        assert url == ('https://earthquake.usgs.gov/archive/product/'
                       'shakemap/ci3144585/atlas/1594159786829/download/'
                       'grid.xml')
        assert len(product.getContentsMatching('foo')) == 0
        assert len(pnames) == 1
        cmpstr = ('Product shakemap from atlas updated '
                  '2020-07-07 22:09:46.829000 containing '
                  '58 content files.')
        assert str(product) == cmpstr
        assert product.hasProperty('maxmmi')
        assert 'maxmmi' in product.properties
        assert product['maxmmi'] >= '8.6'
        assert 'download/cont_mi.json' in product.contents
        assert product.getContentName('grid.xml') == 'grid.xml'
        assert product.getContentName('foo') is None
        assert product.getContentURL('foo') is None

        try:
            product.getContent('foo', filename=None)
            assert 1 == 2
        except ContentNotFoundError:
            pass

        try:
            product['foo']
            assert 1 == 2
        except AttributeError:
            pass

        try:
            handle, tfilename = tempfile.mkstemp()
            os.close(handle)
            product.getContent('info.json', tfilename)
            with open(tfilename, 'rt') as f:
                jdict = json.load(f)
            assert float(jdict['input']['event_information']['depth']) > 18.0
        except Exception:
            raise Exception('Failure to download Product content file')
        finally:
            os.remove(tfilename)

        # test getting content as a string.
        infobytes, url = product.getContentBytes('info.json')
        infostring = infobytes.decode('utf-8')
        jdict = json.loads(infostring)
        eid = jdict['input']['event_information']['event_id']
        assert eid == 'ci3144585'
Example 5
def main():
    desc = "Program to get CSV file of station data from ShakeMap."
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument('eventid', help='Comcat event ID.')
    args = parser.parse_args()
    evid = args.eventid

    # Download stationlist
    event = get_event_by_id(evid)
    shakemap = event.getProducts('shakemap')[0]
    json_file = evid + "_stationlist.json"
    shakemap.getContent('stationlist.json', json_file)
    with open(json_file) as f:
        station_dict = json.load(f)

    # Extract info in tabular form
    out_dict = OrderedDict()
    out_dict['lat'] = []
    out_dict['lon'] = []
    out_dict['rjb'] = []
    out_dict['repi'] = []
    out_dict['pga_percent_g'] = []
    out_dict['pgv_cm_s'] = []
    for f in station_dict['features']:
        if f['properties']['station_type'] == 'seismic':
            out_dict['lon'].append(f['geometry']['coordinates'][0])
            out_dict['lat'].append(f['geometry']['coordinates'][1])
            out_dict['rjb'].append(f['properties']['distances']['rjb'])
            out_dict['repi'].append(f['properties']['distances']['repi'])
            out_dict['pga_percent_g'].append(f['properties']['pga'])
            out_dict['pgv_cm_s'].append(f['properties']['pgv'])

    out_file = evid + "_stationlist.csv"
    out_df = pd.DataFrame(out_dict)
    out_df.to_csv(out_file, index=False)
Example 6
def test_phase_dataframe():
    cassettes, datadir = get_datadir()
    tape_file = os.path.join(cassettes, "dataframes_phase.yaml")
    with vcr.use_cassette(tape_file, record_mode="new_episodes"):
        detail = get_event_by_id("us1000778i")  # 2016 NZ event
        df = get_phase_dataframe(detail, catalog="us")
        assert len(df) == 174
Example 7
def sort_shift(df, st, dayst, sr):
    st.sort()
    dayst.sort()
    # check to make sure same length
    if len(st) > len(dayst):
        # remove extra traces from st
        for tr in st:
            if len(
                    dayst.select(station=tr.stats.station,
                                 channel=tr.stats.channel)) == 0:
                st.remove(tr)
    # gets number of samples between template time and event origin time
    origintime = UTCDateTime(df['Date'] + 'T' + df['Time'])
    regional = df['Regional']
    eventid = regional + str(df['ID'])
    detail = get_event_by_id(eventid, includesuperseded=True)
    phases = get_phase_dataframe(detail, catalog=regional)
    phases = phases[phases['Status'] == 'manual']
    shifts = np.zeros(len(st), dtype=int)
    for ii in range(len(phases)):
        net = phases.iloc[ii]['Channel'].split('.')[0]
        sta = phases.iloc[ii]['Channel'].split('.')[1]
        comp = phases.iloc[ii]['Channel'].split('.')[2]
        arr = UTCDateTime(phases.iloc[ii]['Arrival Time'])
        shift = int(np.round((arr - origintime) * sr))
        for jj in range(len(st)):
            if sta == st[jj].stats.station and comp == st[jj].stats.channel:
                print(sta + " " + comp + " " + str(shift))
                shifts[jj] = shift
    return shifts, st, dayst, phases
Example 8
def test_product():
    cassettes, datadir = get_datadir()
    tape_file = os.path.join(cassettes, "classes_product.yaml")
    with vcr.use_cassette(tape_file):
        eventid = "ci3144585"  # northridge
        event = get_event_by_id(eventid)
        product = event.getProducts("shakemap")[0]
        assert product.preferred_weight == 100000000
        assert product.source == "atlas"
        assert product.update_time >= datetime(2017, 4, 12, 6, 25, 42, 120000)
        pnames = product.getContentsMatching("grid.xml")
        url = product.getContentURL("grid.xml")
        cmpurl = ("https://earthquake.usgs.gov/product/shakemap/"
                  "ci3144585/atlas/1594159786829/download/grid.xml")
        assert url == cmpurl
        assert len(product.getContentsMatching("foo")) == 0
        assert len(pnames) == 1
        cmpstr = ("Product shakemap from atlas updated "
                  "2020-07-07 22:09:46.829000 containing "
                  "58 content files.")
        assert str(product) == cmpstr
        assert product.hasProperty("maxmmi")
        assert "maxmmi" in product.properties
        assert product["maxmmi"] >= "8.6"
        assert "download/cont_mi.json" in product.contents
        assert product.getContentName("grid.xml") == "grid.xml"
        assert product.getContentName("foo") is None
        assert product.getContentURL("foo") is None

        try:
            product.getContent("foo", filename=None)
            assert 1 == 2
        except ContentNotFoundError:
            pass

        try:
            product["foo"]
            assert 1 == 2
        except AttributeError:
            pass

        try:
            handle, tfilename = tempfile.mkstemp()
            os.close(handle)
            product.getContent("info.json", tfilename)
            f = open(tfilename, "rt")
            jdict = json.load(f)
            f.close()
            assert float(jdict["input"]["event_information"]["depth"]) > 18.0
        except Exception:
            raise Exception("Failure to download Product content file")
        finally:
            os.remove(tfilename)

        # test getting content as a string.
        infobytes, url = product.getContentBytes("info.json")
        infostring = infobytes.decode("utf-8")
        jdict = json.loads(infostring)
        eid = jdict["input"]["event_information"]["event_id"]
        assert eid == "ci3144585"
Example 9
def get_event_dict(eventid):
    """Get event dictionary from ComCat using event ID.

    Args:
        eventid (str):
            Event ID that can be found in ComCat.

    Returns:
        dict: Dictionary containing fields:
            - id String event ID
            - time UTCDateTime of event origin time.
            - lat Origin latitude.
            - lon Origin longitude.
            - depth Origin depth.
            - magnitude Origin magnitude.
    """
    dict_or_id = get_event_by_id(eventid)
    if dict_or_id.id != eventid:
        logging.warning(
            "Event ID %s is no longer preferred. Updating with the "
            "preferred event ID: %s." % (eventid, dict_or_id.id)
        )
    event_dict = {
        "id": dict_or_id.id,
        "time": UTCDateTime(dict_or_id.time),
        "lat": dict_or_id.latitude,
        "lon": dict_or_id.longitude,
        "depth": dict_or_id.depth,
        "magnitude": dict_or_id.magnitude,
        "magnitude_type": dict_or_id._jdict["properties"]["magType"],
    }
    return event_dict
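A short usage sketch for get_event_dict(), assuming network access to ComCat; the event ID is the Northridge ID used in other examples here.

event = get_event_dict('ci3144585')
print(event['id'], event['time'],
      event['magnitude'], event['magnitude_type'])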
Example 10
    def execute(self):
        """
        Write dyfi_dat.xml data file.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        _, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            os.makedirs(datadir)

        # try to find the event by our event id
        try:
            detail = get_event_by_id(self._eventid)
        except Exception as e:
            fmt = 'Could not retrieve DYFI data for %s - error "%s"'
            self.logger.warning(fmt % (self._eventid, str(e)))
            return

        dataframe, msg = _get_dyfi_dataframe(detail)
        if dataframe is None:
            self.logger.info(msg)
            return

        reference = 'USGS Did You Feel It? System'
        xmlfile = os.path.join(datadir, 'dyfi_dat.xml')
        dataframe_to_xml(dataframe, xmlfile, reference)
        self.logger.info('Wrote %i DYFI records to %s' %
                         (len(dataframe), xmlfile))
Example 11
    def execute(self):
        """
        Write ugroundmotions_dat.json data file.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        _, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current')
        if not os.path.isdir(datadir):
            os.makedirs(datadir)

        # try to find the event by our event id
        try:
            detail = get_event_by_id(self._eventid, host=HOST)
            if not detail.hasProduct('ground-motion'):
                return
            groundmotion = detail.getProducts('ground-motion')[0]
            fname = 'groundmotions_dat.json'
            gbytes, gurl = groundmotion.getContentBytes(fname)
            outname = os.path.join(datadir, 'ugroundmotions_dat.json')
            with open(outname, 'wt') as f:
                f.write(gbytes.decode('utf-8'))
            self.logger.info('Created ground motions data file %s' % outname)
        except Exception as e:
            fmt = 'Could not retrieve ground motion data for %s - error "%s"'
            self.logger.warning(fmt % (self._eventid, str(e)))
            return
Example 12
def get_event_dict(eventid):
    """Get event dictionary from ComCat using event ID.

    Args:
        eventid (str): Event ID that can be found in ComCat.

    Returns:
        dict: Dictionary containing fields:
            - id String event ID
            - time UTCDateTime of event origin time.
            - lat Origin latitude.
            - lon Origin longitude.
            - depth Origin depth.
            - magnitude Origin magnitude.
    """
    dict_or_id = get_event_by_id(eventid)
    event_dict = {
        'id': dict_or_id.id,
        'time': UTCDateTime(dict_or_id.time),
        'lat': dict_or_id.latitude,
        'lon': dict_or_id.longitude,
        'depth': dict_or_id.depth,
        'magnitude': dict_or_id.magnitude,
    }
    return event_dict
Example 13
def test_magnitude_dataframe():
    datadir = get_datadir()
    tape_file = os.path.join(datadir, 'vcr_magnitude_dataframe.yaml')
    with vcr.use_cassette(tape_file):
        detail = get_event_by_id('us1000778i')  # 2016 NZ event
        df = get_phase_dataframe(detail, catalog='us')
        assert len(df) == 174
Example 14
def main(args):

    failed = args.failed
    queried = args.queried

    searchlist = []
    idlist = []
    badlist = []
    for filename in os.listdir(failed):
        print('searching through terminal output %s' % filename)
        with open('%s/%s' % (failed, filename)) as fp:
            for cnt, line in enumerate(fp):
                if line[0:6] == 'Failed':
                    f, t, g, d, v, o, e, id_no = line.split()
                    try:
                        searchobj = get_event_by_id(id_no)
                        searchlist.append(searchobj)
                        idlist.append(id_no)
                    except Exception:
                        badlist.append(id_no)

    print('getting details for %i events: ' % len(idlist), idlist)
    print('these events %i still failed: ' % len(badlist), badlist)
    detaildf = get_summary_data_frame(searchlist)
    # detaildf = get_detail_data_frame(searchlist, get_tensors='preferred')  # ,get_moment_supplement=True)
    detaildf.to_csv('%s/%sfailed.csv' % (queried, queried),
                    header=True, index=False, na_rep=np.nan)
Example 15
def get_event_dict(eventid):
    """Get event dictionary from ComCat using event ID.

    Args:
        eventid (str):
            Event ID that can be found in ComCat.

    Returns:
        dict: Dictionary containing fields:
            - id String event ID
            - time UTCDateTime of event origin time.
            - lat Origin latitude.
            - lon Origin longitude.
            - depth Origin depth.
            - magnitude Origin magnitude.
    """
    dict_or_id = get_event_by_id(eventid)
    if dict_or_id.id != eventid:
        logging.warning(
            'Event ID %s is no longer preferred. Updating with the '
            'preferred event ID: %s.' % (eventid, dict_or_id.id))
    event_dict = {
        'id': dict_or_id.id,
        'time': UTCDateTime(dict_or_id.time),
        'lat': dict_or_id.latitude,
        'lon': dict_or_id.longitude,
        'depth': dict_or_id.depth,
        'magnitude': dict_or_id.magnitude,
        'magnitude_type': dict_or_id._jdict['properties']['magType']
    }
    return event_dict
Example 16
def test_magnitude_dataframe():
    cassettes, datadir = get_datadir()
    tape_file = os.path.join(cassettes, 'dataframes_magnitude.yaml')
    with vcr.use_cassette(tape_file):
        detail = get_event_by_id('us1000778i')  # 2016 NZ event
        df = get_phase_dataframe(detail, catalog='us')
        assert len(df) == 174
Example 17
def main(args):

    failed = args.failed
    queried = args.queried

    searchlist = []
    idlist = []
    badlist = []
    for filename in os.listdir(failed):
        print('searching through terminal output %s' % filename)
        with open('%s/%s' % (failed, filename)) as fp:
            for cnt, line in enumerate(fp):
                if line[0:6] == 'Failed':
                    f, t, g, d, v, o, e, id_no = line.split()
                    try:
                        searchobj = get_event_by_id(id_no)
                        searchlist.append(searchobj)
                        idlist.append(id_no)
                    except Exception:
                        badlist.append(id_no)

    print('getting details for %i events: ' % (len(idlist)), idlist)
    print('these events %i still failed: ' % (len(badlist)), badlist)
    detaildf = get_summary_data_frame(searchlist)
    #detaildf = get_detail_data_frame(searchlist,get_tensors='preferred')#,get_moment_supplement=True)
    detaildf.to_csv('%s/%sfailed.csv' % (queried, queried),
                    header=True,
                    index=False,
                    na_rep=np.nan)
Example 18
def test_phase_dataframe():
    datadir = get_datadir()
    tape_file = os.path.join(datadir, 'vcr_phase_dataframe.yaml')
    # with vcr.use_cassette(tape_file):
    detail = get_event_by_id('us1000778i')  # 2016 NZ event
    df = get_magnitude_data_frame(detail, 'us', 'mb')
    np.testing.assert_almost_equal(df['Magnitude'].sum(), 756.8100000000001)
Example 19
def test_history_data_frame():
    # SMOKE TEST
    cassettes, datadir = get_datadir()
    tape_file = os.path.join(cassettes, "dataframes_history.yaml")
    products = [
        "shakemap",
        "dyfi",
        "losspager",
        "oaf",
        "finite-fault",
        "focal-mechanism",
        "ground-failure",
        "moment-tensor",
        "phase-data",
        "origin",
    ]

    with vcr.use_cassette(tape_file, record_mode="new_episodes"):
        nc72852151 = get_event_by_id("nc72852151", includesuperseded=True)
        (history, event) = get_history_data_frame(nc72852151, products)
        us10008e3k = get_event_by_id("us10008e3k", includesuperseded=True)
        (history, event) = get_history_data_frame(
            us10008e3k,
            [
                "shakemap",
                "dyfi",
                "oaf",
                "finite-fault",
                "focal-mechanism",
                "moment-tensor",
            ],
        )
        us10007uph = get_event_by_id("us10007uph", includesuperseded=True)
        (history, event) = get_history_data_frame(
            us10007uph,
            [
                "shakemap",
                "dyfi",
                "oaf",
                "finite-fault",
                "focal-mechanism",
                "ground-failure",
                "moment-tensor",
                "phase-data",
            ],
        )
Example 20
def test_magnitude_dataframe():
    cassettes, datadir = get_datadir()
    tape_file = os.path.join(cassettes, "dataframes_magnitude.yaml")
    with vcr.use_cassette(tape_file, record_mode="new_episodes"):
        detail = get_event_by_id("us1000778i")  # 2016 NZ event
        df = get_magnitude_data_frame(detail, "us", "mb")
        np.testing.assert_almost_equal(df["Magnitude"].sum(),
                                       756.8100000000001)
Example 21
def test_phase_dataframe():
    cassettes, datadir = get_datadir()
    tape_file = os.path.join(cassettes, 'dataframes_phase.yaml')
    with vcr.use_cassette(tape_file):
        detail = get_event_by_id('us1000778i')  # 2016 NZ event
        df = get_magnitude_data_frame(detail, 'us', 'mb')
        np.testing.assert_almost_equal(df['Magnitude'].sum(),
                                       756.8100000000001)
Example 22
def test_comcat_data():

    datadir = get_datadir()
    config = get_config()

    # test extraction from comcat event data. The VCR files are
    # example Comcat event datastreams.
    eventid = 'ci14607652'

    # This event has both geo_1km and geo_10km
    tape_file1 = os.path.join(datadir, 'vcr_comcat_geojson.yaml')

    iparser = IntensityParser(eventid=eventid, config=config, network='neic')

    with vcr.use_cassette(tape_file1):
        detail = get_event_by_id(eventid)

    df, msg = comcat.get_dyfi_dataframe_from_comcat(iparser, detail)
    df, msg = iparser.postprocess(df, 'neic')

    np.testing.assert_almost_equal(df['INTENSITY'].sum(), 4510.1)

    reference = 'USGS Did You Feel It? System'
    tempdir = tempfile.mkdtemp(prefix='tmp.', dir=datadir)
    outfile = os.path.join(tempdir, 'dyfi_dat.xml')
    dataframe_to_xml(df, outfile, reference)

    # For debugging save the output with this line:
    # dataframe_to_xml(df, datadir + '/tmp.keepthis.xml', reference)

    outfilesize = os.path.getsize(outfile)
    # Longer size is for file with nresp field
    assert outfilesize == 183953 or outfilesize == 172852
    rmtree(tempdir)

    # This event has only text data
    eventid = 'ci14745580'
    tape_file2 = os.path.join(datadir, 'vcr_comcat_txt.yaml')

    with vcr.use_cassette(tape_file2):
        detail = get_event_by_id(eventid)
        df, msg = comcat.get_dyfi_dataframe_from_comcat(iparser, detail)
        df, msg = iparser.postprocess(df, 'neic')

    np.testing.assert_almost_equal(df['INTENSITY'].sum(), 800.4)
Example 23
def test_moment_supplement():
    cassettes, datadir = get_datadir()
    tape_file = os.path.join(cassettes, "classes_moment.yaml")
    with vcr.use_cassette(tape_file):
        eventid = "us2000ar20"  # 2017 M7.1 Mexico City
        detail = get_event_by_id(eventid)
        edict = detail.toDict(get_moment_supplement=True,
                              get_tensors="preferred")
        assert edict["us_Mww_percent_double_couple"] == 0.9992
Example 24
def test_moment_supplement():
    datadir = get_datadir()
    tape_file = os.path.join(datadir, 'vcr_moment.yaml')
    with vcr.use_cassette(tape_file):
        eventid = 'us2000ar20'  # 2017 M7.1 Mexico City
        detail = get_event_by_id(eventid)
        edict = detail.toDict(get_moment_supplement=True,
                              get_tensors='preferred')
        assert edict['us_Mww_percent_double_couple'] == 0.9992
Example 25
def test_product():
    datadir = get_datadir()
    tape_file = os.path.join(datadir, 'vcr_product.yaml')
    with vcr.use_cassette(tape_file):
        eventid = 'ci3144585'  # northridge
        event = get_event_by_id(eventid)
        product = event.getProducts('shakemap')[0]
        assert product.preferred_weight == 100000000
        assert product.source == 'atlas'
        assert product.update_time >= datetime(2017, 4, 12, 6, 25, 42, 120000)
        pnames = product.getContentsMatching('grid.xml')
        url = product.getContentURL('grid.xml')
        assert url == ('https://earthquake.usgs.gov/archive/product/'
                       'shakemap/atlas19940117123055/atlas/'
                       '1491978342120/download/grid.xml')
        assert len(product.getContentsMatching('foo')) == 0
        assert len(pnames) > 1
        cmpstr = ('Product shakemap from atlas updated '
                  '2017-04-12 06:25:42.120000 containing '
                  '63 content files.')
        assert str(product) == cmpstr
        assert product.hasProperty('maxmmi')
        assert 'maxmmi' in product.properties
        assert product['maxmmi'] == '8.6'
        assert 'download/cont_mi.kmz' in product.contents
        assert product.getContentName('grid.xml') == 'grid.xml'
        assert product.getContentName('foo') is None
        assert product.getContentURL('foo') is None

        try:
            product.getContent('foo', filename=None)
            assert 1 == 2
        except ContentNotFoundError:
            pass

        try:
            product['foo']
            assert 1 == 2
        except AttributeError:
            pass

        try:
            handle, tfilename = tempfile.mkstemp()
            os.close(handle)
            product.getContent('info.json', tfilename)
            with open(tfilename, 'rt') as f:
                jdict = json.load(f)
            assert jdict['input']['event_information']['depth'] == 19
        except Exception:
            raise Exception('Failure to download Product content file')
        finally:
            os.remove(tfilename)

        # test getting content as a string.
        infobytes, url = product.getContentBytes('info.json')
        infostring = infobytes.decode('utf-8')
        jdict = json.loads(infostring)
        eid = jdict['input']['event_information']['event_id']
        assert eid == '19940117123055'
Example 26
def retrieve_usgs_catalog(**kwargs):
    """
    Wrapper around obspy.clients.fdsn.Client and libcomcat (USGS) to retrieve
    a full catalog, including phase picks (which are otherwise not supported
    by the USGS FDSN implementation).

    :param kwargs: Will be passed to the Client (e.g. minlongitude, maxmagnitude
        etc...)
    :return: obspy.core.events.Catalog
    """
    cli = Client('https://earthquake.usgs.gov')
    cat = cli.get_events(**kwargs)
    # Now loop over each event and grab the phase dataframe using libcomcat
    for ev in cat:
        print(ev.resource_id.id)
        eid = ev.resource_id.id.split('=')[-2].split('&')[0]
        detail = get_event_by_id(eid, includesuperseded=True)
        phase_df = get_phase_dataframe(detail)
        o = ev.preferred_origin()
        for i, phase_info in phase_df.iterrows():
            seed_id = phase_info['Channel'].split('.')
            loc = seed_id[-1]
            if loc == '--':
                loc = ''
            wf_id = WaveformStreamID(network_code=seed_id[0],
                                     station_code=seed_id[1],
                                     location_code=loc,
                                     channel_code=seed_id[2])
            pk = Pick(time=UTCDateTime(phase_info['Arrival Time']),
                      method=phase_info['Status'],
                      waveform_id=wf_id,
                      phase_hint=phase_info['Phase'])
            ev.picks.append(pk)
            arr = Arrival(pick_id=pk.resource_id.id,
                          phase=pk.phase_hint,
                          azimuth=phase_info['Azimuth'],
                          distance=phase_info['Distance'],
                          time_residual=phase_info['Residual'],
                          time_weight=phase_info['Weight'])
            o.arrivals.append(arr)
        # Try to read focal mechanisms/moment tensors
        if 'moment-tensor' in detail.products:
            # Always take MT where available
            mt_xml = detail.getProducts('moment-tensor')[0].getContentBytes(
                'quakeml.xml')[0]
        elif 'focal-mechanism' in detail.products:
            mt_xml = detail.getProducts('focal-mechanism')[0].getContentBytes(
                'quakeml.xml')[0]
        else:
            continue
        mt_ev = read_events(
            io.TextIOWrapper(io.BytesIO(mt_xml), encoding='utf-8'))
        FM = mt_ev[0].focal_mechanisms[0]
        FM.triggering_origin_id = ev.preferred_origin().resource_id.id
        ev.focal_mechanisms = [FM]
    return cat
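A hedged usage sketch for retrieve_usgs_catalog(): the keyword arguments are standard FDSN event-query parameters passed through to obspy's Client.get_events(), and the output filename is illustrative.

from obspy import UTCDateTime

# Small time window around the 2016 NZ event used elsewhere in these examples.
cat = retrieve_usgs_catalog(starttime=UTCDateTime('2016-11-13'),
                            endtime=UTCDateTime('2016-11-14'),
                            minmagnitude=7.0)
# Save the pick- and mechanism-enriched catalog as QuakeML.
cat.write('usgs_catalog.xml', format='QUAKEML')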
Example 27
def test_product():
    datadir = get_datadir()
    tape_file = os.path.join(datadir, 'vcr_product.yaml')
    with vcr.use_cassette(tape_file):
        eventid = 'ci3144585'  # northridge
        event = get_event_by_id(eventid)
        product = event.getProducts('shakemap')[0]
        assert product.preferred_weight == 100000000
        assert product.source == 'atlas'
        assert product.update_time >= datetime(2017, 4, 12, 6, 25, 42, 120000)
        pnames = product.getContentsMatching('grid.xml')
        url = product.getContentURL('grid.xml')
        assert url == ('https://earthquake.usgs.gov/archive/product/'
                       'shakemap/atlas19940117123055/atlas/'
                       '1491978342120/download/grid.xml')
        assert len(product.getContentsMatching('foo')) == 0
        assert len(pnames) > 1
        cmpstr = ('Product shakemap from atlas updated '
                  '2017-04-12 06:25:42.120000 containing '
                  '63 content files.')
        assert str(product) == cmpstr
        assert product.hasProperty('maxmmi')
        assert 'maxmmi' in product.properties
        assert product['maxmmi'] == '8.6'
        assert 'download/cont_mi.kmz' in product.contents
        assert product.getContentName('grid.xml') == 'grid.xml'
        assert product.getContentName('foo') is None
        assert product.getContentURL('foo') is None

        try:
            product.getContent('foo', filename=None)
            assert 1 == 2
        except AttributeError:
            pass

        try:
            product['foo']
            assert 1 == 2
        except AttributeError:
            pass

        try:
            handle, tfilename = tempfile.mkstemp()
            os.close(handle)
            product.getContent('info.json', tfilename)
            with open(tfilename, 'rt') as f:
                jdict = json.load(f)
            assert jdict['input']['event_information']['depth'] == 19
        except Exception:
            raise Exception('Failure to download Product content file')
        finally:
            os.remove(tfilename)

        # test getting content as a string.
        infobytes, url = product.getContentBytes('info.json')
        infostring = infobytes.decode('utf-8')
        jdict = json.loads(infostring)
        eid = jdict['input']['event_information']['event_id']
        assert eid == '19940117123055'
Example 28
def test_get_event():
    eventid = 'ci3144585'
    datadir = get_datadir()
    tape_file = os.path.join(datadir, 'vcr_event.yaml')
    with vcr.use_cassette(tape_file):
        event = get_event_by_id(eventid)

    assert isinstance(event, DetailEvent)
    assert event.id == eventid
    assert (event.latitude, event.longitude) == (34.213, -118.537)
Example 29
def test_get_event():
    eventid = 'ci3144585'
    datadir = get_datadir()
    tape_file = os.path.join(datadir, 'search_id.yaml')
    with vcr.use_cassette(tape_file):
        event = get_event_by_id(eventid)

    assert isinstance(event, DetailEvent)
    assert event.id == eventid
    assert (event.latitude, event.longitude) == (34.213, -118.537)
Example 30
def test_dyfi():
    eventid = 'nc72282711'
    try:
        tdir = tempfile.mkdtemp()
        detail = get_event_by_id(eventid)
        dataframe, msg = _get_dyfi_dataframe(detail)
    except Exception:
        assert 1 == 2
    finally:
        if os.path.isdir(tdir):
            shutil.rmtree(tdir)
Example 31
def make_template(df, sr):
    client = Client("IRIS")
    # make templates
    regional = df['Regional']
    eventid = regional + str(df['ID'])
    detail = get_event_by_id(eventid, includesuperseded=True)
    phases = get_phase_dataframe(detail, catalog=regional)
    phases = phases[phases['Status'] == 'manual']
    print(phases)
    phases = phases[~phases.duplicated(keep='first',
                                       subset=['Channel', 'Phase'])]
    print(phases)
    st = Stream()
    tr = Stream()
    for ii in range(len(phases)):
        net = phases.iloc[ii]['Channel'].split('.')[0]
        sta = phases.iloc[ii]['Channel'].split('.')[1]
        comp = phases.iloc[ii]['Channel'].split('.')[2]
        #phase=phases.iloc[ii]['Phase']
        arr = UTCDateTime(phases.iloc[ii]['Arrival Time'])
        #print(int(np.round(arr.microsecond/(1/sr*10**6))*1/sr*10**6)==1000000)
        if int(np.round(arr.microsecond / (1 / sr * 10**6)) * 1 / sr *
               10**6) == 1000000:
            arr.microsecond = 0
            arr.second = arr.second + 1
        else:
            arr.microsecond = int(
                np.round(arr.microsecond / (1 / sr * 10**6)) * 1 / sr * 10**6)
        t1 = arr - 1
        t2 = arr + 9
        try:
            tr = client.get_waveforms(net, sta, "*", comp, t1 - 2, t2 + 2)
        except Exception:
            print("No data for " + net + " " + sta + " " + comp + " " +
                  str(t1) + " " + str(t2))
        else:
            print("Data available for " + net + " " + sta + " " + comp + " " +
                  str(t1) + " " + str(t2))
            tr.detrend()
            tr.trim(starttime=t1 - 2,
                    endtime=t2 + 2,
                    nearest_sample=1,
                    pad=1,
                    fill_value=0)
            tr.filter("bandpass", freqmin=2, freqmax=7)
            tr.interpolate(sampling_rate=sr, starttime=t1)
            tr.trim(starttime=t1,
                    endtime=t2,
                    nearest_sample=1,
                    pad=1,
                    fill_value=0)
            st += tr
    return st
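A usage sketch for make_template(): it reads only the 'Regional' and 'ID' fields of the row it is given (the event ID below also appears in Example 1), and sr is the target sampling rate in Hz.

import pandas as pd

row = pd.Series({'Regional': 'nc', 'ID': 72852151})
templates = make_template(row, sr=100.0)
print(templates)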
Example 32
def test_get_pager_data_frame():
    cassettes, datadir = get_datadir()
    EVENTID = 'us2000h8ty'
    detail = get_event_by_id(EVENTID)
    tape_file = os.path.join(cassettes, 'dataframes_pager.yaml')
    with vcr.use_cassette(tape_file):
        df = get_pager_data_frame(detail)
        mmi3_total = 2248544
        mmi3 = df.iloc[0]['mmi3']
        assert mmi3 == mmi3_total

        df = get_pager_data_frame(detail,
                                  get_losses=True,
                                  get_country_exposures=True)
        assert mmi3_total == df.iloc[1:]['mmi3'].sum()

        df = get_pager_data_frame(detail, get_losses=True)
        testfat = 13
        testeco = 323864991
        assert df.iloc[0]['predicted_fatalities'] == testfat
        assert df.iloc[0]['predicted_dollars'] == testeco

        df = get_pager_data_frame(detail,
                                  get_losses=True,
                                  get_country_exposures=True)
        assert df.iloc[1:]['predicted_fatalities'].sum() == testfat
        assert df.iloc[1:]['predicted_dollars'].sum() == testeco

        EVENTID = 'us1000778i'
        detail = get_event_by_id(EVENTID)
        df = get_pager_data_frame(detail)
        testval = 14380
        assert df.iloc[0]['mmi4'] == testval

        # test getting superseded versions of the pager product
        EVENTID = 'us2000h8ty'
        detail = get_event_by_id(EVENTID, includesuperseded=True)
        df = get_pager_data_frame(detail, get_losses=True)
        version_7 = df[df['pager_version'] == 7].iloc[0]
        v7fats = 16
        assert version_7['predicted_fatalities'] == v7fats
Example 33
def get_fault(eventsource, eventsourcecode, comcat_host='earthquake.usgs.gov',
              model=None, write_directory=None):
    """Retrieve the latest finite_fault data for a given event.
    Args:
        eventsource (str): Network that originated the event.
        eventsourcecode (str): Event code from network that originated
                               the event.
        comcat_host (str): (for testing) Specify an alternate comcat host.
        model (str): Model identifier to select when the finite-fault
                product has multiple solutions. Default is None.
        write_directory (str): Path to directory where files will be written.
                Default is None.
    """
    eventid = eventsource + eventsourcecode
    detail = get_event_by_id(eventid, host=comcat_host)
    if not detail.hasProduct(PRODUCT_TYPE):
        raise Exception('Event %r has no finite-fault product.' % eventid)
    if model is not None:
        mod1 = ''
        for prod in detail._jdict['properties']['products'][PRODUCT_TYPE]:
            if prod['code'].endswith(f'_{model}'):
                if mod1 == '':
                    latest_time1 = get_date(prod['updateTime'])
                if get_date(prod['updateTime']) >= latest_time1:
                    latest_time1 = get_date(prod['updateTime'])
                    mod1 = Product('finite-fault', 'last', prod)
        if mod1 == '':
            raise Exception(f'Model number, {model}, was not found for this '
                            f'finite fault product {eventid!r}')
    else:
        mod1 = detail.getProducts(PRODUCT_TYPE, version='last')[0]

    if write_directory is not None:
        now = datetime.datetime.utcnow()
        date_str = now.strftime(TIMEFMT.replace(':', '_').replace('.%f', ''))
        if model is not None:
            dir1 = os.path.join(write_directory,
                                eventid + f'_{model}_' + date_str)
            if not os.path.exists(dir1):
                os.makedirs(dir1, exist_ok=True)
            for file1 in mod1.contents:
                filename1 = os.path.join(dir1, os.path.basename(file1))
                mod1.getContent(file1, filename1)
        else:
            dir = os.path.join(write_directory, eventid + '_' + date_str)
            if not os.path.exists(dir):
                os.makedirs(dir, exist_ok=True)
            for download_file in mod1.contents:
                filename = os.path.join(dir, os.path.basename(download_file))
                mod1.getContent(download_file, filename)
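A usage sketch for get_fault(), assuming the event has a finite-fault product; the eventsource/eventsourcecode split mirrors the concatenation at the top of the function, and the output directory is illustrative.

get_fault('us', '1000778i', write_directory='/tmp/finite_fault')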
Example 34
def test_detail_product_versions():
    cassettes, datadir = get_datadir()
    tape_file = os.path.join(cassettes, 'classes_detailsummary.yaml')
    with vcr.use_cassette(tape_file):
        eventid = 'nn00570710'
        detail = get_event_by_id(eventid, includesuperseded=True)
        pref_origin_pref_source = detail.getProducts('origin',
                                                     source='preferred',
                                                     version='last')[0]
        pref_origin_pref_source2 = detail.getProducts('origin')[0]

        first_origin_pref_source = detail.getProducts('origin',
                                                      source='preferred',
                                                      version='first')[0]
        first_origin_us_source = detail.getProducts('origin',
                                                    source='us',
                                                    version='first')[0]
        last_origin_us_source = detail.getProducts('origin',
                                                   source='us',
                                                   version='last')[0]

        pref_origins_all_sources = detail.getProducts('origin',
                                                      source='all',
                                                      version='last')
        first_origins_all_sources = detail.getProducts('origin',
                                                       source='all',
                                                       version='first')

        assert pref_origin_pref_source.source == 'nn'
        assert pref_origin_pref_source2.source == 'nn'
        assert pref_origin_pref_source.version >= 7
        assert pref_origin_pref_source2.version >= 7
        assert first_origin_pref_source.source == 'nn'
        assert first_origin_pref_source.version == 1
        assert first_origin_us_source.source == 'us'
        assert first_origin_us_source.version == 1
        assert last_origin_us_source.source == 'us'
        assert last_origin_us_source.version >= 5

        sources = []
        for origin in pref_origins_all_sources:
            source = origin.source
            version = origin.version
            assert source not in sources
            sources.append(source)

        sources = []
        for origin in first_origins_all_sources:
            source = origin.source
            version = origin.version
            assert source not in sources
            assert version == 1
            sources.append(source)
Example 35
def test_geocoded():
    # first, test event with 10k and 1k geojson data
    eventid = 'ci14607652'
    datadir = get_datadir()
    tape_file1 = os.path.join(datadir, 'vcr_event1.yaml')

    with vcr.use_cassette(tape_file1):
        detail = get_event_by_id(eventid)
        df, msg = _get_dyfi_dataframe(detail)

    np.testing.assert_almost_equal(df['INTENSITY'].sum(), 4510.1)

    # next, test event with only geocoded (?) resolution text data
    eventid = 'ci14745580'
    tape_file2 = os.path.join(datadir, 'vcr_event2.yaml')

    with vcr.use_cassette(tape_file2):
        detail = get_event_by_id(eventid)
        df, msg = _get_dyfi_dataframe(detail)

    np.testing.assert_almost_equal(df['INTENSITY'].sum(), 800.4)
Example 36
def test_get_pager_data_frame():
    datadir = get_datadir()
    EVENTID = 'us2000h8ty'
    detail = get_event_by_id(EVENTID)
    tape_file = os.path.join(datadir, 'vcr_pager_results.yaml')
    # with vcr.use_cassette(tape_file):
    df = get_pager_data_frame(detail)
    mmi3_total = 2248544
    mmi3 = df.iloc[0]['mmi3']
    assert mmi3 == mmi3_total

    df = get_pager_data_frame(detail, get_country_exposures=True)
    assert mmi3_total == df.iloc[1:]['mmi3'].sum()

    df = get_pager_data_frame(detail, get_losses=True)
    testfat = 13
    testeco = 323864991
    assert df.iloc[0]['predicted_fatalities'] == testfat
    assert df.iloc[0]['predicted_dollars'] == testeco

    df = get_pager_data_frame(detail, get_losses=True,
                              get_country_exposures=True)
    assert df.iloc[1:]['predicted_fatalities'].sum() == testfat
    assert df.iloc[1:]['predicted_dollars'].sum() == testeco

    EVENTID = 'us1000778i'
    detail = get_event_by_id(EVENTID)
    df = get_pager_data_frame(detail)
    testval = 14380
    assert df.iloc[0]['mmi4'] == testval

    # test getting superseded versions of the pager product
    EVENTID = 'us2000h8ty'
    detail = get_event_by_id(EVENTID, includesuperseded=True)
    df = get_pager_data_frame(detail, get_losses=True)
    version_7 = df[df['pager_version'] == 7].iloc[0]
    v7fats = 16
    assert version_7['predicted_fatalities'] == v7fats
Example 37
def test_dyfi():
    eventid = 'se60247871'
    detail = get_event_by_id(eventid, includesuperseded=True)
    datadir = get_datadir()
    tape_file = os.path.join(datadir, 'vcr_dyfi_dataframe.yaml')
    with vcr.use_cassette(tape_file):
        df1km = get_dyfi_data_frame(detail, dyfi_file='utm_1km')
        np.testing.assert_almost_equal(df1km['intensity'].sum(), 14887.3)
        df10km = get_dyfi_data_frame(detail, dyfi_file='utm_10km')
        np.testing.assert_almost_equal(df10km['intensity'].sum(), 3479.5)
        dfutm = get_dyfi_data_frame(detail, dyfi_file='utm_var')
        np.testing.assert_almost_equal(dfutm['intensity'].sum(), 3479.5)
        dfzip = get_dyfi_data_frame(detail, dyfi_file='zip')
        np.testing.assert_almost_equal(dfzip['intensity'].sum(), 2344.7)
Example 38
def test_detail_product_versions():
    datadir = get_datadir()
    tape_file = os.path.join(datadir, 'vcr_detail_product.yaml')
    with vcr.use_cassette(tape_file):
        eventid = 'nn00570710'
        detail = get_event_by_id(eventid, includesuperseded=True)
        pref_origin_pref_source = detail.getProducts(
            'origin', source='preferred', version=VersionOption.LAST)[0]
        pref_origin_pref_source2 = detail.getProducts('origin')[0]

        first_origin_pref_source = detail.getProducts(
            'origin', source='preferred', version=VersionOption.FIRST)[0]
        first_origin_us_source = detail.getProducts(
            'origin', source='us', version=VersionOption.FIRST)[0]
        last_origin_us_source = detail.getProducts(
            'origin', source='us', version=VersionOption.LAST)[0]

        pref_origins_all_sources = detail.getProducts(
            'origin', source='all', version=VersionOption.LAST)
        first_origins_all_sources = detail.getProducts(
            'origin', source='all', version=VersionOption.FIRST)
        all_origins_all_sources = detail.getProducts(
            'origin', source='all', version=VersionOption.ALL)

        assert pref_origin_pref_source.source == 'nn'
        assert pref_origin_pref_source2.source == 'nn'
        assert pref_origin_pref_source.version >= 7
        assert pref_origin_pref_source2.version >= 7
        assert first_origin_pref_source.source == 'nn'
        assert first_origin_pref_source.version == 1
        assert first_origin_us_source.source == 'us'
        assert first_origin_us_source.version == 1
        assert last_origin_us_source.source == 'us'
        assert last_origin_us_source.version >= 5

        sources = []
        for origin in pref_origins_all_sources:
            source = origin.source
            version = origin.version
            assert source not in sources
            sources.append(source)

        sources = []
        for origin in first_origins_all_sources:
            source = origin.source
            version = origin.version
            assert source not in sources
            assert version == 1
            sources.append(source)
Example 39
    def getOnlineTensor(self, eventid):
        """Get tensor parameters from preferred ComCat moment tensor.

        Args:
            eventid (str): ComCat EventID (Sumatra is official20041226005853450_30).
        Returns:
            tuple: (latitude, longitude, depth, tensor_params), where
            tensor_params is a dictionary (or None) with fields:
                - source Moment Tensor source
                - type usually mww,mwc,mwb,mwr,TMTS or "unknown".
                - mrr,mtt,mpp,mrt,mrp,mtp Moment tensor components.
                - T T-axis values:
                  - azimuth
                  - plunge
                - N N-axis values:
                  - azimuth
                  - plunge
                - P P-axis values:
                  - azimuth
                  - plunge
                - NP1 First nodal plane values:
                  - strike
                  - dip
                  - rake
                - NP2 Second nodal plane values:
                  - strike
                  - dip
                  - rake
        """
        try:
            detail = get_event_by_id(eventid)
        except Exception:
            return (None, None, None, None)
        lat = detail.latitude
        lon = detail.longitude
        depth = detail.depth
        if not detail.hasProduct('moment-tensor'):
            return lat, lon, depth, None

        tensor = detail.getProducts('moment-tensor')[0]
        tensor_params = {}
        btype = 'unknown'
        if tensor.hasProperty('derived-magnitude-type'):
            btype = tensor['derived-magnitude-type']
        elif tensor.hasProperty('beachball-type'):
            btype = tensor['beachball-type']
        if btype.find('/') > -1:
            btype = btype.split('/')[-1]
        tensor_params['type'] = btype
        tensor_params['source'] = tensor['eventsource'] + \
            '_' + tensor['eventsourcecode']

        tensor_params['mtt'] = float(tensor['tensor-mtt'])
        tensor_params['mpp'] = float(tensor['tensor-mpp'])
        tensor_params['mrr'] = float(tensor['tensor-mrr'])
        tensor_params['mtp'] = float(tensor['tensor-mtp'])
        tensor_params['mrt'] = float(tensor['tensor-mrt'])
        tensor_params['mrp'] = float(tensor['tensor-mrp'])


        # sometimes the online MT is missing properties
        if not tensor.hasProperty('t-axis-length'):
            tensor_dict = fill_tensor_from_components(tensor_params['mrr'],
                                                      tensor_params['mtt'],
                                                      tensor_params['mpp'],
                                                      tensor_params['mrt'],
                                                      tensor_params['mrp'],
                                                      tensor_params['mtp'])
            tensor_params['T'] = tensor_dict['T'].copy()
            tensor_params['N'] = tensor_dict['N'].copy()
            tensor_params['P'] = tensor_dict['P'].copy()
        else:
            T = {}
            T['value'] = float(tensor['t-axis-length'])
            T['plunge'] = float(tensor['t-axis-plunge'])
            T['azimuth'] = float(tensor['t-axis-azimuth'])
            tensor_params['T'] = T.copy()

            N = {}
            N['value'] = float(tensor['n-axis-length'])
            N['plunge'] = float(tensor['n-axis-plunge'])
            N['azimuth'] = float(tensor['n-axis-azimuth'])
            tensor_params['N'] = N.copy()

            P = {}
            P['value'] = float(tensor['p-axis-length'])
            P['plunge'] = float(tensor['p-axis-plunge'])
            P['azimuth'] = float(tensor['p-axis-azimuth'])
            tensor_params['P'] = P.copy()


        if not tensor.hasProperty('nodal-plane-1-strike'):
            tensor2 = fill_tensor_from_components(tensor_params['mrr'],
                                                  tensor_params['mtt'],
                                                  tensor_params['mpp'],
                                                  tensor_params['mrt'],
                                                  tensor_params['mrp'],
                                                  tensor_params['mtp'])
            tensor_params['NP1'] = tensor2['NP1'].copy()
            tensor_params['NP2'] = tensor2['NP2'].copy()
        else:
            NP1 = {}
            NP1['strike'] = float(tensor['nodal-plane-1-strike'])
            NP1['dip'] = float(tensor['nodal-plane-1-dip'])
            if 'nodal-plane-1-rake' in tensor.properties:
                NP1['rake'] = float(tensor['nodal-plane-1-rake'])
            else:
                NP1['rake'] = float(tensor['nodal-plane-1-slip'])
            tensor_params['NP1'] = NP1.copy()

            NP2 = {}
            NP2['strike'] = float(tensor['nodal-plane-2-strike'])
            NP2['dip'] = float(tensor['nodal-plane-2-dip'])
            if 'nodal-plane-2-rake' in tensor.properties:
                NP2['rake'] = float(tensor['nodal-plane-2-rake'])
            else:
                NP2['rake'] = float(tensor['nodal-plane-2-slip'])
            tensor_params['NP2'] = NP2.copy()

        return lat, lon, depth, tensor_params
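A usage sketch for getOnlineTensor(), assuming fetcher is an instance of the (unnamed) enclosing class; the Sumatra event ID comes from the docstring above.

lat, lon, depth, tensor = fetcher.getOnlineTensor(
    'official20041226005853450_30')
if tensor is not None:
    print(tensor['type'], tensor['NP1'], tensor['NP2'])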
Example 40
def test_get_event():
    eventid = 'ci3144585'
    event = get_event_by_id(eventid)
    assert isinstance(event, DetailEvent)
Example 41
def test_impact_data_frame():
    datadir = get_datadir()
    print('iscgem910478 limited sources')
    iscgem910478_file = os.path.join(datadir, 'impact_iscgem910478.csv')
    event = get_event_by_id('iscgem910478',
                            host="dev01-earthquake.cr.usgs.gov")
    iscgem910478 = get_impact_data_frame(event).reset_index(drop=True)
    target_iscgem910478 = pd.read_csv(
        iscgem910478_file).reset_index(drop=True).fillna('')
    for column in iscgem910478.columns:
        iscgem910478[column] = iscgem910478[column].astype(str)
        target_iscgem910478[column] = target_iscgem910478[column].astype(str)
    pd.util.testing.assert_frame_equal(iscgem910478, target_iscgem910478)

    print('iscgem910478 all sources')
    iscgem910478_file = os.path.join(
        datadir, 'impact_iscgem910478_allsources.csv')
    event = get_event_by_id('iscgem910478',
                            host="dev01-earthquake.cr.usgs.gov")
    iscgem910478 = get_impact_data_frame(
        event, all_sources=True).reset_index(drop=True)
    target_iscgem910478 = pd.read_csv(
        iscgem910478_file).reset_index(drop=True).fillna('')
    for column in iscgem910478.columns:
        iscgem910478[column] = iscgem910478[column].astype(str)
        target_iscgem910478[column] = target_iscgem910478[column].astype(str)
    pd.util.testing.assert_frame_equal(iscgem910478, target_iscgem910478)

    print('iscgem910478 limited sources and shaking')
    iscgem910478_file = os.path.join(
        datadir, 'impact_iscgem910478_shaking.csv')
    event = get_event_by_id('iscgem910478',
                            host="dev01-earthquake.cr.usgs.gov")
    iscgem910478 = get_impact_data_frame(
        event, effect_types='shaking').reset_index(drop=True)
    target_iscgem910478 = pd.read_csv(
        iscgem910478_file).reset_index(drop=True).fillna('')
    for column in iscgem910478.columns:
        iscgem910478[column] = iscgem910478[column].astype(str)
        target_iscgem910478[column] = target_iscgem910478[column].astype(str)
    pd.util.testing.assert_frame_equal(iscgem910478, target_iscgem910478)

    print('usp0005rcg without contributing')
    usp0005rcg_file = os.path.join(datadir, 'impact_usp0005rcg.csv')
    event = get_event_by_id('usp0005rcg',
                            host="dev02-earthquake.cr.usgs.gov")
    usp0005rcg = get_impact_data_frame(event).reset_index(drop=True)
    target_usp0005rcg = pd.read_csv(
        usp0005rcg_file).reset_index(drop=True).fillna('')
    for column in usp0005rcg.columns:
        usp0005rcg[column] = usp0005rcg[column].astype(str)
        target_usp0005rcg[column] = target_usp0005rcg[column].astype(str)
    pd.util.testing.assert_frame_equal(usp0005rcg, target_usp0005rcg)

    print('usp0005rcg without contributing all sources')
    usp0005rcg_file = os.path.join(datadir,
                                   'impact_usp0005rcg_allsources.csv')
    event = get_event_by_id('usp0005rcg',
                            host="dev02-earthquake.cr.usgs.gov")
    usp0005rcg = get_impact_data_frame(event,
                                       all_sources=True).reset_index(drop=True)
    target_usp0005rcg = pd.read_csv(
        usp0005rcg_file).reset_index(drop=True).fillna('')
    for column in usp0005rcg.columns:
        usp0005rcg[column] = usp0005rcg[column].astype(str)
        target_usp0005rcg[column] = target_usp0005rcg[column].astype(str)
    pd.util.testing.assert_frame_equal(usp0005rcg, target_usp0005rcg)

    print('usp0005rcg with contributing all sources')
    usp0005rcg_file = os.path.join(datadir,
                                   'impact_usp0005rcg_allsources_contributing.csv')
    event = get_event_by_id('usp0005rcg',
                            host="dev02-earthquake.cr.usgs.gov")
    usp0005rcg = get_impact_data_frame(event,
                                       include_contributing=True, all_sources=True).reset_index(drop=True)
    target_usp0005rcg = pd.read_csv(
        usp0005rcg_file).reset_index(drop=True).fillna('')
    for column in usp0005rcg.columns:
        usp0005rcg[column] = usp0005rcg[column].astype(str)
        target_usp0005rcg[column] = target_usp0005rcg[column].astype(str)
    pd.testing.assert_frame_equal(usp0005rcg, target_usp0005rcg)

    print('usp0005rcg with contributing')
    usp0005rcg_file = os.path.join(datadir,
                                   'impact_usp0005rcg_contributing.csv')
    event = get_event_by_id('usp0005rcg',
                            host="dev02-earthquake.cr.usgs.gov")
    usp0005rcg = get_impact_data_frame(event,
                                       include_contributing=True).reset_index(drop=True)
    target_usp0005rcg = pd.read_csv(
        usp0005rcg_file).reset_index(drop=True).fillna('')
    for column in usp0005rcg.columns:
        usp0005rcg[column] = usp0005rcg[column].astype(str)
        target_usp0005rcg[column] = target_usp0005rcg[column].astype(str)
    pd.testing.assert_frame_equal(usp0005rcg, target_usp0005rcg)

    print('usp0005rcg landslide and shaking')
    usp0005rcg_file = os.path.join(datadir,
                                   'impact_usp0005rcg_landslide_shaking.csv')
    event = get_event_by_id('usp0005rcg',
                            host="dev02-earthquake.cr.usgs.gov")
    usp0005rcg = get_impact_data_frame(event,
                                       effect_types=['landslide', 'shaking']).reset_index(drop=True)
    target_usp0005rcg = pd.read_csv(
        usp0005rcg_file).reset_index(drop=True).fillna('')
    for column in usp0005rcg.columns:
        usp0005rcg[column] = usp0005rcg[column].astype(str)
        target_usp0005rcg[column] = target_usp0005rcg[column].astype(str)
    pd.testing.assert_frame_equal(usp0005rcg, target_usp0005rcg)

    print('usp0005rcg destroyed')
    usp0005rcg_file = os.path.join(datadir,
                                   'impact_usp0005rcg_destroyed.csv')
    event = get_event_by_id('usp0005rcg',
                            host="dev02-earthquake.cr.usgs.gov")
    usp0005rcg = get_impact_data_frame(event,
                                       loss_extents=['destroyed']).reset_index(drop=True)
    target_usp0005rcg = pd.read_csv(
        usp0005rcg_file).reset_index(drop=True).fillna('')
    for column in usp0005rcg.columns:
        usp0005rcg[column] = usp0005rcg[column].astype(str)
        target_usp0005rcg[column] = target_usp0005rcg[column].astype(str)
    pd.testing.assert_frame_equal(usp0005rcg, target_usp0005rcg)
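The load/cast/compare boilerplate above repeats verbatim for every fixture. As a minimal sketch (the `_assert_matches_fixture` name is hypothetical, not part of libcomcat; only pandas is assumed), each block could collapse to a single call:

import pandas as pd


def _assert_matches_fixture(df, csv_file):
    # Hypothetical helper mirroring the repeated pattern above: load the
    # expected CSV, align row indices, cast every column to str so dtypes
    # do not matter, then compare cell by cell.
    target = pd.read_csv(csv_file).reset_index(drop=True).fillna('')
    df = df.reset_index(drop=True)
    for column in df.columns:
        df[column] = df[column].astype(str)
        target[column] = target[column].astype(str)
    pd.testing.assert_frame_equal(df, target)

Each scenario above would then reduce to a one-liner such as _assert_matches_fixture(get_impact_data_frame(event, all_sources=True), iscgem910478_file).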
Example no. 43
def test_phase_dataframe():
    detail = get_event_by_id('us1000778i')  # 2016 New Zealand event
    df = get_phase_dataframe(detail, catalog='us')
    assert len(df) == 174
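To explore what get_phase_dataframe returns beyond its length, here is a small sketch, assuming the usual libcomcat module layout (libcomcat.search and libcomcat.dataframes) and using only generic pandas operations, since the exact column names vary by libcomcat version:

from libcomcat.dataframes import get_phase_dataframe
from libcomcat.search import get_event_by_id

# Sketch: fetch the same 2016 New Zealand event and inspect the phase picks.
detail = get_event_by_id('us1000778i')
phases = get_phase_dataframe(detail, catalog='us')
print(phases.columns.tolist())  # which phase attributes are available
print(phases.head())            # the first few arrivals
print(f'{len(phases)} phases total')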
Example no. 44
def test_moment_supplement():
    eventid = 'us2000ar20'  # 2017 M7.1 Mexico City earthquake
    detail = get_event_by_id(eventid)
    edict = detail.toDict(get_moment_supplement=True, get_tensors='preferred')
    assert edict['us_Mww_percent_double_couple'] == 0.9992
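To see concretely what get_moment_supplement adds, one option is to diff the key sets of the two dictionaries; a sketch under the same libcomcat layout assumption as above:

from libcomcat.search import get_event_by_id

# Sketch: compare the flat dicts built with and without the moment-tensor
# supplement; the extra keys carry the supplemental tensor fields.
detail = get_event_by_id('us2000ar20')
basic = detail.toDict(get_tensors='preferred')
full = detail.toDict(get_tensors='preferred', get_moment_supplement=True)
extra = sorted(set(full) - set(basic))
print(f'supplement adds {len(extra)} keys, e.g. {extra[:5]}')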
Example no. 45
def test_detail():
    eventid = 'ci3144585'  # 1994 Northridge earthquake
    url = f'https://earthquake.usgs.gov/earthquakes/feed/v1.0/detail/{eventid}.geojson'
    event = DetailEvent(url)
    assert str(event) == 'ci3144585 1994-01-17 12:30:55.390000 (34.213,-118.537) 18.2 km M6.7'
    assert event.hasProduct('shakemap')
    assert not event.hasProduct('foo')
    assert not event.hasProperty('foo')
    assert event.hasProperty('time')
    try:
        event['foo']
        raise AssertionError("expected AttributeError for unknown key 'foo'")
    except AttributeError:
        pass

    try:
        event.getNumVersions('foo')
        raise AssertionError("expected AttributeError for unknown product 'foo'")
    except AttributeError:
        pass

    try:
        event.getProducts('foo')
        raise AssertionError("expected AttributeError for unknown product 'foo'")
    except AttributeError:
        pass

    try:
        event.getProducts('shakemap', source='foo')
        raise AssertionError("expected AttributeError for unknown source 'foo'")
    except AttributeError:
        pass
    
    assert event.toDict()['magnitude'] == 6.7

    eventid = 'nc72282711'  # 2014 Napa earthquake, with multiple origins and moment tensors
    event = get_event_by_id(eventid)

    phases = event.getProducts('phase-data', source='all')
    
    ncdict = event.toDict(catalog='nc')
    usdict = event.toDict(catalog='us')
    atdict = event.toDict(catalog='at')

    try:
        event.toDict(catalog='foo')
        raise AssertionError("expected AttributeError for unknown catalog 'foo'")
    except AttributeError:
        pass

    assert ncdict['depth'] == 11.12
    assert usdict['depth'] == 11.25
    assert atdict['depth'] == 9.0

    ncdict_allmags = event.toDict(get_all_magnitudes=True)
    assert ncdict_allmags['magtype3'] == 'Ml'

    ncdict_alltensors = event.toDict(get_tensors='all')
    assert ncdict_alltensors['us_Mwb_mrr'] == 7.63e+16
    ncdict_allfocals = event.toDict(get_focals='all')
    assert ncdict_allfocals['nc_np1_strike'] == '345.0'

    assert event.getNumVersions('shakemap') > 0
    assert isinstance(event.getProducts('shakemap')[0], Product)
    assert event.latitude == 38.2151667
    assert event.longitude == -122.3123333
    assert event.depth == 11.12
    assert event.id == eventid
    assert event.time == datetime(2014, 8, 24, 10, 20, 44, 70000)
    assert 'sources' in event.properties
    assert event['mag'] == 6.02

    # test all of the different behaviors of the getProducts() method
    # first, test the default behavior (get the most preferred product):
    event = get_event_by_id('nc21323712', includesuperseded=True)  # 2003 Central California
    pref_shakemap = event.getProducts('shakemap')[0]
    assert pref_shakemap.source == 'atlas'
    assert pref_shakemap.update_time >= datetime(2017, 4, 12, 10, 50, 9, 368000)
    assert pref_shakemap.preferred_weight >= 100000000

    # get the first Atlas shakemap
    first_shakemap = event.getProducts('shakemap', version=VersionOption.FIRST,
                                       source='atlas')[0]
    assert first_shakemap.source == 'atlas'
    assert first_shakemap.update_time >= datetime(2015, 2, 4, 6, 1, 33, 400000)
    assert first_shakemap.preferred_weight >= 81

    # get the first nc shakemap
    first_shakemap = event.getProducts('shakemap', version=VersionOption.FIRST,
                                       source='nc')[0]
    assert first_shakemap.source == 'nc'
    assert first_shakemap.update_time >= datetime(2017, 3, 8, 20, 12, 59, 380000)
    assert first_shakemap.preferred_weight >= 231
    
    # get the last version of the nc shakemaps
    last_shakemap = event.getProducts('shakemap', version=VersionOption.LAST,
                                      source='nc')[0]
    assert last_shakemap.source == 'nc'
    assert last_shakemap.update_time >= datetime(2017, 3, 17, 17, 40, 26, 576000)
    assert last_shakemap.preferred_weight >= 231

    # get all the nc versions of the shakemap
    shakemaps = event.getProducts('shakemap', version=VersionOption.ALL,
                                  source='nc')
    for shakemap in shakemaps:
        assert shakemap.source == 'nc'

    # get all versions of all shakemaps
    shakemaps = event.getProducts('shakemap', version=VersionOption.ALL,
                                  source='all')
    assert event.getNumVersions('shakemap') == len(shakemaps)
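Outside the test, the same version machinery can be exercised with a short loop; a sketch assuming VersionOption is importable from libcomcat.classes, as the calls above imply:

from libcomcat.classes import VersionOption
from libcomcat.search import get_event_by_id

# Sketch: list every version of every shakemap for the 2003 Central
# California event, mirroring the VersionOption.ALL call the final
# assertion checks.
event = get_event_by_id('nc21323712', includesuperseded=True)
for shakemap in event.getProducts('shakemap', version=VersionOption.ALL,
                                  source='all'):
    print(shakemap.source, shakemap.update_time, shakemap.preferred_weight)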