Example #1
def test_search():
    eventlist = search(starttime=datetime(1994,1,17,12,30),
                       endtime=datetime(1994,1,18,12,35),
                       minmagnitude=6.6)
    event = eventlist[0]
    assert event.id == 'ci3144585'

    events = search(minmagnitude=9.0,maxmagnitude=9.9,
                    updatedafter=datetime(2010,1,1))

    events = search(maxmagnitude=0.1,
                    starttime=datetime(2017,1,1),
                    endtime=datetime(2017,1,30))
Example #2
def test_summary():
    datadir = get_datadir()
    tape_file = os.path.join(datadir, 'vcr_summary.yaml')
    with vcr.use_cassette(tape_file):
        eventlist = search(starttime=datetime(1994, 1, 17, 12, 30),
                           endtime=datetime(1994, 1, 18, 12, 35),
                           minmagnitude=6.6)
        event = eventlist[0]
        cmp = 'ci3144585 1994-01-17 12:30:55.390000 (34.213,-118.537) 18.2 km M6.7'
        assert str(event) == cmp
        assert event.id == 'ci3144585'
        assert event.time == datetime(1994, 1, 17, 12, 30, 55, 390000)
        assert event.latitude == 34.213
        assert event.longitude == -118.537
        assert event.depth == 18.202
        assert event.magnitude == 6.7
        assert 'cdi' in event.properties
        assert event['cdi'] == 8.6
        assert event.hasProduct('shakemap')
        assert not event.hasProduct('foo')
        try:
            event['foo']
            assert 1 == 2
        except AttributeError:
            pass
        assert event.hasProperty('cdi')
        assert not event.hasProperty('foo')
        assert isinstance(event.getDetailEvent(), DetailEvent)
        durl = 'https://earthquake.usgs.gov/fdsnws/event/1/query?eventid=ci3144585&format=geojson'
        assert event.getDetailURL() == durl
        try:
            detail = event.getDetailEvent(includedeleted=True,
                                          includesuperseded=True)
            assert 1 == 2
        except RuntimeError:
            pass

        # find an event that has multiple versions of shakemap to test
        # include superseded
        # official20110311054624120_30
        eventlist = search(starttime=datetime(2011, 3, 11, 0, 0),
                           endtime=datetime(2011, 3, 12, 0, 0),
                           minmagnitude=8.8)
        honshu = eventlist[0]
        detail = honshu.getDetailEvent(includesuperseded=True)
        shakemaps = detail.getProducts('shakemap', version=VersionOption.ALL)
        assert shakemaps[1].source == 'atlas'
        assert event.toDict()['depth'] == 18.202
Example #3
def test_get_summary_data_frame():
    events = search(starttime=datetime(1994,6,1),
                    endtime=datetime(1994,10,6),
                    minmagnitude=8.0,maxmagnitude=9.0,verbose=True)

    df = get_summary_data_frame(events)
    assert len(df) == 2
    assert df.iloc[0]['magnitude'] == 8.2
Example #4
def test_summary():
    eventlist = search(starttime=datetime(1994, 1, 17, 12, 30),
                       endtime=datetime(1994, 1, 18, 12, 35),
                       minmagnitude=6.6)
    event = eventlist[0]
    assert str(
        event
    ) == 'ci3144585 1994-01-17 12:30:55.390000 (34.213,-118.537) 18.2 km M6.7'
    assert event.id == 'ci3144585'
    assert event.time == datetime(1994, 1, 17, 12, 30, 55, 390000)
    assert event.latitude == 34.213
    assert event.longitude == -118.537
    assert event.depth == 18.202
    assert event.magnitude == 6.7
    assert 'cdi' in event.properties
    assert event['cdi'] == 8.6
    assert event.hasProduct('shakemap')
    assert not event.hasProduct('foo')
    try:
        event['foo']
        assert 1 == 2
    except AttributeError:
        pass
    assert event.hasProperty('cdi')
    assert not event.hasProperty('foo')
    assert isinstance(event.getDetailEvent(), DetailEvent)
    durl = 'https://earthquake.usgs.gov/fdsnws/event/1/query?eventid=ci3144585&format=geojson'
    assert event.getDetailURL() == durl
    try:
        detail = event.getDetailEvent(includedeleted=True,
                                      includesuperseded=True)
        assert 1 == 2
    except RuntimeError:
        pass

    # find an event that has multiple versions of shakemap to test
    # includesuperseded: official20110311054624120_30
    eventlist = search(starttime=datetime(2011, 3, 11, 0, 0),
                       endtime=datetime(2011, 3, 12, 0, 0),
                       minmagnitude=8.8)
    honshu = eventlist[0]
    detail = honshu.getDetailEvent(includesuperseded=True)
    shakemaps = detail.getProducts('shakemap', version=VersionOption.ALL)
    assert shakemaps[1].source == 'atlas'
    assert event.toDict()['depth'] == 18.202
Example #5
def fetch_day(mag=0):
    d = fetch_date()
    y, m, d = d.year, d.month, d.day

    events = search(starttime=datetime(y, m, d, 0, 0),
                    endtime=datetime(y, m, d, 23, 59), minmagnitude=mag)

    return [{'time': '{}-{}-{}'.format(y, m, d),
             'event': dict(i.toDict()),
             'props': fetch_properties(i)} for i in events]
Example #6
def test_get_detail_data_frame():
    events = search(starttime=datetime(1994, 6, 1),
                    endtime=datetime(1994, 10, 6),
                    minmagnitude=8.0,
                    maxmagnitude=9.0)
    all_mags = get_detail_data_frame(events,
                                     get_all_magnitudes=True,
                                     verbose=True)
    assert all_mags.iloc[0]['magnitude'] == 8.2
Example #7
def test_search():
    datadir = get_datadir()
    tape_file = os.path.join(datadir, 'vcr_search.yaml')
    with vcr.use_cassette(tape_file):
        eventlist = search(starttime=datetime(1994, 1, 17, 12, 30),
                           endtime=datetime(1994, 1, 18, 12, 35),
                           minmagnitude=6.6)
        event = eventlist[0]
        assert event.id == 'ci3144585'

        events = search(minmagnitude=9.0, maxmagnitude=9.9,
                        starttime=datetime(2008, 1, 1),
                        endtime=datetime(2010, 2, 1),
                        updatedafter=datetime(2010, 1, 1))

        events = search(maxmagnitude=0.1,
                        starttime=datetime(2017, 1, 1),
                        endtime=datetime(2017, 1, 30))
Example #8
def test_get_summary_data_frame():
    events = search(starttime=datetime(1994, 6, 1),
                    endtime=datetime(1994, 10, 6),
                    minmagnitude=8.0,
                    maxmagnitude=9.0,
                    verbose=True)

    df = get_summary_data_frame(events)
    assert len(df) == 2
    assert df.iloc[0]['magnitude'] == 8.2
Example #9
def test_get_detail_data_frame():
    datadir = get_datadir()
    tape_file = os.path.join(datadir, 'vcr_detail_frame.yaml')
    with vcr.use_cassette(tape_file):
        events = search(starttime=datetime(1994, 6, 1),
                        endtime=datetime(1994, 10, 6),
                        minmagnitude=8.0, maxmagnitude=9.0)
        all_mags = get_detail_data_frame(
            events, get_all_magnitudes=True, verbose=True)
        assert all_mags.iloc[0]['magnitude'] == 8.2
Example #10
def test_get_detail_data_frame():
    cassettes, datadir = get_datadir()
    tape_file = os.path.join(cassettes, 'dataframes_detailed.yaml')
    with vcr.use_cassette(tape_file):
        events = search(starttime=datetime(1994, 6, 1),
                        endtime=datetime(1994, 10, 6),
                        minmagnitude=8.0,
                        maxmagnitude=9.0)
        all_mags = get_detail_data_frame(events, get_all_magnitudes=True)
        assert all_mags.iloc[0]['magnitude'] == 8.2
Example #11
def test_search():
    datadir = get_datadir()
    tape_file = os.path.join(datadir, 'search_search.yaml')
    with vcr.use_cassette(tape_file):
        eventlist = search(starttime=datetime(1994, 1, 17, 12, 30),
                           endtime=datetime(1994, 1, 18, 12, 35),
                           minmagnitude=6.6)
        event = eventlist[0]
        assert event.id == 'ci3144585'

        events = search(minmagnitude=9.0,
                        maxmagnitude=9.9,
                        starttime=datetime(2008, 1, 1),
                        endtime=datetime(2010, 2, 1),
                        updatedafter=datetime(2010, 1, 1))

        events = search(maxmagnitude=0.1,
                        starttime=datetime(2017, 1, 1),
                        endtime=datetime(2017, 1, 30))
Example #12
def seismes(num_page=5, refresh=1):
    date_now = datetime.now()
    date_last_hour = date_now - timedelta(hours=num_page)

    eventlist = search(starttime=date_last_hour,
                       endtime=date_now,
                       minmagnitude=0)

    listseismes = lastseismes(eventlist)
    lenlist = len(listseismes)
    return render_template('map_v2_1.html', listseismes=listseismes, refresh=refresh, lenlist=lenlist)
Example #13
def test_get_summary_data_frame():
    datadir = get_datadir()
    tape_file = os.path.join(datadir, 'vcr_summary_frame.yaml')
    with vcr.use_cassette(tape_file):
        events = search(starttime=datetime(1994, 6, 1),
                        endtime=datetime(1994, 10, 6),
                        minmagnitude=8.0, maxmagnitude=9.0, verbose=True)

        df = get_summary_data_frame(events)
        assert len(df) == 2
        assert df.iloc[0]['magnitude'] == 8.2
Example #14
def test_get_summary_data_frame():
    cassettes, datadir = get_datadir()
    tape_file = os.path.join(cassettes, 'dataframes_summary.yaml')
    with vcr.use_cassette(tape_file):
        events = search(starttime=datetime(1994, 6, 1),
                        endtime=datetime(1994, 10, 6),
                        minmagnitude=8.0, maxmagnitude=9.0)

        df = get_summary_data_frame(events)
        assert len(df) == 2
        assert df.iloc[0]['magnitude'] == 8.2
Example #15
def test_get_detail_data_frame():
    cassettes, datadir = get_datadir()
    tape_file = os.path.join(cassettes, "dataframes_detailed.yaml")
    with vcr.use_cassette(tape_file, record_mode="new_episodes"):
        events = search.search(
            starttime=datetime(1994, 6, 1),
            endtime=datetime(1994, 10, 6),
            minmagnitude=8.0,
            maxmagnitude=9.0,
        )
        all_mags = get_detail_data_frame(events, get_all_magnitudes=True)
        assert all_mags.iloc[0]["magnitude"] == 8.2
Example #16
def test_url_error():
    datadir = get_datadir()
    tape_file = os.path.join(datadir, 'search_error.yaml')
    with vcr.use_cassette(tape_file):
        passed = True
        try:
            eventlist = search(starttime=datetime(1994, 1, 17, 12, 30),
                               endtime=datetime(1994, 1, 18, 12, 35),
                               minmagnitude=6.6,
                               host="error")
        except Exception:
            passed = False
        assert not passed
Example #17
def test_get_summary_data_frame():
    cassettes, datadir = get_datadir()
    tape_file = os.path.join(cassettes, "dataframes_summary.yaml")
    with vcr.use_cassette(tape_file, record_mode="new_episodes"):
        events = search.search(
            starttime=datetime(1994, 6, 1),
            endtime=datetime(1994, 10, 6),
            minmagnitude=8.0,
            maxmagnitude=9.0,
        )

        df = get_summary_data_frame(events)
        assert len(df) == 2
        assert df.iloc[0]["magnitude"] == 8.2
Example #18
def test_scenario():
    datadir = get_datadir()
    tape_file = os.path.join(datadir, 'search_scenario.yaml')
    with vcr.use_cassette(tape_file):
        try:
            eventlist = search(starttime=datetime(2013, 10, 10, 12, 0),
                               endtime=datetime(2013, 10, 10, 12, 30, 0),
                               minmagnitude=0,
                               maxmagnitude=9.9,
                               scenario=True)
            assert eventlist[0].id == 'ak013d08buqb'
        except Exception as e:
            raise AssertionError('Scenario search failed with "%s".' %
                                 (str(e)))
Example #19
def seismes_v2(date_format, refresh=1):
    date_now = datetime.now()
    if date_format == 0:
        date_past = date_now - timedelta(hours=date_now.hour)
    elif date_format == 1:
        date_past = date_now - timedelta(days=date_now.weekday())
    elif date_format == 2:
        # back to the first day of the current month (day is 1-based)
        date_past = date_now - timedelta(days=date_now.day - 1)

    eventlist = search(starttime=date_past,
                       endtime=date_now,
                       minmagnitude=0)

    listseismes = lastseismes(eventlist)
    lenlist = len(listseismes)
    return render_template('map_v2_1.html', listseismes=listseismes, refresh=refresh, lenlist=lenlist)
Example #20
    def load_catalog(self, extra_comcat_params):
        """
        Uses the libcomcat API (https://github.com/usgs/libcomcat) to query the ComCat database for event
        information in California.

        The default parameters are given from the California testing region defined by the CSEP1 template
        files. start_time and end_time are expected to be datetime objects with the UTC timezone.
        Enough information needs to be provided in order to calculate a start date and end date:

        1) start_time and end_time
        2) start_time and duration_in_years
        3) epoch_time and end_time
        4) epoch_time and duration_in_years

        If start_time and epoch_time are both supplied, the program will default to using start_time.
        If end_time and duration_in_years are both supplied, the program will default to using end_time.

        This requires an internet connection and will fail if the script has no access to the server.

        Args:
            extra_comcat_params (dict): pass additional parameters to libcomcat
        """
        from libcomcat.search import search

        # get eventlist from Comcat
        eventlist = search(minmagnitude=self.min_magnitude,
                           minlatitude=self.min_latitude,
                           maxlatitude=self.max_latitude,
                           minlongitude=self.min_longitude,
                           maxlongitude=self.max_longitude,
                           starttime=self.start_time,
                           endtime=self.end_time,
                           **extra_comcat_params)

        # eventlist is converted to ndarray in _get_catalog_as_ndarray called from setter
        self.catalog = eventlist

        # update state because we just loaded a new catalog
        self.date_accessed = datetime.datetime.utcnow()
        self._update_catalog_stats()

        return self
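# A minimal sketch (not part of libcomcat or the class above) of how the four
# start/end combinations listed in the load_catalog docstring could be
# resolved. The names epoch_time and duration_in_years come from that
# docstring; treating epoch_time as seconds since 1970 is an assumption.
from datetime import datetime, timedelta

def resolve_time_range(start_time=None, end_time=None,
                       epoch_time=None, duration_in_years=None):
    # start_time wins over epoch_time when both are supplied
    if start_time is None and epoch_time is not None:
        start_time = datetime.utcfromtimestamp(epoch_time)
    if start_time is None:
        raise ValueError('supply start_time or epoch_time')
    # end_time wins over duration_in_years when both are supplied
    if end_time is None and duration_in_years is not None:
        end_time = start_time + timedelta(days=365.25 * duration_in_years)
    if end_time is None:
        raise ValueError('supply end_time or duration_in_years')
    return start_time, end_time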
Example #21
def find_events(target, buffer):
    """
    Libcomcat query for potential matching events to the target.
    Args:
        target (dictionary):
            Dictionary of target event mag, time, and
            location values.
        buffer (dictionary): Dictionary of time, magnitude,
            and distance buffers.
    Returns:
        usgs_events (list): List of Libcomcat SummaryEvents.
    """
    usgs_event = search(
        starttime=target['time'] - buffer['time'],
        endtime=target['time'] + buffer['time'],
        latitude=target['lat'],
        longitude=target['lon'],
        maxradiuskm=buffer['dist'],
        maxmagnitude=target['mag'] + buffer['mag'],
        minmagnitude=target['mag'] - buffer['mag'])
    return usgs_event
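# A hedged usage sketch for find_events() above. The dictionary keys follow
# its docstring (target: 'time', 'lat', 'lon', 'mag'; buffer: 'time', 'mag',
# 'dist'); the values are illustrative only, borrowed from the Northridge
# event used in the tests above.
from datetime import datetime, timedelta

target = {'time': datetime(1994, 1, 17, 12, 30, 55),
          'lat': 34.213, 'lon': -118.537, 'mag': 6.7}
buffer = {'time': timedelta(minutes=5), 'mag': 0.3, 'dist': 50}
candidates = find_events(target, buffer)  # list of libcomcat SummaryEvents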
Example #22
    def updateSequences(self, stime):
        etime = stime + timedelta(days=1)
        events = search(starttime=stime,
                        endtime=etime,
                        minlatitude=-90,
                        maxlatitude=90,
                        minlongitude=-180,
                        maxlongitude=180,
                        minmagnitude=0.0,
                        maxmagnitude=9.9)
        todayframe = get_summary_data_frame(events)
        todaydata = get_day_counts(GDICT, todayframe)
        todaygrid = Grid2D(data=todaydata, geodict=GDICT)
        for row in range(0, GDICT.ny):
            for col in range(0, GDICT.nx):
                clat, clon = GDICT.getLatLon(row, col)
                tvalue = todaygrid._data[row, col]
                mvalue = self._meangrid._data[row, col]
                svalue = self._stdgrid._data[row, col]
                # thresh = tvalue > mvalue + svalue * 3
                thresh = tvalue > MINEQ
                xmin = clon - GDICT.dx / 2
                xmax = clon + GDICT.dx / 2
                ymin = clat - GDICT.dy / 2
                ymax = clat + GDICT.dy / 2
                if thresh:
                    c1 = todayframe['latitude'] > ymin
                    c2 = todayframe['latitude'] <= ymax
                    c3 = todayframe['longitude'] > xmin
                    c4 = todayframe['longitude'] <= xmax
                    cluster = todayframe[c1 & c2 & c3 & c4].copy()
                    class_frame, pproj = self.get_clusters(cluster, clon, clat)
                    self.insertSequences(class_frame, pproj)
        # call a method that filters out clusters that don't match the
        # definition of an earthquake sequence.
        self.cleanSequences()
Example #23
def main():
    parser = get_parser()
    args = parser.parse_args()

    setup_logger(args.logfile, args.loglevel)

    latitude = None
    longitude = None
    radiuskm = None
    lonmin = latmin = lonmax = latmax = None
    if args.radius:
        latitude = args.radius[0]
        longitude = args.radius[1]
        radiuskm = args.radius[2]

    if args.bounds:
        lonmin, lonmax, latmin, latmax = args.bounds
        # fix longitude bounds when crossing dateline
        if lonmin > lonmax and lonmax >= -180:
            lonmin -= 360
    else:
        lonmin, lonmax, latmin, latmax = None, None, None, None

    minmag = 0.0
    maxmag = 9.9
    if args.magRange:
        minmag = args.magRange[0]
        maxmag = args.magRange[1]

    if args.bounds and args.radius:
        print('Please specify either a bounding box OR radius search.')
        sys.exit(1)

    if args.eventid:
        event = get_event_by_id(args.eventid, includesuperseded=args.all)
        events = [event]
    else:
        events = search(starttime=args.startTime,
                        endtime=args.endTime,
                        updatedafter=args.after,
                        minlatitude=latmin,
                        maxlatitude=latmax,
                        minlongitude=lonmin,
                        maxlongitude=lonmax,
                        latitude=latitude,
                        longitude=longitude,
                        maxradiuskm=radiuskm,
                        maxmagnitude=maxmag,
                        minmagnitude=minmag,
                        producttype='losspager',
                        host=args.host)

    if not len(events):
        print('No events found matching your search criteria. Exiting.')
        sys.exit(0)

    dataframe = None
    nevents = len(events)
    for i, event in enumerate(events, start=1):
        logging.debug('Processing event %s (%i of %i).\n' %
                      (event.id, i, nevents))

        if isinstance(event, SummaryEvent):
            detail = event.getDetailEvent(includesuperseded=args.all)
        else:
            detail = event
        df = get_pager_data_frame(detail,
                                  get_losses=args.get_losses,
                                  get_country_exposures=args.get_countries,
                                  get_all_versions=args.all)
        if dataframe is None:
            dataframe = df
        else:
            dataframe = pd.concat([dataframe, df])

    if dataframe is not None:
        logging.debug('Created table...saving %i records to %s.\n' %
                      (len(dataframe), args.filename))
        if args.format == 'excel':
            dataframe.to_excel(args.filename, index=False)
        elif args.format == 'tab':
            dataframe.to_csv(args.filename, sep='\t', index=False)
        else:
            dataframe.to_csv(args.filename, index=False, chunksize=1000)

        add_headers(args.filename, args.format)
        print('%i records saved to %s.' % (len(dataframe), args.filename))
    else:
        sys.stderr.write('No Pager products found for requested event(s)\n')
    sys.exit(0)
Example #24
def main():
    parser = get_parser()
    args = parser.parse_args()

    # make sure we don't have -e option AND --numdays option
    if args.endTime is not None and args.numdays is not None:
        msg = ('You must specify end time or number of days since '
               'start time, not both. Exiting.')
        print(msg)
        sys.exit(1)

    if not args.endTime and args.numdays:
        args.endTime = args.startTime + timedelta(args.numdays)

    setup_logger(args.logfile, args.loglevel)

    tsum = (args.bounds is not None) + \
        (args.radius is not None) + (args.country is not None)
    if tsum != 1:
        logging.error(
            'Please specify a bounding box, radius, or country code.')
        sys.exit(1)

    latitude = None
    longitude = None
    radiuskm = None
    lonmin = latmin = lonmax = latmax = None
    bounds = None
    if args.radius:
        latitude = args.radius[0]
        longitude = args.radius[1]
        radiuskm = args.radius[2]

    if args.bounds:
        lonmin, lonmax, latmin, latmax = args.bounds
        # fix longitude bounds when crossing dateline
        if lonmin > lonmax and lonmax >= -180:
            lonmin -= 360
    else:
        lonmin, lonmax, latmin, latmax = None, None, None, None
        bounds = (lonmin, lonmax, latmin, latmax)

    if args.country:
        ccode = args.country
        if not check_ccode(ccode):
            curl = 'https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2'
            fmt = ('%s is not a valid ISO 3166 country code. '
                   'See %s for the list.')
            tpl = (ccode, curl)
            logging.error(fmt % tpl)
            sys.exit(1)
        bounds = get_country_bounds(ccode, args.buffer)  # this returns a list

    minmag = 0.0
    maxmag = 9.9
    if args.magRange:
        minmag = args.magRange[0]
        maxmag = args.magRange[1]

    minsig = 0
    maxsig = 5000
    if args.sigRange:
        minsig = args.sigRange[0]
        maxsig = args.sigRange[1]

    if args.getCount:
        if isinstance(bounds, tuple) or bounds is None:
            nevents = count(starttime=args.startTime,
                            endtime=args.endTime,
                            updatedafter=args.after,
                            minlatitude=latmin,
                            maxlatitude=latmax,
                            minlongitude=lonmin,
                            maxlongitude=lonmax,
                            latitude=latitude,
                            longitude=longitude,
                            maxradiuskm=radiuskm,
                            catalog=args.catalog,
                            contributor=args.contributor,
                            maxmagnitude=maxmag,
                            minmagnitude=minmag,
                            minsig=minsig,
                            maxsig=maxsig,
                            producttype=args.limitByProductType)
        else:
            nevents = 0
            for lonmin, lonmax, latmin, latmax in bounds:
                nevents += count(starttime=args.startTime,
                                 endtime=args.endTime,
                                 updatedafter=args.after,
                                 minlatitude=latmin,
                                 maxlatitude=latmax,
                                 minlongitude=lonmin,
                                 maxlongitude=lonmax,
                                 latitude=latitude,
                                 longitude=longitude,
                                 maxradiuskm=radiuskm,
                                 catalog=args.catalog,
                                 contributor=args.contributor,
                                 minsig=minsig,
                                 maxsig=maxsig,
                                 maxmagnitude=maxmag,
                                 minmagnitude=minmag,
                                 producttype=args.limitByProductType)
        print('There are %i events matching input criteria.' % nevents)
        sys.exit(0)
    if isinstance(bounds, tuple) or bounds is None:
        events = search(starttime=args.startTime,
                        endtime=args.endTime,
                        updatedafter=args.after,
                        minlatitude=latmin,
                        maxlatitude=latmax,
                        minlongitude=lonmin,
                        maxlongitude=lonmax,
                        latitude=latitude,
                        longitude=longitude,
                        maxradiuskm=radiuskm,
                        catalog=args.catalog,
                        contributor=args.contributor,
                        maxmagnitude=maxmag,
                        minmagnitude=minmag,
                        minsig=minsig,
                        maxsig=maxsig,
                        producttype=args.limitByProductType,
                        host=args.host,
                        eventtype=args.event_type,
                        alertlevel=args.alert_level)
    else:
        events = []
        for i, tbounds in enumerate(bounds):
            lonmin, lonmax, latmin, latmax = tbounds
            fmt = 'Checking bounds %i of %i for %s...\n'
            tpl = (i + 1, len(bounds), ccode)
            logging.debug(fmt % tpl)
            tevents = search(starttime=args.startTime,
                             endtime=args.endTime,
                             updatedafter=args.after,
                             minlatitude=latmin,
                             maxlatitude=latmax,
                             minlongitude=lonmin,
                             maxlongitude=lonmax,
                             latitude=latitude,
                             longitude=longitude,
                             maxradiuskm=radiuskm,
                             catalog=args.catalog,
                             contributor=args.contributor,
                             maxmagnitude=maxmag,
                             minmagnitude=minmag,
                             minsig=minsig,
                             maxsig=maxsig,
                             producttype=args.limitByProductType,
                             host=args.host,
                             eventtype=args.event_type,
                             alertlevel=args.alert_level)
            events += tevents

    if not len(events):
        logging.info('No events found matching your search criteria. Exiting.')
        sys.exit(0)

    if (args.getAngles != 'none' or
            args.getAllMags or
            args.getComponents != 'none'):

        logging.info(
            'Fetched %i events...creating table.\n' % (len(events)))
        supp = args.getMomentSupplement
        df = get_detail_data_frame(events, get_all_magnitudes=args.getAllMags,
                                   get_tensors=args.getComponents,
                                   get_focals=args.getAngles,
                                   get_moment_supplement=supp)
    else:
        logging.info(
            'Fetched %i events...creating summary table.\n' % (len(events)))
        df = get_summary_data_frame(events)

    # order the columns so that at least the initial parameters come the way
    # we want them...
    first_columns = list(events[0].toDict().keys())
    col_list = list(df.columns)
    for column in first_columns:
        try:
            col_list.remove(column)
        except ValueError:
            # column not in the dataframe; nothing to reorder
            pass
    df = df[first_columns + col_list]

    if args.country:
        df = filter_by_country(df, ccode, buffer_km=args.buffer)

    logging.info('Created table...saving %i records to %s.\n' %
                 (len(df), args.filename))
    if args.format == 'excel':
        df.to_excel(args.filename, index=False)
    elif args.format == 'tab':
        df.to_csv(args.filename, sep='\t', index=False)
    else:
        df.to_csv(args.filename, index=False, chunksize=1000)
    logging.info('%i records saved to %s.' % (len(df), args.filename))
    sys.exit(0)
Example #25
def main():
    """
    Read in data and add comcat IDs, download rupture file if available.
    """
    eq_df = pd.read_csv(EVENT_FILE, sep=" ", header=None, names=EVENT_COLS)
    eq_df['comcat_id'] = ''
    nrows = eq_df.shape[0]

    for i in range(nrows):
        print('i = %i' % i)

        eqmag = float(eq_df['mag'][i])
        dmag = 0.3
        min_mag = eqmag - dmag
        max_mag = eqmag + dmag

        edate = eq_df['date'][i]
        etime = eq_df['time'][i]
        edatetime = edate + ' ' + etime

        eqdatetime = datetime.datetime.strptime(edatetime, '%Y/%m/%d %H:%M:%S')
        start_time = eqdatetime - datetime.timedelta(1)  # -1 day
        end_time = eqdatetime + datetime.timedelta(1)  # +1 day

        dll = 0.1
        eq_lat = float(eq_df['lat'][i])
        min_latitude = eq_lat - dll
        max_latitude = eq_lat + dll

        eq_lon = float(eq_df['lon'][i])
        min_longitude = eq_lon - dll
        max_longitude = eq_lon + dll

        summary_list = search(starttime=start_time,
                              endtime=end_time,
                              minlatitude=min_latitude,
                              maxlatitude=max_latitude,
                              minlongitude=min_longitude,
                              maxlongitude=max_longitude,
                              minmagnitude=min_mag,
                              maxmagnitude=max_mag)

        if len(summary_list):
            summary_event = summary_list[0]
            detail = summary_event.getDetailEvent()
            eq_df.at[i, 'comcat_id'] = detail.id

            if (eqmag >= 5.5) & (detail.hasProduct('shakemap')):
                outdir = os.path.join('..', 'data', 'ruptures', detail.id)
                if not os.path.exists(outdir):
                    os.makedirs(outdir)
                shake = detail.getProducts('shakemap', source='preferred')[0]
                outfile = os.path.join(outdir, 'rupture.json')
                shake.getContent('rupture.json', outfile)

                # If it is a point source, no need for it so remove it.
                # NOTE: `origin` is assumed to be defined elsewhere in the
                # original script; it is not constructed in this snippet.
                rup = get_rupture(origin, outfile)
                if isinstance(rup, PointRupture):
                    shutil.rmtree(outdir)

        new_file = 'events_comcat.csv'
        eq_df.to_csv(new_file, index=True)
Example #26
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 11 12:00:47 2020
code to download earthquake catalog
remember to load correct environment in order to use the libcomcat library
@author: talongi
"""

from libcomcat.dataframes import get_summary_data_frame, get_detail_data_frame
from libcomcat.search import search, DetailEvent
from datetime import datetime

events = search(starttime=datetime(2006, 10, 30),
                endtime=datetime(2006, 11, 15),
                minlatitude=40.4,
                maxlatitude=40.6,
                minlongitude=-125,
                maxlongitude=-123)

detail_events = get_detail_data_frame(events)

#detail_events.to_csv('/auto/home/talongi/Cascadia/Data_tables/Events/ANSS_1980-2019_detailed.csv')

#%%
#find nans
import numpy as np
import pandas as pd

index_match = []
for i, x in enumerate(detail_events.nc_np1_strike):
    if isinstance(x, str):
        index_match.append(i)
Example #27
def main():
    parser = get_parser()
    args = parser.parse_args()

    # --host and --scenario are mutually exclusive
    if args.host is not None and args.scenario:
        print(
            '--host and --scenario options are mutually exclusive. Please choose one.'
        )
        sys.exit(1)

    setup_logger(args.logfile, args.loglevel)

    if args.eventid:
        detail = get_event_by_id(args.eventid,
                                 includesuperseded=True,
                                 scenario=args.scenario)
        _get_product_from_detail(detail,
                                 args.product,
                                 args.contents,
                                 args.outputFolder,
                                 args.version,
                                 args.source,
                                 list_only=args.list_only)
        sys.exit(0)

    tsum = (args.bounds is not None) + \
        (args.radius is not None) + (args.country is not None)
    if tsum != 1:
        print('Please specify a bounding box, radius, or country code.')
        sys.exit(1)

    latitude = None
    longitude = None
    radiuskm = None
    lonmin = latmin = lonmax = latmax = None

    if args.startTime is None:
        starttime = datetime.utcnow() - timedelta(days=30)
        print('You did not specify a search start time, defaulting to %s' %
              str(starttime))
    else:
        starttime = args.startTime

    if args.endTime is None:
        endtime = datetime.utcnow()
        print('You did not specify a search end time, defaulting to %s' %
              str(endtime))
    else:
        endtime = args.endTime

    bounds = None
    if args.radius:
        latitude = args.radius[0]
        longitude = args.radius[1]
        radiuskm = args.radius[2]

    if args.bounds:
        lonmin, lonmax, latmin, latmax = args.bounds
        # fix longitude bounds when crossing dateline
        if lonmin > lonmax and lonmax >= -180:
            lonmin -= 360
    else:
        lonmin, lonmax, latmin, latmax = None, None, None, None
        bounds = (lonmin, lonmax, latmin, latmax)

    if args.country:
        ccode = args.country
        if not check_ccode(ccode):
            curl = 'https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2'
            fmt = '%s is not a valid ISO 3166 country code. See %s for the list.'
            tpl = (ccode, curl)
            print(fmt % tpl)
            sys.exit(1)
        bounds = get_country_bounds(ccode, args.buffer)  # this returns a list

    minmag = 0.0
    maxmag = 9.9
    if args.magRange:
        minmag = args.magRange[0]
        maxmag = args.magRange[1]

    if isinstance(bounds, tuple) or bounds is None:
        events = search(starttime=starttime,
                        endtime=endtime,
                        updatedafter=args.after,
                        minlatitude=latmin,
                        maxlatitude=latmax,
                        minlongitude=lonmin,
                        maxlongitude=lonmax,
                        latitude=latitude,
                        longitude=longitude,
                        maxradiuskm=radiuskm,
                        catalog=args.catalog,
                        contributor=args.contributor,
                        producttype=args.product,
                        eventtype=args.eventType,
                        maxmagnitude=maxmag,
                        minmagnitude=minmag,
                        scenario=args.scenario,
                        host=args.host)
    else:
        events = []
        for i, tbounds in enumerate(bounds):
            lonmin, lonmax, latmin, latmax = tbounds
            tevents = search(starttime=starttime,
                             endtime=endtime,
                             updatedafter=args.after,
                             minlatitude=latmin,
                             maxlatitude=latmax,
                             minlongitude=lonmin,
                             maxlongitude=lonmax,
                             latitude=latitude,
                             longitude=longitude,
                             maxradiuskm=radiuskm,
                             catalog=args.catalog,
                             contributor=args.contributor,
                             producttype=args.product,
                             eventtype=args.eventType,
                             maxmagnitude=maxmag,
                             minmagnitude=minmag,
                             scenario=args.scenario,
                             host=args.host)
            events += tevents

    if not len(events):
        print('No events found matching your search criteria. Exiting.')
        sys.exit(0)

    if args.country:
        ids = [event.id for event in events]
        lats = [event.latitude for event in events]
        lons = [event.longitude for event in events]
        df = pd.DataFrame({'id': ids, 'latitude': lats, 'longitude': lons})
        df2 = filter_by_country(df, ccode, buffer_km=args.buffer)
        events = [event for event in events if event.id in df2['id'].unique()]

    for event in events:
        logging.debug('Retrieving products for event %s...' % event.id)
        if not event.hasProduct(args.product):
            continue
        try:
            detail = event.getDetailEvent(includesuperseded=True,
                                          scenario=args.scenario)
        except Exception as e:
            print(
                'Failed to retrieve detail event for event %s... continuing.' %
                event.id)
            continue
        _get_product_from_detail(detail,
                                 args.product,
                                 args.contents,
                                 args.outputFolder,
                                 args.version,
                                 args.source,
                                 list_only=args.list_only)

    sys.exit(0)
Example #28
def main():
    parser = get_parser()
    args = parser.parse_args()

    setup_logger(args.logfile, args.loglevel)

    if args.eventid:
        detail = get_event_by_id(args.eventid, catalog=args.catalog)
        try:
            df = get_phase_dataframe(detail, args.catalog)
            filename = save_dataframe(
                df, args.directory, detail, args.format, catalog=args.catalog)
            print('Saved phase data for %s to %s' % (detail.id, filename))
            sys.exit(0)
        except Exception as e:
            fmt = ('Could not extract the phase data due to the '
                   'following error: \n"%s"\n\nExiting.')
            print(fmt % (str(e)))
            sys.exit(1)

    if args.bounds and args.radius:
        print('Please specify either a bounding box OR radius search.')
        sys.exit(1)

    if not os.path.isdir(args.directory):
        os.makedirs(args.directory)

    latitude = None
    longitude = None
    radiuskm = None
    lonmin = latmin = lonmax = latmax = None
    starttime = endtime = None
    if args.radius:
        latitude = args.radius[0]
        longitude = args.radius[1]
        radiuskm = args.radius[2]

    if args.bounds:
        lonmin, lonmax, latmin, latmax = args.bounds
        # fix longitude bounds when crossing dateline
        if lonmin > lonmax and lonmax >= -180:
            lonmin -= 360

    minmag = 0.0
    maxmag = 9.9
    if args.magRange:
        minmag = args.magRange[0]
        maxmag = args.magRange[1]

    events = search(starttime=args.startTime,
                    endtime=args.endTime,
                    updatedafter=args.after,
                    minlatitude=latmin,
                    maxlatitude=latmax,
                    minlongitude=lonmin,
                    maxlongitude=lonmax,
                    latitude=latitude,
                    longitude=longitude,
                    maxradiuskm=radiuskm,
                    catalog=args.catalog,
                    contributor=args.contributor,
                    maxmagnitude=maxmag,
                    minmagnitude=minmag)

    if not len(events):
        print('No events found matching your search criteria. Exiting.')
        sys.exit(0)

    for event in events:
        if not event.hasProduct('phase-data'):
            continue
        try:
            detail = event.getDetailEvent()
            try:
                df = get_phase_dataframe(detail, args.catalog)
            except Exception as e:
                fmt = ('Could not get phase dataframe for '
                       'event %s. Error "%s". Continuing.')
                tpl = (detail.id, str(e))
                print(fmt % tpl)
                continue
            filename = save_dataframe(
                df, args.directory, detail, args.format, catalog=args.catalog)

            print('Saved phase data for %s to %s' % (event.id, filename))
        except Exception as e:
            print('Failed to retrieve phase data for event %s.  Error "%s"... continuing.' % (
                event.id, str(e)))
            continue
Example #29
def main(args):
    
    intervalno = args.intervalno
    directory = args.directory
    if intervalno > 11:
        if args.previous is not None:
            previous = args.previous
        else:
            print ('previous file required at input if interval no > 11.')
            print ('enter previous file with flag -p previousFile at input')
            print ('Exiting ...')
            exit()

    os.system('mkdir %s'%directory)

    if intervalno == 1:
        start = datetime(1900, 1, 1)
        finish = datetime(1970, 1, 1)
    if intervalno == 2:
        start = datetime(1970, 1, 1)
        finish = datetime(1980, 1, 1)
    if intervalno == 3:
        start = datetime(1980, 1, 1)
        finish = datetime(1990, 1, 1)
    if intervalno == 4:
        start = datetime(1990, 1, 1)
        finish = datetime(1995, 1, 1)
    if intervalno == 5:
        start = datetime(1995, 1, 1)
        finish = datetime(2000, 1, 1)
    if intervalno == 6:
        start = datetime(2000, 1, 1)
        finish = datetime(2005, 1, 1)
    if intervalno == 7:
        start = datetime(2005, 1, 1)
        finish = datetime(2010, 1, 1)
    if intervalno == 8:
        start = datetime(2010, 1, 1)
        finish = datetime(2012, 1, 1)
    if intervalno == 9:
        start = datetime(2012, 1, 1)
        finish = datetime(2014, 1, 1)
    if intervalno == 10:
        start = datetime(2014, 1, 1)
        finish = datetime(2016, 1, 1)
    if intervalno == 11:
        start = datetime(2016, 1, 1)
        finish = datetime.utcnow()
    if intervalno > 11:
        predata = pd.read_csv(previous)
        predata['time'] = pd.to_datetime(predata['time'])
        start = predata['time'].max()
        finish = datetime.utcnow()

    print (start)
    print (finish)

    #define magnitude range of earthquakes to search over for shallow earthquakes
    #(these are whatever magnitude range is defined in the catalogues)
    min_mag = 3.0
    max_mag = 9.9
    magrange = (min_mag, max_mag)

    #define depth to search over
    min_sh = 0
    max_sh = 900
    depthrange_sh = (min_sh, max_sh)

    #define grid size and extent of search (representative of lower left corner)
    if intervalno > 11:
        grid = 50.0
    else:
        grid = 10.0
    lonmin, lonmax = -180, 180
    latmin, latmax = -75, 75

    #define grid of searches (representative of lower left corner)
    xall = np.arange(lonmin,lonmax,grid)
    yall = np.arange(latmin,latmax,grid)
    lons1,lats1 = np.meshgrid(xall,yall)

    #flatten into list of lower left corners
    lllons = lons1.flatten()
    lllats = lats1.flatten()

    #define lists representing upper right corners
    urlons = lllons+grid
    urlats = lllats+grid

    #combine into one array (lonmin,lonmax,latmin,latmax)
    bounds = np.zeros((len(lllons),4))
    bounds[:,0] = lllons
    bounds[:,1] = urlons
    bounds[:,2] = lllats
    bounds[:,3] = urlats
    iterations = len(bounds)

    lllatfail = []
    lllonfail = []
    urlatfail = []
    urlonfail = []

    totdf = pd.DataFrame()
    num = 0
    for i, line in enumerate(bounds):

        bounds = line

        #to follow along with the progress of data querying
        #since there are a lot of iterations, only print every so often
        k = 100
        if i % k == 0:
            
            print ('Now querying grid %s of %s' % (i, iterations))
        
        searchlist = search(starttime=start, endtime=finish,
                            minlatitude=bounds[2], maxlatitude=bounds[3],
                            minlongitude=bounds[0], maxlongitude=bounds[1],
                            minmagnitude=3.0)

        if len(searchlist) > 0:
            detaildf = get_detail_data_frame(searchlist, get_tensors='preferred',
                                             get_moment_supplement=True)

            totdf = pd.concat([totdf,detaildf])
            print (bounds,len(detaildf),len(totdf))

            if len(totdf) > 5000:
                totdf.to_csv('%s/%s_%i.%i.csv'%(directory,directory,intervalno,num),header=True,index=False,na_rep=np.nan)
                num += 1
                totdf = pd.DataFrame()


    totdf.to_csv('%s/%s_%i.csv'%(directory,directory,intervalno),header=True,index=False,na_rep=np.nan)
Example #30
def main(args):

    intervalno = args.intervalno
    directory = args.directory
    if intervalno > 11:
        if args.previous is not None:
            previous = args.previous
        else:
            print('previous file required at input if interval no > 11.')
            print('enter previous file with flag -p previousFile at input')
            print('Exiting ...')
            exit()

    os.system('mkdir %s' % directory)

    if intervalno == 1:
        start = datetime(1900, 1, 1)
        finish = datetime(1970, 1, 1)
    if intervalno == 2:
        start = datetime(1970, 1, 1)
        finish = datetime(1980, 1, 1)
    if intervalno == 3:
        start = datetime(1980, 1, 1)
        finish = datetime(1990, 1, 1)
    if intervalno == 4:
        start = datetime(1990, 1, 1)
        finish = datetime(1995, 1, 1)
    if intervalno == 5:
        start = datetime(1995, 1, 1)
        finish = datetime(2000, 1, 1)
    if intervalno == 6:
        start = datetime(2000, 1, 1)
        finish = datetime(2005, 1, 1)
    if intervalno == 7:
        start = datetime(2005, 1, 1)
        finish = datetime(2010, 1, 1)
    if intervalno == 8:
        start = datetime(2010, 1, 1)
        finish = datetime(2012, 1, 1)
    if intervalno == 9:
        start = datetime(2012, 1, 1)
        finish = datetime(2014, 1, 1)
    if intervalno == 10:
        start = datetime(2014, 1, 1)
        finish = datetime(2016, 1, 1)
    if intervalno == 11:
        start = datetime(2016, 1, 1)
        finish = datetime.utcnow()
    if intervalno > 11:
        predata = pd.read_csv(previous)
        predata['time'] = pd.to_datetime(predata['time'])
        start = predata['time'].max()
        finish = datetime.utcnow()

    print(start)
    print(finish)

    #define magnitude range of earthquakes to search over for shallow earthquakes
    #(these are whatever magnitude range is defined in the catalogues)
    min_mag = 3.0
    max_mag = 9.9
    magrange = (min_mag, max_mag)

    #define depth to search over
    min_sh = 0
    max_sh = 900
    depthrange_sh = (min_sh, max_sh)

    #define grid size and extent of search (representative of lower left corner)
    if intervalno > 11:
        grid = 50.0
    else:
        grid = 10.0
    lonmin, lonmax = -180, 180
    latmin, latmax = -75, 75

    #define grid of searches (representative of lower left corner)
    xall = np.arange(lonmin, lonmax, grid)
    yall = np.arange(latmin, latmax, grid)
    lons1, lats1 = np.meshgrid(xall, yall)

    #flatten into list of lower left corners
    lllons = lons1.flatten()
    lllats = lats1.flatten()

    #define lists representing upper right corners
    urlons = lllons + grid
    urlats = lllats + grid

    #combine into one array (lonmin,lonmax,latmin,latmax)
    bounds = np.zeros((len(lllons), 4))
    bounds[:, 0] = lllons
    bounds[:, 1] = urlons
    bounds[:, 2] = lllats
    bounds[:, 3] = urlats
    iterations = len(bounds)

    lllatfail = []
    lllonfail = []
    urlatfail = []
    urlonfail = []

    totdf = pd.DataFrame()
    num = 0
    for i, line in enumerate(bounds):

        bounds = line

        #to follow along with the progress of data querying
        #since there are a lot of iterations, only print every so often
        k = 100
        if i % k == 0:

            print('Now querying grid %s of %s' % (i, iterations))

        searchlist = search(starttime=start,
                            endtime=finish,
                            minlatitude=bounds[2],
                            maxlatitude=bounds[3],
                            minlongitude=bounds[0],
                            maxlongitude=bounds[1],
                            minmagnitude=3.0)

        if len(searchlist) > 0:
            detaildf = get_detail_data_frame(searchlist,
                                             get_tensors='preferred',
                                             get_moment_supplement=True)

            totdf = pd.concat([totdf, detaildf])
            print(bounds, len(detaildf), len(totdf))

            if len(totdf) > 5000:
                totdf.to_csv('%s/%s_%i.%i.csv' %
                             (directory, directory, intervalno, num),
                             header=True,
                             index=False,
                             na_rep=np.nan)
                num += 1
                totdf = pd.DataFrame()

    totdf.to_csv('%s/%s_%i.csv' % (directory, directory, intervalno),
                 header=True,
                 index=False,
                 na_rep=np.nan)
Example #31
def get_event_comcat(shakefile, timewindow=60, degwindow=0.3, magwindow=0.2):
    """
    Find an event in comcat, searching first by event id and if that
    fails searching by magnitude, time, and location.

    Args:
        shakefile (str): path to shakemap .xml file of event to find
        timewindow (float): width of time window to search around time defined
            in shakefile (in seconds)
        degwindow (float): width of area to search around location specified in
            shakefile (in degrees).
        magwindow (float): width of magnitude window to search around the
            magnitude specified in shakefile.

    Returns:
        None if event not found, else tuple (info, detail, shakemap) where,
            * info: json formatted dictionary of info.json for the event
            * detail: event detail from comcat
            * shakemap: shakemap of event found (from comcat)

    """
    header_dicts = getHeaderData(shakefile)
    grid_dict = header_dicts[0]
    event_dict = header_dicts[1]
    version = grid_dict['shakemap_version']
    try:
        eid = event_dict['event_id']
        net = 'us'
        if 'event_network' in event_dict:
            net = event_dict['event_network']
        if not eid.startswith(net):
            eid = net + eid
        detail = get_event_by_id(eid, includesuperseded=True)
    except Exception:
        lat = event_dict['lat']
        lon = event_dict['lon']
        mag = event_dict['magnitude']
        time = event_dict['event_timestamp']
        starttime = time - timedelta(seconds=timewindow)
        endtime = time + timedelta(seconds=timewindow)
        minlat = lat - degwindow
        minlon = lon - degwindow
        maxlat = lat + degwindow
        maxlon = lon + degwindow
        minmag = max(0, mag - magwindow)
        maxmag = min(10, mag + magwindow)
        events = search(starttime=starttime,
                        endtime=endtime,
                        minmagnitude=minmag,
                        maxmagnitude=maxmag,
                        minlatitude=minlat,
                        minlongitude=minlon,
                        maxlatitude=maxlat,
                        maxlongitude=maxlon)
        if not len(events):
            return None
        detail = events[0].getDetailEvent()
    allversions = detail.getProducts('shakemap', version=VersionOption.ALL)
    # Find the right version
    vers = [allv.version for allv in allversions]
    idx = np.where(np.array(vers) == version)[0][0]
    shakemap = allversions[idx]
    infobytes, url = shakemap.getContentBytes('info.json')
    info = json.loads(infobytes.decode('utf-8'))
    return info, detail, shakemap
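# A hedged usage sketch for get_event_comcat() above; the grid.xml path is a
# placeholder. The function returns None when no matching event is found, so
# the result must be checked before unpacking the (info, detail, shakemap)
# tuple.
result = get_event_comcat('grid.xml', timewindow=120)
if result is not None:
    info, detail, shakemap = result
    print(detail.id)  # ComCat id of the matched DetailEvent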
Example #32
def main():
    parser = get_parser()
    args = parser.parse_args()

    setup_logger(args.logfile, args.loglevel)

    latitude = None
    longitude = None
    radiuskm = None
    lonmin = latmin = lonmax = latmax = None
    if args.radius:
        latitude = args.radius[0]
        longitude = args.radius[1]
        radiuskm = args.radius[2]

    if args.bounds:
        lonmin, lonmax, latmin, latmax = args.bounds
        # fix longitude bounds when crossing dateline
        if lonmin > lonmax and lonmax >= -180:
            lonmin -= 360
    else:
        lonmin, lonmax, latmin, latmax = None, None, None, None

    minmag = 0.0
    maxmag = 9.9
    if args.magRange:
        minmag = args.magRange[0]
        maxmag = args.magRange[1]

    if args.getCount:
        nevents = count(starttime=args.startTime,
                        endtime=args.endTime,
                        updatedafter=args.after,
                        minlatitude=latmin,
                        maxlatitude=latmax,
                        minlongitude=lonmin,
                        maxlongitude=lonmax,
                        latitude=latitude,
                        longitude=longitude,
                        maxradiuskm=radiuskm,
                        maxmagnitude=maxmag,
                        minmagnitude=minmag,
                        verbose=args.verbose)
        print('There are %i events matching input criteria.' % nevents)
        sys.exit(0)

    if args.bounds and args.radius:
        print('Please specify either a bounding box OR radius search.')
        sys.exit(1)

    events = search(starttime=args.startTime,
                    endtime=args.endTime,
                    updatedafter=args.after,
                    minlatitude=latmin,
                    maxlatitude=latmax,
                    minlongitude=lonmin,
                    maxlongitude=lonmax,
                    latitude=latitude,
                    longitude=longitude,
                    maxradiuskm=radiuskm,
                    maxmagnitude=maxmag,
                    minmagnitude=minmag,
                    verbose=args.verbose)

    if not len(events):
        print('No events found matching your search criteria. Exiting.')
        sys.exit(0)

    # create a dataframe with these columns - we'll add more later
    df = pd.DataFrame(columns=['id', 'time', 'lat', 'lon', 'depth',
                               'location', 'url', 'hypo_src'])
    ievent = 1
    errors = []

    for event in events:
        id_list = event['ids'].split(',')[1:-1]
        source = event.id.replace(event['code'], '')
        row = pd.Series(data={'id': event.id,
                              'time': event.time,
                              'lat': event.latitude,
                              'lon': event.longitude,
                              'depth': event.depth,
                              'location': event.location,
                              'url': event.url,
                              'hypo_src': source})

        imag = 1

        if args.verbose:
            tpl = (event.id, ievent, len(events), len(id_list))
            print('Parsing event %s (%i of %i) - %i origins' % tpl)
        ievent += 1
        mags = {}
        for eid in id_list:
            magtypes, msg = get_all_mags(eid)
            if len(msg):
                errors.append(msg)
                if args.verbose:
                    print(msg)
            mags.update(magtypes)
            imag += 1
        row = pd.concat([row, pd.Series(mags)])
        # pandas >= 2.0 removed DataFrame.append; concat is equivalent here
        df = pd.concat([df, row.to_frame().T], ignore_index=True)

    if len(errors):
        print('Some events could not be retrieved:')
        for error in errors:
            print('\t%s' % error)

    if args.format == 'excel':
        df.to_excel(args.filename, index=False)
    else:
        df.to_csv(args.filename, index=False)
    print('%i records saved to %s.' % (len(df), args.filename))
    sys.exit(0)
Example #33
from datetime import datetime
from libcomcat.dataframes import get_detail_data_frame
from libcomcat.search import search, get_event_by_id

summary_events = search(starttime=datetime(2010, 1, 1, 00, 00),
                        endtime=datetime(2019, 12, 31, 23, 59),
                        minlatitude=45,
                        maxlatitude=72,
                        minlongitude=-180,
                        maxlongitude=-125,
                        minmagnitude=4)
detail_df = get_detail_data_frame(summary_events,
                                  get_tensors='all',
                                  get_focals='all',
                                  get_moment_supplement=True)
detail_df.to_pickle('usgs_alaska_2010.p')
Example #34
def searchComCatforLandslides(starttime, endtime, lslat, lslon, network,
                              station):
    """
    Returns dataframe of landslides that were recorded in ComCat between 
    starttime and endtime, in addition to other ComCat info.
    INPUTS
    starttime (UTCDateTime) - earliest time earthquakes in ComCat must 
        have occurred to be removed from stream object
    endtime (UTCDateTime) - latest time earthquakes in ComCat must have occurred
        to be removed from stream object
    lslat (float) - latitudinal coordinate of landslide (make negative for south
        of Equator)
    lslon (float) - longitudinal coordinate of landslide (make negative for west
        of Prime Meridian)
    network (str) - seismic network code corresponding to station closest to 
        landslide, comma separated in a single string
        Example: 'NF,IW,RE,TA,UU'
    station (str) - name of seismic station closest to landslide, three letter
        string that is not case-sensitive
        Example: 'BFR,WOY,TCR,WTM'
    OUTPUT
    lsdf - pandas dataframe containing event IDs of landslides and potential
        landslides in ComCat ('id'), event dates and times ('date'), event 
        coordinates ('latitude' and 'longitude'), magnitude ('magnitude'), depth 
        ('depth'), distance to landslide we are searching for in km ('distance'),
        and event type in ComCat ('eventtype'). 
    """

    # Convert UTCDateTimes into datetimes
    eqstartdt = starttime.datetime
    eqenddt = endtime.datetime

    # Get list of earthquakes during time window from ComCat
    tempquakes = search(starttime=eqstartdt, endtime=eqenddt, minmagnitude=2.0)

    quakeids = []
    quakedates = []
    quakelats = []
    quakelons = []
    quakedepths = []
    quakemags = []
    quakedistskm = []
    quaketypes = []

    # Save attributes to lists
    for quake in tempquakes:
        quakeids.append(quake.id)
        quakedates.append(quake.time)
        quakelats.append(quake.latitude)
        quakelons.append(quake.longitude)
        quakedepths.append(quake.depth)
        quakemags.append(quake.magnitude)
        # SummaryEvent has no distance attribute; compute the distance to the
        # landslide the same way the landslide loop below does
        dkm, ddeg = calcCoordDistance(lslat, lslon, quake.latitude,
                                      quake.longitude)
        quakedistskm.append(dkm)
        quaketypes.append('earthquake')

    # Get list of landslides during time window from ComCat
    tempslides = search(starttime=eqstartdt,
                        endtime=eqenddt,
                        minmagnitude=2.0,
                        eventtype='landslide')

    # Save attributes to lists
    for slide in tempslides:
        quakeids.append(slide.id)
        quakedates.append(slide.time)
        quakelats.append(slide.latitude)
        quakelons.append(slide.longitude)
        quakedepths.append(slide.depth)
        quakemags.append(slide.magnitude)
        quaketypes.append('landslide')

        # Calculate distance between this event and the target landslide
        dkm, ddeg = calcCoordDistance(lslat, lslon, slide.latitude,
                                      slide.longitude)
        quakedistskm.append(dkm)

    # Combine quake lists into pandas dataframe
    lsdf = pd.DataFrame({
        'id': quakeids,
        'date': quakedates,
        'latitude': quakelats,
        'longitude': quakelons,
        'depth': quakedepths,
        'magnitude': quakemags,
        'distance': quakedistskm,
        'event type': quaketypes
    })

    if len(lsdf) > 0:
        # Filter events by magnitude
        minmag = 2.0
        maxmag = 5.0
        # Create dataframe to hold quakes we don't think are landslides
        remove_lsdf = lsdf[(lsdf.magnitude > maxmag) |
                           (lsdf.magnitude < minmag)]
        lsdf = lsdf[(lsdf.magnitude <= maxmag) & (lsdf.magnitude >= minmag)]

        # Filter events by depth (looking for shallow earthquakes)
        maxdepth = 7.0  # km
        remove_lsdf = pd.concat([remove_lsdf, lsdf[lsdf.depth > maxdepth]])
        lsdf = lsdf[lsdf.depth <= maxdepth]

        # Filter events by distance from landslide
        maxdist = 10.0  # km
        remove_lsdf = pd.concat([remove_lsdf, lsdf[lsdf.distance > maxdist]])
        lsdf = lsdf[lsdf.distance <= maxdist]

        # Include events if event type is 'landslide', regardless of magnitude,
        # depth, or distance
        lsdf = pd.concat(
            [lsdf, remove_lsdf[remove_lsdf['event type'] == 'landslide']])

    return lsdf
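A hedged usage sketch; the time window, landslide coordinates, network, and station below are hypothetical placeholders:

from obspy import UTCDateTime

lsdf = searchComCatforLandslides(starttime=UTCDateTime(2017, 6, 17),
                                 endtime=UTCDateTime(2017, 6, 18),
                                 lslat=58.84, lslon=-137.54,
                                 network='AK', station='BESE')
print(lsdf[['id', 'magnitude', 'distance', 'event type']])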
Example #35
def test_get_detail_data_frame():
    events = search(starttime=datetime(1994,6,1),
                    endtime=datetime(1994,10,6),
                    minmagnitude=8.0,maxmagnitude=9.0)
    all_mags = get_detail_data_frame(events,get_all_magnitudes=True,verbose=True)
    assert all_mags.iloc[0]['magnitude'] == 8.2
Example #36
def getQuakeDict(starttime, endtime, network, station):
    """
    Determines when earthquakes from ComCat arrived at seismic station closest
    to landslide, returns dictionary containing earthquake arrival times and
    other identifying information.
    INPUTS
    starttime (UTCDateTime) - earliest time earthquakes in ComCat must 
        have occurred to be returned
    endtime (UTCDateTime) - latest time earthquakes in ComCat must have 
        occurred to be returned
    network (str) - seismic network code corresponding to station closest to 
        landslide, comma separated in a single string
        Example: 'NF,IW,RE,TA,UU'
    station (str) - name of seismic station closest to landslide, three letter
        string that is not case-sensitive
        Example: 'BFR,WOY,TCR,WTM'
    OUTPUT
    quakedict - dictionary containing full earthquake summaries from ComCat
        ('summary'), earthquake IDs in ComCat ('id'), dates and times ('date'),
        coordinates of earthquake epicenter ('latitude' and 'longitude'), depth
        of earthquake hypocenter ('depth'), distance of earthquake to landslide
        in km ('distance'), earthquake magnitude ('magnitude'), calculated 
        arrival time of earthquake at seismic station closest to landslide
        ('arrival time'), and event type ('event type') -- 'earthquake' for 
        all events here.
    """

    # Get coordinates of nearest station to landslide
    stationlat, stationlon = getStationCoordinates(network, station)

    # Get list of earthquakes that happened between starttime and endtime
    # from ComCat
    eqstartdt = starttime.datetime
    eqenddt = endtime.datetime
    tempquakes = search(starttime=eqstartdt,
                        endtime=eqenddt,
                        minmagnitude=0.5,
                        mindepth=0.0)

    quakeids = []
    quakedates = []
    quakelats = []
    quakelons = []
    quakedepths = []
    quakemags = []

    # Save attributes to lists
    for quake in tempquakes:
        quakeids.append(quake.id)
        quakedates.append(quake.time)
        quakelats.append(quake.latitude)
        quakelons.append(quake.longitude)
        quakedepths.append(quake.depth)
        quakemags.append(quake.magnitude)

    # Assemble the attribute lists into a dictionary; 'distance' and
    # 'arrival time' are filled in after computing arrival times below
    quakedict = {
        'id': quakeids,
        'date': quakedates,
        'latitude': quakelats,
        'longitude': quakelons,
        'depth': quakedepths,
        'magnitude': quakemags,
        'event type': ['earthquake'] * len(quakeids),
        'distance': [],
        'arrival time': [],
    }

    quakedistskm, arrivaltimes = findEQArrivalTimes(network, station,
                                                    quakedict)
    quakedict['distance'] = list(quakedistskm)
    quakedict['arrival time'] = list(arrivaltimes)

    return quakedict
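Because every value in quakedict is a list of equal length, it converts directly to a DataFrame. A minimal sketch; the window is hypothetical, and the network and station are taken from the docstring examples:

import pandas as pd
from obspy import UTCDateTime

quakedict = getQuakeDict(UTCDateTime(2018, 1, 1), UTCDateTime(2018, 1, 2),
                         network='UU', station='BFR')
quakedf = pd.DataFrame(quakedict)
print(quakedf.sort_values('distance').head())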
Example #37
def main():
    pd.set_option('display.width', 1000)
    pd.set_option('display.max_rows', 1000)
    # -1 is no longer accepted by recent pandas; None means "no limit"
    pd.set_option('display.max_colwidth', None)
    pd.set_option('display.max_columns', 1000)
    pd.set_option("display.colheader_justify", "left")
    parser = get_parser()
    args = parser.parse_args()

    setup_logger(args.logfile, args.loglevel)

    # make sure that input products are in the list of supported products
    if not set(args.products) <= set(PRODUCTS):
        unsupported = list(set(args.products) - set(PRODUCTS))
        fmt = 'The following event products are not supported: %s'
        print(fmt % ','.join(unsupported))
        sys.exit(1)

    # make sure that excluded products are in the list of supported products
    if not set(args.exclude_products) <= set(PRODUCTS):
        unsupported = list(set(args.exclude_products) - set(PRODUCTS))
        fmt = ('The following event products you want to exclude '
               'are not supported: %s')
        print(fmt % ','.join(unsupported))
        sys.exit(1)

    # web output and directory output are mutually exclusive
    if args.outdir and args.web:
        msg = '''The -o and -w options are mutually exclusive, meaning
        that you cannot choose to write files to a directory and print
        HTML output to the screen simultaneously. Please choose one of
        those two options and try again.
        '''
        print(msg)
        sys.exit(1)

    if args.products:
        products = args.products
    else:
        products = PRODUCTS

    if args.exclude_products:
        products = set(products) - set(args.exclude_products)

    try:
        dataframe, event = get_history_data_frame(args.eventid, products)
    except Exception as e:
        fmt = '''Failed to retrieve event history data for
        event %s. Error message is as follows. Exiting.
        "%s"
        '''
        tpl = (args.eventid, str(e))
        print(fmt % tpl)
        sys.exit(1)

    if args.radius:
        radius_km = args.radius[0]
        radius_secs = args.radius[1]
        stime = event.time - timedelta(seconds=radius_secs)
        etime = event.time + timedelta(seconds=radius_secs)

        eventlist = search(starttime=stime,
                           endtime=etime,
                           latitude=event.latitude,
                           longitude=event.longitude,
                           maxradiuskm=radius_km)
        for tevent in eventlist:
            if tevent.id == event.id:
                continue
            detail = tevent.getDetailEvent(includesuperseded=True)
            # get_history_data_frame returns (dataframe, event); keep the frame
            tframe, _ = get_history_data_frame(detail, products)
            newframe = _mod_tframe(event, tevent, tframe)
            dataframe = pd.concat([dataframe, newframe], ignore_index=True)

        # now re-sort by update time
        dataframe = dataframe.sort_values('Update Time')
        dataframe = dataframe[PRODUCT_COLUMNS]
    else:
        # since "Authoritative Event ID" and "Associated" columns are only applicable when
        # we're including other events in our results, drop those columns
        # if we're not doing that.
        drop_columns = ['Authoritative Event ID', 'Associated']
        dataframe = dataframe.drop(drop_columns, axis='columns')

    if args.outdir is not None and not os.path.isdir(args.outdir):
        os.makedirs(args.outdir)

    if args.split:
        df_products = dataframe['Product'].unique().tolist()
        available_products = set(df_products) & set(products)
        # TODO: Consider merging phase-data and origin products
        # somehow in this process
        for product in available_products:
            pframe = split_history_frame(dataframe, product=product)
            simplify_times(pframe)
            if args.web:
                web_print(event, pframe)
            else:
                outfile = save_dataframe(args.outdir,
                                         args.format,
                                         event,
                                         pframe,
                                         product=product)
                print('%i rows saved to %s' % (len(pframe), outfile))
        sys.exit(0)

    if args.outdir:
        outfile = save_dataframe(args.outdir,
                                 args.format,
                                 event,
                                 dataframe,
                                 product=None)

        print('%i rows saved to %s' % (len(dataframe), outfile))
    elif args.web:
        simplify_times(dataframe)
        web_print(event, dataframe)

    sys.exit(0)
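The product validation above is plain set algebra: a requested list is valid only if it is a subset of the supported products. A standalone sketch of the pattern (the product names are hypothetical):

SUPPORTED = {'origin', 'shakemap', 'dyfi'}  # hypothetical supported set
requested = ['shakemap', 'foo']

if not set(requested) <= SUPPORTED:  # subset test
    unsupported = sorted(set(requested) - SUPPORTED)
    print('The following event products are not supported: %s'
          % ','.join(unsupported))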
Example #38
def main(args):
    events = search(starttime=args.start,
                    endtime=args.end,
                    minlatitude=args.latmin,
                    maxlatitude=args.latmax,
                    minlongitude=args.lonmin,
                    maxlongitude=args.lonmax,
                    producttype='shakemap',
                    maxmagnitude=args.magRange[1],
                    minmagnitude=args.magRange[0])
    print('%i events found containing ShakeMaps.' % len(events))

    # Create the GeoDict to which the ShakeMaps will be resampled
    stack_dict = GeoDict.createDictFromBox(args.lonmin, args.lonmax,
                                           args.latmin, args.latmax,
                                           args.resolution, args.resolution)
    nrows, ncols = stack_dict.ny, stack_dict.nx
    imts = {}
    layer_names = {}
    event_info = {}
    layer_count = {}
    ic = 0
    for event in events:
        tnow = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
        if ic % 10 == 0:
            print('%s: Attempting to fetch ShakeMap for %s (%i of %i)' %
                  (tnow, event.id, ic + 1, len(events)))
        ic += 1
        event_info[event.id] = event.toDict()
        try:
            detail = event.getDetailEvent()
        except Exception as e:
            fmt = 'Could not retrieve detail data for event %s, error "%s". Skipping.'
            print(fmt % (event.id, str(e)))
            continue
        if not detail.hasProduct('shakemap'):
            print(
                'Event %s appears not to have a ShakeMap after all... skipping.'
                % detail.id)
            continue
        shakemap = detail.getProducts('shakemap')[0]
        try:
            f, gridfile = tempfile.mkstemp()
            os.close(f)
            shakemap.getContent('grid.xml', gridfile)
            shakegrid = ShakeGrid.load(gridfile,
                                       samplegeodict=stack_dict,
                                       resample=True,
                                       doPadding=True)
            imtlist = list(shakegrid.getLayerNames())

            # remove the things that are not ground motions
            kill_list = ['stdpga', 'urat', 'svel']
            for layer in kill_list:
                if layer in imtlist:
                    imtlist.remove(layer)

            for imt in imtlist:
                imtdata = shakegrid.getLayer(imt).getData()
                if imt not in imts:
                    imts[imt] = np.zeros((nrows, ncols, len(events)))
                    layer_count[imt] = 0
                    idx = 0
                    layer_names[imt] = [event.id]
                else:
                    idx = layer_count[imt] + 1
                    layer_names[imt].append(event.id)
                    layer_count[imt] = layer_count[imt] + 1
                imts[imt][:, :, idx] = imtdata
        except Exception as e:
            print('Error fetching ShakeMap grid from %s -  "%s".  Skipping.' %
                  (event.id, str(e)))
        finally:
            os.remove(gridfile)

    # make sure all imts have valid grids in each vertical layer
    # trim off any layers that don't have any data in them.
    for imtname, imtcube in imts.items():
        # layer_count holds the index of the last filled layer, so the
        # number of valid layers is layer_count + 1
        nlayers = layer_count[imtname] + 1
        if nlayers < len(events):
            imts[imtname] = imtcube[:, :, 0:nlayers]

    # now create an HDF file, and stuff our data and metadata into it
    stack_file = GridHDFContainer.create(args.outputfile)
    stack_file.setDictionary('layer_names', layer_names)
    stack_file.setDictionary('event', event_info)
    metadata = stack_dict.asDict()
    for imtname, imtcube in imts.items():
        stack_file.setArray(imtname,
                            imtcube,
                            metadata=metadata,
                            compression=True)

    stack_file.close()
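A hedged sketch for inspecting the resulting file; it assumes only that GridHDFContainer writes ordinary HDF5 groups and datasets, and uses h5py's generic traversal rather than any container-specific API:

import h5py

# 'shake_stack.hdf' is a hypothetical output filename
with h5py.File('shake_stack.hdf', 'r') as f:
    f.visit(print)  # print the name of every group and dataset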