Example 1
def main(args):

    failed = args.failed
    queried = args.queried

    searchlist = []
    idlist = []
    badlist = []
    for filename in os.listdir(failed):
        print('searching through terminal output %s' % filename)
        with open('%s/%s' % (failed, filename)) as fp:
            for line in fp:
                if line.startswith('Failed'):
                    f, t, g, d, v, o, e, id_no = line.split()
                    try:
                        # retry the event ID against ComCat
                        searchobj = get_event_by_id(id_no)
                        searchlist.append(searchobj)
                        idlist.append(id_no)
                    except Exception:
                        # event still could not be retrieved
                        badlist.append(id_no)

    print('getting details for %i events: ' % (len(idlist)), idlist)
    print('these events %i still failed: ' % (len(badlist)), badlist)
    detaildf = get_summary_data_frame(searchlist)
    detaildf.to_csv('%s/%sfailed.csv' % (queried, queried),
                    header=True,
                    index=False,
                    na_rep=np.nan)
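Example 1 omits its imports; the lines below are a guess at what it needs. The libcomcat module paths are assumed from the function names, not taken from the original snippet.

import os

import numpy as np

# assumed module paths; adjust to the actual package layout
from libcomcat.search import get_event_by_id
from libcomcat.dataframes import get_summary_data_frame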
Example 2
def test_get_summary_data_frame():
    cassettes, datadir = get_datadir()
    tape_file = os.path.join(cassettes, 'dataframes_summary.yaml')
    with vcr.use_cassette(tape_file):
        events = search(starttime=datetime(1994, 6, 1),
                        endtime=datetime(1994, 10, 6),
                        minmagnitude=8.0, maxmagnitude=9.0)

        df = get_summary_data_frame(events)
        assert len(df) == 2
        assert df.iloc[0]['magnitude'] == 8.2
Example 3
def test_get_summary_data_frame():
    datadir = get_datadir()
    tape_file = os.path.join(datadir, 'vcr_summary_frame.yaml')
    with vcr.use_cassette(tape_file):
        events = search(starttime=datetime(1994, 6, 1),
                        endtime=datetime(1994, 10, 6),
                        minmagnitude=8.0, maxmagnitude=9.0, verbose=True)

        df = get_summary_data_frame(events)
        assert len(df) == 2
        assert df.iloc[0]['magnitude'] == 8.2
Example 4
def test_get_summary_data_frame():
    cassettes, datadir = get_datadir()
    tape_file = os.path.join(cassettes, "dataframes_summary.yaml")
    with vcr.use_cassette(tape_file, record_mode="new_episodes"):
        events = search.search(
            starttime=datetime(1994, 6, 1),
            endtime=datetime(1994, 10, 6),
            minmagnitude=8.0,
            maxmagnitude=9.0,
        )

        df = get_summary_data_frame(events)
        assert len(df) == 2
        assert df.iloc[0]["magnitude"] == 8.2
Example 5
    def updateSequences(self, stime):
        etime = stime + timedelta(days=1)
        events = search(starttime=stime,
                        endtime=etime,
                        minlatitude=-90,
                        maxlatitude=90,
                        minlongitude=-180,
                        maxlongitude=180,
                        minmagnitude=0.0,
                        maxmagnitude=9.9)
        todayframe = get_summary_data_frame(events)
        todaydata = get_day_counts(GDICT, todayframe)
        todaygrid = Grid2D(data=todaydata, geodict=GDICT)
        for row in range(0, GDICT.ny):
            for col in range(0, GDICT.nx):
                clat, clon = GDICT.getLatLon(row, col)
                tvalue = todaygrid._data[row, col]
                mvalue = self._meangrid._data[row, col]
                svalue = self._stdgrid._data[row, col]
                # thresh = tvalue > mvalue + svalue * 3
                thresh = tvalue > MINEQ
                xmin = clon - GDICT.dx / 2
                xmax = clon + GDICT.dx / 2
                ymin = clat - GDICT.dy / 2
                ymax = clat + GDICT.dy / 2
                if thresh:
                    # select the day's events that fall inside this grid cell
                    c1 = todayframe['latitude'] > ymin
                    c2 = todayframe['latitude'] <= ymax
                    c3 = todayframe['longitude'] > xmin
                    c4 = todayframe['longitude'] <= xmax
                    cluster = todayframe[c1 & c2 & c3 & c4].copy()
                    class_frame, pproj = self.get_clusters(cluster, clon, clat)
                    self.insertSequences(class_frame, pproj)
        # call a method that filters out clusters that don't match the
        # definition of an earthquake sequence.
        self.cleanSequences()
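The cell selection in updateSequences is ordinary pandas boolean indexing against the cell's bounding box. Here is a minimal standalone sketch of the same masking pattern with synthetic data (no libcomcat or grid dependencies; the cell center and size are made-up values):

import pandas as pd

# synthetic catalog, for illustration only
frame = pd.DataFrame({'latitude': [10.1, 10.6, 11.2],
                      'longitude': [20.2, 20.4, 21.0]})

# hypothetical cell center and cell size
clat, clon, dy, dx = 10.5, 20.5, 1.0, 1.0
ymin, ymax = clat - dy / 2, clat + dy / 2
xmin, xmax = clon - dx / 2, clon + dx / 2

in_cell = ((frame['latitude'] > ymin) & (frame['latitude'] <= ymax) &
           (frame['longitude'] > xmin) & (frame['longitude'] <= xmax))
cluster = frame[in_cell].copy()
print(cluster)  # rows whose epicenters fall inside the cell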
Example 6
def main():
    parser = get_parser()
    args = parser.parse_args()

    # make sure we don't have -e option AND --numdays option
    if args.endTime is not None and args.numdays is not None:
        msg = ('You must specify end time or number of days since '
               'start time, not both. Exiting.')
        print(msg)
        sys.exit(1)

    if not args.endTime and args.numdays:
        args.endTime = args.startTime + timedelta(args.numdays)

    setup_logger(args.logfile, args.loglevel)

    tsum = ((args.bounds is not None) +
            (args.radius is not None) + (args.country is not None))
    if tsum != 1:
        logging.error('Please specify exactly one of: a bounding box, '
                      'a radius, or a country code.')
        sys.exit(1)

    latitude = None
    longitude = None
    radiuskm = None
    lonmin = latmin = lonmax = latmax = None
    bounds = None
    if args.radius:
        latitude = args.radius[0]
        longitude = args.radius[1]
        radiuskm = args.radius[2]

    if args.bounds:
        lonmin, lonmax, latmin, latmax = args.bounds
        # fix longitude bounds when crossing the dateline
        if lonmin > lonmax and lonmax >= -180:
            lonmin -= 360
    else:
        # no explicit bounding box: keep the individual limits as None and
        # record an all-None tuple; a country code below may replace this
        # with a list of bounding boxes.
        bounds = (lonmin, lonmax, latmin, latmax)

    if args.country:
        ccode = args.country
        if not check_ccode(ccode):
            curl = 'https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2'
            fmt = ('%s is not a valid ISO 3166 country code. '
                   'See %s for the list.')
            tpl = (ccode, curl)
            logging.error(fmt % tpl)
            sys.exit(1)
        bounds = get_country_bounds(ccode, args.buffer)  # this returns a list

    minmag = 0.0
    maxmag = 9.9
    if args.magRange:
        minmag = args.magRange[0]
        maxmag = args.magRange[1]

    minsig = 0
    maxsig = 5000
    if args.sigRange:
        minsig = args.sigRange[0]
        maxsig = args.sigRange[1]

    if args.getCount:
        if isinstance(bounds, tuple) or bounds is None:
            nevents = count(starttime=args.startTime,
                            endtime=args.endTime,
                            updatedafter=args.after,
                            minlatitude=latmin,
                            maxlatitude=latmax,
                            minlongitude=lonmin,
                            maxlongitude=lonmax,
                            latitude=latitude,
                            longitude=longitude,
                            maxradiuskm=radiuskm,
                            catalog=args.catalog,
                            contributor=args.contributor,
                            maxmagnitude=maxmag,
                            minmagnitude=minmag,
                            minsig=minsig,
                            maxsig=maxsig,
                            producttype=args.limitByProductType)
        else:
            # accumulate the count across all of the country's bounding boxes
            nevents = 0
            for lonmin, lonmax, latmin, latmax in bounds:
                nevents += count(starttime=args.startTime,
                                 endtime=args.endTime,
                                 updatedafter=args.after,
                                 minlatitude=latmin,
                                 maxlatitude=latmax,
                                 minlongitude=lonmin,
                                 maxlongitude=lonmax,
                                 latitude=latitude,
                                 longitude=longitude,
                                 maxradiuskm=radiuskm,
                                 catalog=args.catalog,
                                 contributor=args.contributor,
                                 minsig=minsig,
                                 maxsig=maxsig,
                                 maxmagnitude=maxmag,
                                 minmagnitude=minmag,
                                 producttype=args.limitByProductType)
        print('There are %i events matching input criteria.' % nevents)
        sys.exit(0)
    if isinstance(bounds, tuple) or bounds is None:
        events = search(starttime=args.startTime,
                        endtime=args.endTime,
                        updatedafter=args.after,
                        minlatitude=latmin,
                        maxlatitude=latmax,
                        minlongitude=lonmin,
                        maxlongitude=lonmax,
                        latitude=latitude,
                        longitude=longitude,
                        maxradiuskm=radiuskm,
                        catalog=args.catalog,
                        contributor=args.contributor,
                        maxmagnitude=maxmag,
                        minmagnitude=minmag,
                        minsig=minsig,
                        maxsig=maxsig,
                        producttype=args.limitByProductType,
                        host=args.host,
                        eventtype=args.event_type,
                        alertlevel=args.alert_level)
    else:
        events = []
        for i, tbounds in enumerate(bounds):
            lonmin, lonmax, latmin, latmax = tbounds
            fmt = 'Checking bounds %i of %i for %s...\n'
            tpl = (i + 1, len(bounds), ccode)
            logging.debug(fmt % tpl)
            tevents = search(starttime=args.startTime,
                             endtime=args.endTime,
                             updatedafter=args.after,
                             minlatitude=latmin,
                             maxlatitude=latmax,
                             minlongitude=lonmin,
                             maxlongitude=lonmax,
                             latitude=latitude,
                             longitude=longitude,
                             maxradiuskm=radiuskm,
                             catalog=args.catalog,
                             contributor=args.contributor,
                             maxmagnitude=maxmag,
                             minmagnitude=minmag,
                             minsig=minsig,
                             maxsig=maxsig,
                             producttype=args.limitByProductType,
                             host=args.host,
                             eventtype=args.event_type,
                             alertlevel=args.alert_level)
            events += tevents

    if not len(events):
        logging.info('No events found matching your search criteria. Exiting.')
        sys.exit(0)

    if (args.getAngles != 'none' or
            args.getAllMags or
            args.getComponents != 'none'):

        logging.info(
            'Fetched %i events...creating table.\n' % (len(events)))
        supp = args.getMomentSupplement
        df = get_detail_data_frame(events, get_all_magnitudes=args.getAllMags,
                                   get_tensors=args.getComponents,
                                   get_focals=args.getAngles,
                                   get_moment_supplement=supp)
    else:
        logging.info(
            'Fetched %i events...creating summary table.\n' % (len(events)))
        df = get_summary_data_frame(events)

    # order the columns so that the event summary columns come first,
    # followed by any remaining detail columns
    first_columns = list(events[0].toDict().keys())
    col_list = list(df.columns)
    for column in first_columns:
        try:
            col_list.remove(column)
        except ValueError:
            # column not present in this table; nothing to reorder
            pass
    df = df[first_columns + col_list]

    if args.country:
        df = filter_by_country(df, ccode, buffer_km=args.buffer)

    logging.info('Created table...saving %i records to %s.\n' %
                 (len(df), args.filename))
    if args.format == 'excel':
        df.to_excel(args.filename, index=False)
    elif args.format == 'tab':
        df.to_csv(args.filename, sep='\t', index=False)
    else:
        df.to_csv(args.filename, index=False, chunksize=1000)
    logging.info('%i records saved to %s.' % (len(df), args.filename))
    sys.exit(0)
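The column reordering near the end of main() (summary columns first, everything else after) is a generic pandas idiom. A standalone sketch with synthetic values, mirroring the same try/remove pattern:

import pandas as pd

# synthetic row, for illustration only
df = pd.DataFrame({'extra': [1.0], 'id': ['id0001'],
                   'time': ['1994-06-09T00:33:16'], 'magnitude': [8.2]})

first_columns = ['id', 'time', 'magnitude']   # columns that should come first
col_list = list(df.columns)
for column in first_columns:
    if column in col_list:
        col_list.remove(column)
df = df[first_columns + col_list]
print(df.columns.tolist())   # ['id', 'time', 'magnitude', 'extra']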
Example 7
    def updateSequences(self, stime):
        etime = stime + timedelta(days=1)
        t1 = time.time()
        events = search(starttime=stime,
                        endtime=etime,
                        minlatitude=-90,
                        maxlatitude=90,
                        minlongitude=-180,
                        maxlongitude=180,
                        minmagnitude=0.0,
                        maxmagnitude=9.9)
        todayframe = get_summary_data_frame(events)
        logging.info('Got day data...')
        gdict = GeoDict(self._config['GDICT'])
        for row in range(0, gdict.ny):
            for col in range(0, gdict.nx):
                clat, clon = gdict.getLatLon(row, col)
                xmin = clon - gdict.dx / 2
                xmax = clon + gdict.dx / 2
                ymin = clat - gdict.dy / 2
                ymax = clat + gdict.dy / 2

                c1 = todayframe['latitude'] > ymin
                c2 = todayframe['latitude'] <= ymax
                c3 = todayframe['longitude'] > xmin
                c4 = todayframe['longitude'] <= xmax
                gridframe = todayframe[c1 & c2 & c3 & c4].copy()
                if not len(gridframe):
                    continue
                cluster_list, pproj = self.getClusters(gridframe)
                if len(cluster_list):
                    self.insertClusters(cluster_list, pproj)

        # now we need to merge sequences that may have fallen on the edge of
        # a grid border.
        logging.info('Matching sequences...')
        sequence_sets = self._seqdb.mergeSequences()
        for tseqset in sequence_sets:
            seqset = sorted(list(tseqset))
            id1 = seqset[0]
            frame1 = self._seqdb.getSequenceEvents(id1)
            xmin1 = frame1['longitude'].min()
            xmax1 = frame1['longitude'].max()
            ymin1 = frame1['latitude'].min()
            ymax1 = frame1['latitude'].max()
            bounds1 = (xmin1, xmax1, ymin1, ymax1)
            for i in range(1, len(seqset)):
                id2 = seqset[i]
                frame2 = self._seqdb.getSequenceEvents(id2)
                xmin2 = frame2['longitude'].min()
                xmax2 = frame2['longitude'].max()
                ymin2 = frame2['latitude'].min()
                ymax2 = frame2['latitude'].max()
                bounds2 = (xmin2, xmax2, ymin2, ymax2)
                logging.info('Merging sequence %i and %i' % (id1, id2))
                fmt = 'Bounds1: %.2f, %.2f, %.2f, %.2f'
                logging.info(fmt % (bounds1))
                fmt = 'Bounds2: %.2f, %.2f, %.2f, %.2f'
                logging.info(fmt % (bounds2))
                dataframe = pd.concat([frame1, frame2], axis=0)
                proj = self.getProj(dataframe)
                sqstats = self.getSequenceStats(dataframe, proj)
                self._seqdb.updateSequence(frame2, sqstats, id1)
                self._seqdb.deleteSequence(id2)

        logging.info('Finding stopped sequences...')
        # now find all sequences that seem to have ended (using config criteria)
        ended = self._seqdb.getStoppedDataframe()
        self._seqdb.markStoppedSequences(ended)

        logging.info('Finding non sequences...')
        # find all ended sequences that don't match our criteria for a sequence
        deletes = self._seqdb.getNonSequences()
        self._seqdb.deleteNonSequences(deletes)
        t2 = time.time()
        logging.info('updateSequences finished in %.1f seconds.' % (t2 - t1))