def test_get_pager_data_frame():
    """Check PAGER exposure and loss dataframes against known recorded values.

    Network responses are replayed from a vcr cassette so the test is
    deterministic and offline-safe.
    """
    cassettes, datadir = get_datadir()
    event_id = 'us2000h8ty'
    detail = get_event_by_id(event_id)
    tape_file = os.path.join(cassettes, 'dataframes_pager.yaml')
    with vcr.use_cassette(tape_file):
        # total exposure at MMI 3 for the summary (first) row
        expected_mmi3 = 2248544
        frame = get_pager_data_frame(detail)
        assert frame.iloc[0]['mmi3'] == expected_mmi3

        # per-country rows (after the summary row) must sum to the same total
        frame = get_pager_data_frame(detail, get_losses=True,
                                     get_country_exposures=True)
        assert frame.iloc[1:]['mmi3'].sum() == expected_mmi3

        # loss estimates on the summary row
        expected_fatalities = 13
        expected_dollars = 323864991
        frame = get_pager_data_frame(detail, get_losses=True)
        assert frame.iloc[0]['predicted_fatalities'] == expected_fatalities
        assert frame.iloc[0]['predicted_dollars'] == expected_dollars

        # per-country losses must also sum to the summary totals
        frame = get_pager_data_frame(detail, get_losses=True,
                                     get_country_exposures=True)
        assert frame.iloc[1:]['predicted_fatalities'].sum() == expected_fatalities
        assert frame.iloc[1:]['predicted_dollars'].sum() == expected_dollars

        # a second event, exposure-only check
        event_id = 'us1000778i'
        detail = get_event_by_id(event_id)
        frame = get_pager_data_frame(detail)
        assert frame.iloc[0]['mmi4'] == 14380

        # test getting superseded versions of the pager product
        event_id = 'us2000h8ty'
        detail = get_event_by_id(event_id, includesuperseded=True)
        frame = get_pager_data_frame(detail, get_losses=True)
        version_7 = frame[frame['pager_version'] == 7].iloc[0]
        assert version_7['predicted_fatalities'] == 16
def test_get_pager_data_frame():
    """Check PAGER exposure and loss dataframes against known values (live queries).

    NOTE(review): this file defines ``test_get_pager_data_frame`` twice; this
    later definition shadows the earlier cassette-based one, so only this
    version is collected by pytest — confirm which one is intended to run.
    The vcr cassette line is commented out here, so queries go over the network.
    """
    datadir = get_datadir()
    event_id = 'us2000h8ty'
    detail = get_event_by_id(event_id)
    tape_file = os.path.join(datadir, 'vcr_pager_results.yaml')
    # with vcr.use_cassette(tape_file):
    # total exposure at MMI 3 for the summary (first) row
    expected_mmi3 = 2248544
    frame = get_pager_data_frame(detail)
    assert frame.iloc[0]['mmi3'] == expected_mmi3

    # per-country rows (after the summary row) must sum to the same total
    frame = get_pager_data_frame(detail, get_country_exposures=True)
    assert frame.iloc[1:]['mmi3'].sum() == expected_mmi3

    # loss estimates on the summary row
    expected_fatalities = 13
    expected_dollars = 323864991
    frame = get_pager_data_frame(detail, get_losses=True)
    assert frame.iloc[0]['predicted_fatalities'] == expected_fatalities
    assert frame.iloc[0]['predicted_dollars'] == expected_dollars

    # per-country losses must also sum to the summary totals
    frame = get_pager_data_frame(detail, get_losses=True,
                                 get_country_exposures=True)
    assert frame.iloc[1:]['predicted_fatalities'].sum() == expected_fatalities
    assert frame.iloc[1:]['predicted_dollars'].sum() == expected_dollars

    # a second event, exposure-only check
    event_id = 'us1000778i'
    detail = get_event_by_id(event_id)
    frame = get_pager_data_frame(detail)
    assert frame.iloc[0]['mmi4'] == 14380

    # test getting superseded versions of the pager product
    event_id = 'us2000h8ty'
    detail = get_event_by_id(event_id, includesuperseded=True)
    frame = get_pager_data_frame(detail, get_losses=True)
    version_7 = frame[frame['pager_version'] == 7].iloc[0]
    assert version_7['predicted_fatalities'] == 16
def main():
    """Search ComCat for events with PAGER products and save losses/exposures to a file.

    Reads all options from the command line (see ``get_parser()``): either a
    single event id, or a search constrained by time, magnitude, and one of a
    bounding box or a radius. Results are written to ``args.filename`` in
    Excel, tab-delimited, or CSV format. Exits with status 1 on conflicting
    search options, 0 otherwise.
    """
    parser = get_parser()
    args = parser.parse_args()
    setup_logger(args.logfile, args.loglevel)

    # radius search parameters (None unless --radius given)
    latitude = None
    longitude = None
    radiuskm = None
    lonmin = latmin = lonmax = latmax = None
    if args.radius:
        latitude = args.radius[0]
        longitude = args.radius[1]
        radiuskm = args.radius[2]

    if args.bounds:
        lonmin, lonmax, latmin, latmax = args.bounds
        # fix longitude bounds when crossing dateline
        if lonmin > lonmax and lonmax >= -180:
            lonmin -= 360
    else:
        lonmin, lonmax, latmin, latmax = None, None, None, None

    minmag = 0.0
    maxmag = 9.9
    if args.magRange:
        minmag = args.magRange[0]
        maxmag = args.magRange[1]

    # bounding box and radius searches are mutually exclusive
    if args.bounds and args.radius:
        print('Please specify either a bounding box OR radius search.')
        sys.exit(1)

    if args.eventid:
        event = get_event_by_id(args.eventid, includesuperseded=args.all)
        events = [event]
    else:
        events = search(starttime=args.startTime, endtime=args.endTime,
                        updatedafter=args.after, minlatitude=latmin,
                        maxlatitude=latmax, minlongitude=lonmin,
                        maxlongitude=lonmax, latitude=latitude,
                        longitude=longitude, maxradiuskm=radiuskm,
                        maxmagnitude=maxmag, minmagnitude=minmag,
                        producttype='losspager', host=args.host)

    if not events:
        print('No events found matching your search criteria. Exiting.')
        sys.exit(0)

    dataframe = None
    nevents = len(events)
    # BUG FIX: the original set i = 1 and never incremented it, so the
    # progress message always reported "(1 of N)". enumerate fixes that.
    for i, event in enumerate(events, start=1):
        logging.debug('Processing event %s (%i of %i).\n' %
                      (event.id, i, nevents))
        if isinstance(event, SummaryEvent):
            detail = event.getDetailEvent(includesuperseded=args.all)
        else:
            detail = event
        df = get_pager_data_frame(detail, get_losses=args.get_losses,
                                  get_country_exposures=args.get_countries,
                                  get_all_versions=args.all)
        # accumulate per-event frames into one table
        if dataframe is None:
            dataframe = df
        else:
            dataframe = pd.concat([dataframe, df])

    if dataframe is not None:
        logging.debug('Created table...saving %i records to %s.\n' %
                      (len(dataframe), args.filename))
        if args.format == 'excel':
            dataframe.to_excel(args.filename, index=False)
        elif args.format == 'tab':
            dataframe.to_csv(args.filename, sep='\t', index=False)
        else:
            dataframe.to_csv(args.filename, index=False, chunksize=1000)
        add_headers(args.filename, args.format)
        print('%i records saved to %s.' % (len(dataframe), args.filename))
    else:
        sys.stderr.write('No Pager products found for requested event(s)\n')
    sys.exit(0)