def test():
    print('Testing Northridge exposure check (with GPW data).')
    events = ['northridge']
    homedir = os.path.dirname(os.path.abspath(__file__))  # where is this script?
    excelfile = os.path.join(homedir, '..', 'data',
                             'WPP2015_POP_F02_POPULATION_GROWTH_RATE.xls')
    for event in events:
        shakefile = os.path.join(homedir, '..', 'data', 'eventdata',
                                 event, '%s_grid.xml' % event)
        popfile = os.path.join(homedir, '..', 'data', 'eventdata',
                               event, '%s_gpw.flt' % event)
        isofile = os.path.join(homedir, '..', 'data', 'eventdata',
                               event, '%s_isogrid.bil' % event)
        growth = PopulationGrowth.fromDefault()
        exp = Exposure(popfile, 2012, isofile)
        results = exp.calcExposure(shakefile)
        cmpexposure = [0, 0, 1817, 1767260, 5840985, 5780298,
                       2738374, 1559657, 4094, 0]
        np.testing.assert_almost_equal(cmpexposure, results['TotalExposure'])
    print('Passed Northridge exposure check (with GPW data).')
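# ---------------------------------------------------------------------------
# Import sketch (not part of the original tests): a minimal guess at the
# module-level imports the functions in this section rely on. The mapio,
# impactutils, and losspager module paths below are assumptions inferred from
# the class and function names used here; the actual package layout may
# differ.
# ---------------------------------------------------------------------------
import os
import io
import re
import shutil
import socket
import tempfile
import traceback
from collections import OrderedDict
from datetime import datetime  # the driver script at the end uses `import datetime`

import numpy as np
import fiona

from mapio.geodict import GeoDict  # assumed path
from mapio.grid2d import Grid2D  # assumed path
from mapio.shake import ShakeGrid, getHeaderData  # assumed path
from mapio.writer import write  # assumed path

from losspager.models.exposure import Exposure  # assumed path
from losspager.models.econexposure import EconExposure  # assumed path
from losspager.models.emploss import EmpiricalLoss, LognormalModel  # assumed path
from losspager.models.growth import PopulationGrowth  # assumed path
from losspager.models.semimodel import SemiEmpiricalFatality  # assumed path
from losspager.utils.country import Country  # assumed path
from losspager.io.pagerdata import PagerData  # assumed path
from losspager.vis.contourmap import draw_contour  # assumed path
from impactutils.mapping.city import Cities  # assumed path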
def test(outfolder=None):
    homedir = os.path.dirname(os.path.abspath(__file__))
    topdata = os.path.abspath(os.path.join(homedir, '..', 'data'))
    datadir = os.path.abspath(os.path.join(topdata, 'eventdata', 'northridge'))
    cityfile = os.path.join(topdata, 'cities1000.txt')
    oceanfile = os.path.join(datadir, 'northridge_ocean.json')
    shakefile = os.path.join(datadir, 'northridge_grid.xml')
    popfile = os.path.join(datadir, 'northridge_gpw.flt')
    ogridfile = os.path.join(datadir, 'northridge_ocean.bil')
    isofile = os.path.join(datadir, 'northridge_isogrid.bil')

    exp = Exposure(popfile, 2012, isofile)
    results = exp.calcExposure(shakefile)
    shakegrid = exp.getShakeGrid()
    popgrid = exp.getPopulationGrid()

    print('Testing to see if PAGER can successfully create contour map...')
    hasfolder = outfolder is not None
    try:
        if not hasfolder:
            outfolder = tempfile.mkdtemp()
        basefile = os.path.join(outfolder, 'output')
        pdffile, pngfile, mapcities = draw_contour(
            shakegrid, popgrid, oceanfile, ogridfile, cityfile, basefile)
        print('Output pdf is %s, output png is %s.' % (pdffile, pngfile))
        assert os.path.isfile(pngfile) and os.path.isfile(pdffile)
    finally:
        # only clean up directories we created ourselves
        if os.path.isdir(outfolder) and not hasfolder:
            shutil.rmtree(outfolder)
    print('Passed.')
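# Usage sketch (not part of the original test): these test modules are
# typically run directly. Passing an existing folder keeps the generated
# PDF/PNG around for manual inspection; with no argument the test draws into
# a temporary directory and deletes it afterwards.
if __name__ == '__main__':
    import sys
    outfolder = sys.argv[1] if len(sys.argv) > 1 else None
    test(outfolder)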
def basic_test():
    mmidata = np.array([[7, 8, 8, 8, 7],
                        [8, 9, 9, 9, 8],
                        [8, 9, 10, 9, 8],
                        [8, 9, 9, 8, 8],
                        [7, 8, 8, 6, 5]], dtype=np.float32)
    popdata = np.ones_like(mmidata) * 1e7
    isodata = np.array([[4, 4, 4, 4, 4],
                        [4, 4, 4, 4, 4],
                        [4, 4, 156, 156, 156],
                        [156, 156, 156, 156, 156],
                        [156, 156, 156, 156, 156]], dtype=np.int32)

    shakefile = get_temp_file_name()
    popfile = get_temp_file_name()
    isofile = get_temp_file_name()
    geodict = GeoDict({'xmin': 0.5, 'xmax': 4.5,
                       'ymin': 0.5, 'ymax': 4.5,
                       'dx': 1.0, 'dy': 1.0,
                       'nx': 5, 'ny': 5})
    layers = OrderedDict([('mmi', mmidata)])
    event_dict = {'event_id': 'us12345678',
                  'magnitude': 7.8,
                  'depth': 10.0,
                  'lat': 34.123,
                  'lon': -118.123,
                  'event_timestamp': datetime.utcnow(),
                  'event_description': 'foo',
                  'event_network': 'us'}
    shake_dict = {'event_id': 'us12345678',
                  'shakemap_id': 'us12345678',
                  'shakemap_version': 1,
                  'code_version': '4.5',
                  'process_timestamp': datetime.utcnow(),
                  'shakemap_originator': 'us',
                  'map_status': 'RELEASED',
                  'shakemap_event_type': 'ACTUAL'}
    unc_dict = {'mmi': (1, 1)}
    shakegrid = ShakeGrid(layers, geodict, event_dict, shake_dict, unc_dict)
    shakegrid.save(shakefile)
    popgrid = Grid2D(popdata, geodict.copy())
    isogrid = Grid2D(isodata, geodict.copy())
    write(popgrid, popfile, 'netcdf')
    write(isogrid, isofile, 'netcdf')

    ratedict = {4: {'start': [2010, 2012, 2014, 2016],
                    'end': [2012, 2014, 2016, 2018],
                    'rate': [0.01, 0.02, 0.03, 0.04]},
                156: {'start': [2010, 2012, 2014, 2016],
                      'end': [2012, 2014, 2016, 2018],
                      'rate': [0.02, 0.03, 0.04, 0.05]}}
    popgrowth = PopulationGrowth(ratedict)
    popyear = datetime.utcnow().year
    exposure = Exposure(popfile, popyear, isofile, popgrowth=popgrowth)
    expdict = exposure.calcExposure(shakefile)

    modeldict = [LognormalModel('AF', 11.613073, 0.180683, 1.0),
                 LognormalModel('CN', 10.328811, 0.100058, 1.0)]
    fatmodel = EmpiricalLoss(modeldict)

    # for the purposes of this test, let's override the rates
    # for Afghanistan and China with simpler numbers.
    fatmodel.overrideModel(
        'AF', np.array([0, 0, 0, 0, 1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 0],
                       dtype=np.float32))
    fatmodel.overrideModel(
        'CN', np.array([0, 0, 0, 0, 1e-7, 1e-6, 1e-5, 1e-4, 1e-3, 0],
                       dtype=np.float32))

    print('Testing very basic fatality calculation...')
    fatdict = fatmodel.getLosses(expdict)
    # strictly speaking, the Afghanistan fatalities should be 462,000, but
    # floating point precision dictates otherwise.
    testdict = {'CN': 46111, 'AF': 461999, 'TotalFatalities': 508110}
    for key, value in fatdict.items():
        assert value == testdict[key]
    print('Passed very basic fatality calculation...')

    print('Testing grid fatality calculations...')
    mmidata = exposure.getShakeGrid().getLayer('mmi').getData()
    popdata = exposure.getPopulationGrid().getData()
    isodata = exposure.getCountryGrid().getData()
    fatgrid = fatmodel.getLossGrid(mmidata, popdata, isodata)
    assert np.nansum(fatgrid) == 508111
    print('Passed grid fatality calculations...')

    # Test modifying rates and stuffing them back in...
    chile = LognormalModel('CL', 19.786773, 0.259531, 0.0)
    rates = chile.getLossRates(np.arange(5, 10))
    modrates = rates * 2  # does this make the event twice as deadly?

    # roughly the exposures from the 2015-09-16 CL event
    expo_pop = np.array([0, 0, 0, 1047000, 7314000,
                         1789000, 699000, 158000, 0, 0])
    mmirange = np.arange(5, 10)
    chile_deaths = chile.getLosses(expo_pop[4:9], mmirange)
    chile_double_deaths = chile.getLosses(expo_pop[4:9], mmirange,
                                          rates=modrates)
    print('Chile model fatalities: %f' % chile_deaths)
    print('Chile model x2 fatalities: %f' % chile_double_deaths)
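# The LognormalModel parameters above (theta, beta) define the PAGER-style
# empirical fatality-rate curve: the rate at shaking intensity s is
# Phi(ln(s / theta) / beta), with Phi the standard normal CDF. A standalone
# sketch of that form (assumed from the empirical model; not part of the test):
from scipy.stats import norm

def lognormal_rate(mmi, theta, beta):
    """Fatality rate at intensity `mmi` for a lognormal loss model."""
    return norm.cdf(np.log(mmi / theta) / beta)

# e.g., rates for the Afghanistan model defined above, at MMI 5 through 9:
print(lognormal_rate(np.arange(5, 10), 11.613073, 0.180683))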
def test():
    # where is this script?
    homedir = os.path.dirname(os.path.abspath(__file__))
    event = 'northridge'
    shakefile = os.path.join(homedir, '..', 'data', 'eventdata',
                             event, '%s_grid.xml' % event)
    popfile = os.path.join(homedir, '..', 'data', 'eventdata',
                           event, '%s_gpw.flt' % event)
    isofile = os.path.join(homedir, '..', 'data', 'eventdata',
                           event, '%s_isogrid.bil' % event)
    shapefile = os.path.join(homedir, '..', 'data', 'eventdata', event,
                             'City_BoundariesWGS84', 'City_Boundaries.shp')

    print('Test loading empirical fatality model from XML file...')
    fatmodel = EmpiricalLoss.fromDefaultFatality()
    print('Passed loading empirical fatality model from XML file.')

    print('Test getting alert level from various losses...')
    assert fatmodel.getAlertLevel({'TotalFatalities': 0}) == 'green'
    assert fatmodel.getAlertLevel({'TotalFatalities': 5}) == 'yellow'
    assert fatmodel.getAlertLevel({'TotalFatalities': 100}) == 'orange'
    assert fatmodel.getAlertLevel({'TotalFatalities': 1000}) == 'red'
    # 1000 times Earth's population
    assert fatmodel.getAlertLevel({'TotalFatalities': 1e13}) == 'red'
    print('Passed getting alert level from various losses.')

    print('Test retrieving fatality model data from XML file...')
    model = fatmodel.getModel('af')
    testmodel = LognormalModel('dummy', 11.613073, 0.180683, 8.428822)
    assert model == testmodel
    print('Passed retrieving fatality model data from XML file.')

    print('Testing with known exposures/fatalities for 1994 Northridge EQ...')
    exposure = {'xf': np.array([0, 0, 1506.0, 1946880.0, 6509154.0,
                                6690236.0, 3405381.0, 1892446.0, 5182.0, 0])}
    fatdict = fatmodel.getLosses(exposure)
    testdict = {'xf': 22}
    assert fatdict['xf'] == testdict['xf']
    print('Passed testing with known exposures/fatalities for '
          '1994 Northridge EQ.')

    print('Testing combining G values from all countries that '
          'contributed to losses...')
    fatdict = {'CO': 2.38005147e-01, 'EC': 8.01285916e+02}
    zetf = fatmodel.getCombinedG(fatdict)
    assert zetf == 2.5
    print('Passed combining G values from all countries that '
          'contributed to losses.')

    print('Testing calculating probabilities for standard PAGER ranges...')
    expected = {'UK': 70511, 'TotalFatalities': 70511}
    G = 2.5
    probs = fatmodel.getProbabilities(expected, G)
    testprobs = {'0-1': 3.99586017993e-06,
                 '1-10': 0.00019277654968408576,
                 '10-100': 0.0041568251597835061,
                 '100-1000': 0.039995273501147441,
                 '1000-10000': 0.17297196910604343,
                 '10000-100000': 0.3382545813262674,
                 '100000-10000000': 0.44442457847445394}
    for key, value in probs.items():
        np.testing.assert_almost_equal(value, testprobs[key])
    print('Passed calculating probabilities for standard PAGER ranges.')

    print('Testing calculating total fatalities for Northridge...')
    expobject = Exposure(popfile, 2012, isofile)
    expdict = expobject.calcExposure(shakefile)
    fatdict = fatmodel.getLosses(expdict)
    testdict = {'XF': 18}
    assert fatdict['XF'] == testdict['XF']
    print('Passed calculating total fatalities for Northridge...')

    print('Testing creating a fatality grid...')
    mmidata = expobject.getShakeGrid().getLayer('mmi').getData()
    popdata = expobject.getPopulationGrid().getData()
    isodata = expobject.getCountryGrid().getData()
    fatgrid = fatmodel.getLossGrid(mmidata, popdata, isodata)
    print(np.nansum(fatgrid))
    print('Passed creating a fatality grid.')

    print('Testing assigning fatalities to polygons...')
    popdict = expobject.getPopulationGrid().getGeoDict()
    with fiona.open(shapefile, 'r') as f:
        shapes = list(f)
    fatshapes, totfat = fatmodel.getLossByShapes(mmidata, popdata, isodata,
                                                 shapes, popdict)
    fatalities = 12
    for shape in fatshapes:
        if shape['id'] == '312':  # Los Angeles
            cname = shape['properties']['CITY_NAME']
            lalosses = shape['properties']['fatalities']
            assert lalosses == fatalities
            assert cname == 'Los Angeles'
            break
    print('Passed assigning fatalities to polygons...')
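# The alert-level assertions near the top of the test above imply the
# standard PAGER fatality bins: green below 1, yellow from 1 to 99, orange
# from 100 to 999, red at 1000 and above. A standalone sketch of that
# thresholding (illustrative only, not the library's implementation):
def alert_level(fatalities):
    if fatalities < 1:
        return 'green'
    elif fatalities < 100:
        return 'yellow'
    elif fatalities < 1000:
        return 'orange'
    return 'red'

assert alert_level(0) == 'green'
assert alert_level(5) == 'yellow'
assert alert_level(100) == 'orange'
assert alert_level(1e13) == 'red'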
def test():
    homedir = os.path.dirname(os.path.abspath(__file__))  # where is this script?
    fatfile = os.path.join(homedir, '..', 'data', 'fatality.xml')
    ecofile = os.path.join(homedir, '..', 'data', 'economy.xml')
    cityfile = os.path.join(homedir, '..', 'data', 'cities1000.txt')
    event = 'northridge'
    shakefile = os.path.join(homedir, '..', 'data', 'eventdata',
                             event, '%s_grid.xml' % event)
    popfile = os.path.join(homedir, '..', 'data', 'eventdata',
                           event, '%s_gpw.flt' % event)
    isofile = os.path.join(homedir, '..', 'data', 'eventdata',
                           event, '%s_isogrid.bil' % event)
    urbanfile = os.path.join(homedir, '..', 'data', 'eventdata',
                             'northridge', 'northridge_urban.bil')
    oceanfile = os.path.join(homedir, '..', 'data', 'eventdata',
                             'northridge', 'northridge_ocean.json')
    oceangridfile = os.path.join(homedir, '..', 'data', 'eventdata',
                                 'northridge', 'northridge_ocean.bil')
    timezonefile = os.path.join(homedir, '..', 'data', 'eventdata',
                                'northridge', 'northridge_timezone.shp')

    invfile = os.path.join(homedir, '..', 'data', 'semi_inventory.hdf')
    colfile = os.path.join(homedir, '..', 'data', 'semi_collapse_mmi.hdf')
    casfile = os.path.join(homedir, '..', 'data', 'semi_casualty.hdf')
    workfile = os.path.join(homedir, '..', 'data', 'semi_workforce.hdf')

    tdir = tempfile.mkdtemp()
    basename = os.path.join(tdir, 'output')

    exp = Exposure(popfile, 2012, isofile)
    results = exp.calcExposure(shakefile)
    shakegrid = exp.getShakeGrid()
    popgrid = exp.getPopulationGrid()
    pdffile, pngfile, mapcities = draw_contour(
        shakegrid, popgrid, oceanfile, oceangridfile, cityfile, basename)
    shutil.rmtree(tdir)

    popyear = 2012
    shake_tuple = getHeaderData(shakefile)
    tsunami = shake_tuple[1]['magnitude'] >= TSUNAMI_MAG_THRESH

    semi = SemiEmpiricalFatality.fromDefault()
    semi.setGlobalFiles(popfile, popyear, urbanfile, isofile)
    semiloss, resfat, nonresfat = semi.getLosses(shakefile)

    popgrowth = PopulationGrowth.fromDefault()
    econexp = EconExposure(popfile, 2012, isofile)
    fatmodel = EmpiricalLoss.fromDefaultFatality()
    expobject = Exposure(popfile, 2012, isofile, popgrowth)

    expdict = expobject.calcExposure(shakefile)
    fatdict = fatmodel.getLosses(expdict)

    econexpdict = econexp.calcExposure(shakefile)
    ecomodel = EmpiricalLoss.fromDefaultEconomic()
    ecodict = ecomodel.getLosses(expdict)

    shakegrid = econexp.getShakeGrid()
    pagerversion = 1
    cities = Cities.loadFromGeoNames(cityfile)
    impact1 = ('Red alert level for economic losses. Extensive damage is '
               'probable and the disaster is likely widespread. Estimated '
               'economic losses are less than 1% of GDP of Italy. Past '
               'events with this alert level have required a national or '
               'international level response.')
    impact2 = ('Orange alert level for shaking-related fatalities. '
               'Significant casualties are likely.')
    structcomment = ('Overall, the population in this region resides in '
                     'structures that are a mix of vulnerable and earthquake '
                     'resistant construction. The predominant vulnerable '
                     'building types are unreinforced brick with mud and '
                     'mid-rise nonductile concrete frame with infill '
                     'construction.')
    histeq = [1, 2, 3]
    struct_comment = ('Overall, the population in this region resides in '
                      'structures that are resistant to earthquake shaking, '
                      'though some vulnerable structures exist.')
    secondary_comment = ('Recent earthquakes in this area have caused '
                         'secondary hazards such as landslides that might '
                         'have contributed to losses.')
    hist_comment = ('A magnitude 7.1 earthquake 240 km east of this event '
                    'struck Reventador: Ecuador on March 6, 1987 (UTC), with '
                    'estimated population exposures of 14,000 at intensity '
                    'VIII and 2,000 at intensity IX or greater, resulting in '
                    'a reported 5,000 fatalities.')
    location = 'At the top of the world.'
    is_released = True

    doc = PagerData()
    eventcode = shakegrid.getEventDict()['event_id']
    versioncode = eventcode
    doc.setInputs(shakegrid, timezonefile, pagerversion, versioncode,
                  eventcode, tsunami, location, is_released)
    doc.setExposure(expdict, econexpdict)
    doc.setModelResults(fatmodel, ecomodel, fatdict, ecodict,
                        semiloss, resfat, nonresfat)
    doc.setComments(impact1, impact2, struct_comment, hist_comment,
                    secondary_comment)
    doc.setMapInfo(cityfile, mapcities)
    doc.validate()

    # let's test the property methods
    tdoc(doc, shakegrid, impact1, impact2, expdict,
         struct_comment, hist_comment)

    # see if we can save this to a bunch of files then read them back in
    try:
        tdir = tempfile.mkdtemp()
        doc.saveToJSON(tdir)
        newdoc = PagerData()
        newdoc.loadFromJSON(tdir)
        tdoc(newdoc, shakegrid, impact1, impact2, expdict,
             struct_comment, hist_comment)

        # test the xml saving method
        xmlfile = doc.saveToLegacyXML(tdir)
    except Exception as e:
        raise AssertionError(
            'PagerData serialization round trip failed: %s' % str(e))
    finally:
        shutil.rmtree(tdir)
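# `tdoc` above is a property-checking helper defined elsewhere in the test
# module. A sketch of what it is assumed to verify, modeled on the inline
# assertions in the older test below (illustrative, not the actual helper;
# assumes numpy imported as np):
def tdoc(doc, shakegrid, impact1, impact2, expdict,
         struct_comment, hist_comment):
    assert doc.getEventInfo()['mag'] == shakegrid.getEventDict()['magnitude']
    imp1, imp2 = doc.getImpactComments()
    assert imp1 == impact1 and imp2 == impact2
    exp = doc.getTotalExposure()
    assert np.isclose(np.array(exp), expdict['TotalExposure']).all()
    assert doc.getStructureComment() == struct_comment
    assert doc.getHistoricalComment() == hist_comment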
def test():
    homedir = os.path.dirname(os.path.abspath(__file__))  # where is this script?
    fatfile = os.path.join(homedir, '..', 'data', 'fatality.xml')
    ecofile = os.path.join(homedir, '..', 'data', 'economy.xml')
    cityfile = os.path.join(homedir, '..', 'data', 'cities1000.txt')
    event = 'northridge'
    shakefile = os.path.join(homedir, '..', 'data', 'eventdata',
                             event, '%s_grid.xml' % event)
    popfile = os.path.join(homedir, '..', 'data', 'eventdata',
                           event, '%s_gpw.flt' % event)
    isofile = os.path.join(homedir, '..', 'data', 'eventdata',
                           event, '%s_isogrid.bil' % event)
    urbanfile = os.path.join(homedir, '..', 'data', 'eventdata',
                             'northridge', 'northridge_urban.bil')
    oceanfile = os.path.join(homedir, '..', 'data', 'eventdata',
                             'northridge', 'northridge_ocean.json')
    invfile = os.path.join(homedir, '..', 'data', 'semi_inventory.hdf')
    colfile = os.path.join(homedir, '..', 'data', 'semi_collapse_mmi.hdf')
    casfile = os.path.join(homedir, '..', 'data', 'semi_casualty.hdf')
    workfile = os.path.join(homedir, '..', 'data', 'semi_workforce.hdf')

    tdir = tempfile.mkdtemp()
    outfile = os.path.join(tdir, 'output.pdf')
    pngfile, mapcities = draw_contour(shakefile, popfile, oceanfile,
                                      cityfile, outfile, make_png=True)
    shutil.rmtree(tdir)

    popyear = 2012
    semi = SemiEmpiricalFatality.fromDefault()
    semi.setGlobalFiles(popfile, popyear, urbanfile, isofile)
    semiloss, resfat, nonresfat = semi.getLosses(shakefile)

    popgrowth = PopulationGrowth.fromDefault()
    econexp = EconExposure(popfile, 2012, isofile)
    fatmodel = EmpiricalLoss.fromDefaultFatality()
    expobject = Exposure(popfile, 2012, isofile, popgrowth)

    expdict = expobject.calcExposure(shakefile)
    fatdict = fatmodel.getLosses(expdict)

    econexpdict = econexp.calcExposure(shakefile)
    ecomodel = EmpiricalLoss.fromDefaultEconomic()
    ecodict = ecomodel.getLosses(expdict)

    shakegrid = econexp.getShakeGrid()
    pagerversion = 1
    cities = Cities.loadFromGeoNames(cityfile)
    impact1 = ('Red alert level for economic losses. Extensive damage is '
               'probable and the disaster is likely widespread. Estimated '
               'economic losses are less than 1% of GDP of Italy. Past '
               'events with this alert level have required a national or '
               'international level response.')
    impact2 = ('Orange alert level for shaking-related fatalities. '
               'Significant casualties are likely.')
    structcomment = ('Overall, the population in this region resides in '
                     'structures that are a mix of vulnerable and earthquake '
                     'resistant construction. The predominant vulnerable '
                     'building types are unreinforced brick with mud and '
                     'mid-rise nonductile concrete frame with infill '
                     'construction.')
    histeq = [1, 2, 3]
    struct_comment = ('Overall, the population in this region resides in '
                      'structures that are resistant to earthquake shaking, '
                      'though some vulnerable structures exist.')
    secondary_comment = ('Recent earthquakes in this area have caused '
                         'secondary hazards such as landslides that might '
                         'have contributed to losses.')
    hist_comment = ('A magnitude 7.1 earthquake 240 km east of this event '
                    'struck Reventador: Ecuador on March 6, 1987 (UTC), with '
                    'estimated population exposures of 14,000 at intensity '
                    'VIII and 2,000 at intensity IX or greater, resulting in '
                    'a reported 5,000 fatalities.')

    doc = PagerData()
    doc.setInputs(shakegrid, pagerversion,
                  shakegrid.getEventDict()['event_id'])
    doc.setExposure(expdict, econexpdict)
    doc.setModelResults(fatmodel, ecomodel, fatdict, ecodict,
                        semiloss, resfat, nonresfat)
    doc.setComments(impact1, impact2, struct_comment, hist_comment,
                    secondary_comment)
    doc.setMapInfo(cityfile, mapcities)
    doc.validate()

    eventinfo = doc.getEventInfo()
    assert eventinfo['mag'] == shakegrid.getEventDict()['magnitude']
    imp1, imp2 = doc.getImpactComments()
    assert imp1 == impact1 and imp2 == impact2
    version = doc.getSoftwareVersion()
    elapsed = doc.getElapsed()
    exp = doc.getTotalExposure()
    assert np.isclose(np.array(exp), expdict['TotalExposure']).all()
    hist_table = doc.getHistoricalTable()
    assert hist_table[0]['EventID'] == '199206281505'
    scomm = doc.getStructureComment()
    assert scomm == struct_comment
    hcomm = doc.getHistoricalComment()
    assert hcomm == hist_comment
    citytable = doc.getCityTable()
    assert citytable.iloc[0]['name'] == 'Santa Clarita'
    summary = doc.getSummaryAlert()
    assert summary == 'yellow'
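# The summary alert checked above is assumed to be the more severe of the
# fatality and economic alert levels (a sketch of that combination rule; the
# actual logic lives inside PagerData.getSummaryAlert):
ALERT_LEVELS = ['green', 'yellow', 'orange', 'red']

def summary_alert(fat_level, eco_level):
    worst = max(ALERT_LEVELS.index(fat_level), ALERT_LEVELS.index(eco_level))
    return ALERT_LEVELS[worst]

assert summary_alert('yellow', 'green') == 'yellow'
assert summary_alert('yellow', 'red') == 'red'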
def main(pargs, config):
    # get the user's home directory
    homedir = os.path.expanduser("~")

    # handle cancel messages
    if pargs.cancel:
        # we presume that pargs.gridfile in this context is an event ID.
        msg = _cancel(pargs.gridfile, config)
        print(msg)
        return True

    # what kind of thing is gridfile?
    is_file = os.path.isfile(pargs.gridfile)
    is_url, url_gridfile = _is_url(pargs.gridfile)
    is_pdl, pdl_gridfile = _check_pdl(pargs.gridfile, config)
    if is_file:
        gridfile = pargs.gridfile
    elif is_url:
        gridfile = url_gridfile
    elif is_pdl:
        gridfile = pdl_gridfile
    else:
        print("ShakeMap Grid file %s does not exist." % pargs.gridfile)
        return False

    pager_folder = os.path.join(homedir, config["output_folder"])
    pager_archive = os.path.join(homedir, config["archive_folder"])

    admin = PagerAdmin(pager_folder, pager_archive)

    # stdout will now be logged as INFO, stderr will be logged as WARNING
    mail_host = config["mail_hosts"][0]
    mail_from = config["mail_from"]
    developers = config["developers"]
    logfile = os.path.join(pager_folder, "pager.log")
    plog = PagerLogger(logfile, developers, mail_from, mail_host,
                       debug=pargs.debug)
    logger = plog.getLogger()

    try:
        eid = None
        pager_version = None
        # get all the basic event information and print it, if requested
        shake_tuple = getHeaderData(gridfile)
        eid = shake_tuple[1]["event_id"].lower()
        etime = shake_tuple[1]["event_timestamp"]
        if not len(eid):
            eid = shake_tuple[0]["event_id"].lower()
        network = shake_tuple[1]["event_network"].lower()
        if network == "":
            network = "us"
        if not eid.startswith(network):
            eid = network + eid

        # Create a ComCatInfo object to hopefully tell us a number of things
        # about this event
        try:
            ccinfo = ComCatInfo(eid)
            location = ccinfo.getLocation()
            tsunami = ccinfo.getTsunami()
            authid, allids = ccinfo.getAssociatedIds()
            authsource, othersources = ccinfo.getAssociatedSources()
        except Exception:
            # fail over to what we can determine locally
            location = shake_tuple[1]["event_description"]
            tsunami = shake_tuple[1]["magnitude"] >= TSUNAMI_MAG_THRESH
            authid = eid
            authsource = network
            allids = []

        # the location field can be empty (None), which breaks a bunch of things
        if location is None:
            location = ""

        # Check to see if the user wanted to override the default tsunami criteria
        if pargs.tsunami != "auto":
            tsunami = pargs.tsunami == "on"

        # check to see if this event is a scenario
        is_scenario = False
        shakemap_type = shake_tuple[0]["shakemap_event_type"]
        if shakemap_type == "SCENARIO":
            is_scenario = True

        # if the event is NOT a scenario and the event time is in the future,
        # flag the event as a scenario and yell about it.
        if etime > datetime.datetime.utcnow():
            is_scenario = True
            logger.warning(
                "Event origin time is in the future! Flagging this as a scenario."
            )

        if is_scenario:
            if re.search("scenario", location.lower()) is None:
                location = "Scenario " + location

        # create the event directory (if it does not exist), and start
        # logging there
        logger.info("Creating event directory")
        event_folder = admin.createEventFolder(authid, etime)

        # Stop processing if there is a "stop" file in the event folder
        stopfile = os.path.join(event_folder, "stop")
        if os.path.isfile(stopfile):
            fmt = '"stop" file found in %s. Stopping processing, returning with 1.'
            logger.info(fmt % (event_folder))
            return True

        pager_version = get_pager_version(event_folder)
        version_folder = os.path.join(event_folder,
                                      "version.%03d" % pager_version)
        os.makedirs(version_folder)
        event_logfile = os.path.join(version_folder, "event.log")

        # this will turn off the global rotating log file
        # and switch to the one in the version folder.
        plog.setVersionHandler(event_logfile)

        # Copy the grid.xml file to the version folder.
        # Sometimes (usually when testing) the input grid isn't called
        # grid.xml, so rename it here.
        version_grid = os.path.join(version_folder, "grid.xml")
        shutil.copyfile(gridfile, version_grid)

        # Check to see if the tsunami flag has been previously set
        tsunami_toggle = {"on": 1, "off": 0}
        tsunami_file = os.path.join(event_folder, "tsunami")
        if os.path.isfile(tsunami_file):
            tsunami = tsunami_toggle[open(tsunami_file, "rt").read().strip()]

        # get the rest of the event info
        etime = shake_tuple[1]["event_timestamp"]
        elat = shake_tuple[1]["lat"]
        elon = shake_tuple[1]["lon"]
        emag = shake_tuple[1]["magnitude"]

        # get the year of the event
        event_year = shake_tuple[1]["event_timestamp"].year

        # find the population data collected most closely to the event_year
        pop_year, popfile = _get_pop_year(
            event_year, config["model_data"]["population_data"]
        )
        logger.info("Population year: %i Population file: %s\n"
                    % (pop_year, popfile))

        # Get exposure results
        logger.info("Calculating population exposure.")
        isofile = config["model_data"]["country_grid"]
        expomodel = Exposure(popfile, pop_year, isofile)
        exposure = expomodel.calcExposure(gridfile)

        # incidentally grab the country code of the epicenter
        numcode = expomodel._isogrid.getValue(elat, elon)
        if np.isnan(numcode):
            cdict = None
        else:
            cdict = Country().getCountry(int(numcode))
        if cdict is None:
            ccode = "UK"
        else:
            ccode = cdict["ISO2"]

        logger.info("Country code at epicenter is %s" % ccode)

        # get fatality results, if requested
        logger.info("Calculating empirical fatalities.")
        fatmodel = EmpiricalLoss.fromDefaultFatality()
        fatdict = fatmodel.getLosses(exposure)

        # get economic results, if requested
        logger.info("Calculating economic exposure.")
        econexpmodel = EconExposure(popfile, pop_year, isofile)
        ecomodel = EmpiricalLoss.fromDefaultEconomic()
        econexposure = econexpmodel.calcExposure(gridfile)
        ecodict = ecomodel.getLosses(econexposure)
        shakegrid = econexpmodel.getShakeGrid()

        # Get semi-empirical losses
        logger.info("Calculating semi-empirical fatalities.")
        urbanfile = config["model_data"]["urban_rural_grid"]
        if not os.path.isfile(urbanfile):
            raise PagerException(
                "Urban-rural grid file %s does not exist." % urbanfile)

        semi = SemiEmpiricalFatality.fromDefault()
        semi.setGlobalFiles(popfile, pop_year, urbanfile, isofile)
        semiloss, resfat, nonresfat = semi.getLosses(gridfile)

        # get all of the other components of PAGER
        logger.info("Getting all comments.")
        # get the fatality and economic comments
        impact1, impact2 = get_impact_comments(
            fatdict, ecodict, econexposure, event_year, ccode
        )
        # get the comment describing vulnerable structures in the region
        struct_comment = get_structure_comment(resfat, nonresfat, semi)
        # get the comment describing historic secondary hazards
        secondary_comment = get_secondary_comment(elat, elon, emag)
        # get the comment describing historical earthquakes in the region
        historical_comment = get_historical_comment(elat, elon, emag,
                                                    exposure, fatdict)

        # generate the probability plots
        logger.info("Drawing probability plots.")
        fat_probs_file, eco_probs_file = _draw_probs(
            fatmodel, fatdict, ecomodel, ecodict, version_folder
        )

        # generate the exposure map
        exposure_base = os.path.join(version_folder, "exposure")
        logger.info("Generating exposure map...")
        oceanfile = config["model_data"]["ocean_vectors"]
        oceangrid = config["model_data"]["ocean_grid"]
        cityfile = config["model_data"]["city_file"]
        borderfile = config["model_data"]["border_vectors"]
        shake_grid = expomodel.getShakeGrid()
        pop_grid = expomodel.getPopulationGrid()
        pdf_file, png_file, mapcities = draw_contour(
            shake_grid,
            pop_grid,
            oceanfile,
            oceangrid,
            cityfile,
            exposure_base,
            borderfile,
            is_scenario=is_scenario,
        )
        logger.info("Generated exposure map %s" % pdf_file)

        # figure out whether this event has been "released".
        is_released = _get_release_status(
            pargs,
            config,
            fatmodel,
            fatdict,
            ecomodel,
            ecodict,
            shake_tuple,
            event_folder,
        )

        # Create a data object to encapsulate everything we know about the
        # PAGER results, and then serialize that to disk in the form of a
        # number of JSON files.
        logger.info("Making PAGER Data object.")
        doc = PagerData()
        timezone_file = config["model_data"]["timezones_file"]
        elapsed = pargs.elapsed
        doc.setInputs(
            shakegrid,
            timezone_file,
            pager_version,
            shakegrid.getEventDict()["event_id"],
            authid,
            tsunami,
            location,
            is_released,
            elapsed=elapsed,
        )
        logger.info("Setting inputs.")
        doc.setExposure(exposure, econexposure)
        logger.info("Setting exposure.")
        doc.setModelResults(
            fatmodel, ecomodel, fatdict, ecodict, semiloss, resfat, nonresfat
        )
        logger.info("Setting comments.")
        doc.setComments(
            impact1, impact2, struct_comment, historical_comment,
            secondary_comment
        )
        logger.info("Setting map info.")
        doc.setMapInfo(cityfile, mapcities)
        logger.info("Validating.")
        doc.validate()

        # If we have determined that the event is a scenario (origin time is
        # in the future) and the shakemap is not flagged as such, set the
        # shakemap type in the pagerdata object to be 'SCENARIO'.
        if is_scenario:
            doc.setToScenario()

        json_folder = os.path.join(version_folder, "json")
        os.makedirs(json_folder)
        logger.info("Saving output to JSON.")
        doc.saveToJSON(json_folder)
        logger.info("Saving output to XML.")
        doc.saveToLegacyXML(version_folder)

        logger.info("Creating onePAGER pdf...")
        onepager_pdf, error = create_onepager(doc, version_folder)
        if onepager_pdf is None:
            raise PagerException("Could not create onePAGER output: \n%s"
                                 % error)

        # copy the contents.xml file to the version folder
        contentsfile = get_data_path("contents.xml")
        if contentsfile is None:
            raise PagerException("Could not find contents.xml file.")
        shutil.copy(contentsfile, version_folder)

        # send the pdf as an attachment to the internal team of PAGER users
        if not is_released and not is_scenario:
            message_pager(config, onepager_pdf, doc)

        # run transfer, as appropriate and as specified by config.
        # The PAGER product eventsource and eventsourcecode should match the
        # input ShakeMap settings for these properties. This can possibly
        # cause confusion if a regional ShakeMap is trumped with one from
        # NEIC, but this should happen less often than an NEIC origin being
        # made authoritative over a regional one.
        eventsource = network
        eventsourcecode = eid
        res, msg = transfer(
            config,
            doc,
            eventsourcecode,
            eventsource,
            version_folder,
            is_scenario=is_scenario,
        )
        logger.info(msg)
        if not res:
            logger.critical('Error transferring PAGER content. "%s"' % msg)

        print("Created onePAGER pdf %s" % onepager_pdf)
        logger.info("Created onePAGER pdf %s" % onepager_pdf)

        logger.info("Done.")
        return True
    except Exception as e:
        f = io.StringIO()
        traceback.print_exc(file=f)
        msg = "%s\n %s" % (str(e), f.getvalue())
        hostname = socket.gethostname()
        msg = msg + "\n" + "Error occurred on %s\n" % (hostname)
        if gridfile is not None:
            msg = msg + "\n" + "Error on file: %s\n" % (gridfile)
        if eid is not None:
            msg = msg + "\n" + "Error on event: %s\n" % (eid)
        if pager_version is not None:
            msg = msg + "\n" + "Error on version: %i\n" % (pager_version)
        f.close()
        logger.critical(msg)
        logger.info("Sent error to email")
        return False
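# Entry-point sketch (not the actual PAGER command-line script): how
# main(pargs, config) might be wired up. The option names mirror the
# attributes used above (gridfile, cancel, tsunami, debug, elapsed); the YAML
# config loading and the config file path are assumptions.
if __name__ == "__main__":
    import argparse
    import os
    import sys

    import yaml

    parser = argparse.ArgumentParser(description="Run a PAGER analysis.")
    parser.add_argument("gridfile",
                        help="ShakeMap grid.xml file, URL, or event ID")
    parser.add_argument("--cancel", action="store_true",
                        help="send a cancel message for this event")
    parser.add_argument("--tsunami", choices=["auto", "on", "off"],
                        default="auto",
                        help="override the default tsunami criteria")
    parser.add_argument("--debug", action="store_true",
                        help="do not email error messages to developers")
    parser.add_argument("--elapsed", type=int, default=None,
                        help="elapsed minutes since origin (for testing)")
    pargs = parser.parse_args()

    # assumed config location; the real script may read from elsewhere
    with open(os.path.expanduser("~/.pager/config.yml"), "rt") as f:
        config = yaml.safe_load(f)

    sys.exit(0 if main(pargs, config) else 1)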