def test():
    # (module-level imports — os, shutil, tempfile, numpy as np, and the
    # losspager model classes used below — are omitted in this excerpt)
    homedir = os.path.dirname(os.path.abspath(__file__))  # where is this script?
    fatfile = os.path.join(homedir, '..', 'data', 'fatality.xml')
    ecofile = os.path.join(homedir, '..', 'data', 'economy.xml')
    cityfile = os.path.join(homedir, '..', 'data', 'cities1000.txt')
    event = 'northridge'
    shakefile = os.path.join(homedir, '..', 'data', 'eventdata', event,
                             '%s_grid.xml' % event)
    popfile = os.path.join(homedir, '..', 'data', 'eventdata', event,
                           '%s_gpw.flt' % event)
    isofile = os.path.join(homedir, '..', 'data', 'eventdata', event,
                           '%s_isogrid.bil' % event)
    urbanfile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge',
                             'northridge_urban.bil')
    oceanfile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge',
                             'northridge_ocean.json')
    oceangridfile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge',
                                 'northridge_ocean.bil')
    timezonefile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge',
                                'northridge_timezone.shp')
    invfile = os.path.join(homedir, '..', 'data', 'semi_inventory.hdf')
    colfile = os.path.join(homedir, '..', 'data', 'semi_collapse_mmi.hdf')
    casfile = os.path.join(homedir, '..', 'data', 'semi_casualty.hdf')
    workfile = os.path.join(homedir, '..', 'data', 'semi_workforce.hdf')

    tdir = tempfile.mkdtemp()
    basename = os.path.join(tdir, 'output')

    # calculate population exposure and draw the exposure contour map
    exp = Exposure(popfile, 2012, isofile)
    results = exp.calcExposure(shakefile)
    shakegrid = exp.getShakeGrid()
    popgrid = exp.getPopulationGrid()
    pdffile, pngfile, mapcities = draw_contour(
        shakegrid, popgrid, oceanfile, oceangridfile, cityfile, basename)
    shutil.rmtree(tdir)

    popyear = 2012
    shake_tuple = getHeaderData(shakefile)
    tsunami = shake_tuple[1]['magnitude'] >= TSUNAMI_MAG_THRESH

    # semi-empirical fatalities
    semi = SemiEmpiricalFatality.fromDefault()
    semi.setGlobalFiles(popfile, popyear, urbanfile, isofile)
    semiloss, resfat, nonresfat = semi.getLosses(shakefile)

    # empirical fatality and economic losses
    popgrowth = PopulationGrowth.fromDefault()
    econexp = EconExposure(popfile, 2012, isofile)
    fatmodel = EmpiricalLoss.fromDefaultFatality()
    expobject = Exposure(popfile, 2012, isofile, popgrowth)
    expdict = expobject.calcExposure(shakefile)
    fatdict = fatmodel.getLosses(expdict)
    econexpdict = econexp.calcExposure(shakefile)
    ecomodel = EmpiricalLoss.fromDefaultEconomic()
    ecodict = ecomodel.getLosses(expdict)
    shakegrid = econexp.getShakeGrid()
    pagerversion = 1
    cities = Cities.loadFromGeoNames(cityfile)
    impact1 = ('Red alert level for economic losses. Extensive damage is '
               'probable and the disaster is likely widespread. Estimated '
               'economic losses are less than 1% of GDP of Italy. Past events '
               'with this alert level have required a national or '
               'international level response.')
    impact2 = ('Orange alert level for shaking-related fatalities. '
               'Significant casualties are likely.')
    structcomment = ('Overall, the population in this region resides in '
                     'structures that are a mix of vulnerable and earthquake '
                     'resistant construction. The predominant vulnerable '
                     'building types are unreinforced brick with mud and '
                     'mid-rise nonductile concrete frame with infill '
                     'construction.')
    histeq = [1, 2, 3]
    struct_comment = ('Overall, the population in this region resides in '
                      'structures that are resistant to earthquake shaking, '
                      'though some vulnerable structures exist.')
    secondary_comment = ('Recent earthquakes in this area have caused '
                         'secondary hazards such as landslides that might '
                         'have contributed to losses.')
    hist_comment = ('A magnitude 7.1 earthquake 240 km east of this event '
                    'struck Reventador: Ecuador on March 6, 1987 (UTC), with '
                    'estimated population exposures of 14,000 at intensity '
                    'VIII and 2,000 at intensity IX or greater, resulting in '
                    'a reported 5,000 fatalities.')
    location = 'At the top of the world.'
    is_released = True

    doc = PagerData()
    eventcode = shakegrid.getEventDict()['event_id']
    versioncode = eventcode
    doc.setInputs(shakegrid, timezonefile, pagerversion, versioncode,
                  eventcode, tsunami, location, is_released)
    doc.setExposure(expdict, econexpdict)
    doc.setModelResults(fatmodel, ecomodel, fatdict, ecodict,
                        semiloss, resfat, nonresfat)
    doc.setComments(impact1, impact2, struct_comment, hist_comment,
                    secondary_comment)
    doc.setMapInfo(cityfile, mapcities)
    doc.validate()

    # let's test the property methods
    tdoc(doc, shakegrid, impact1, impact2, expdict, struct_comment, hist_comment)

    # see if we can save this to a bunch of files, then read them back in
    try:
        tdir = tempfile.mkdtemp()
        doc.saveToJSON(tdir)
        newdoc = PagerData()
        newdoc.loadFromJSON(tdir)
        tdoc(newdoc, shakegrid, impact1, impact2, expdict,
             struct_comment, hist_comment)

        # test the xml saving method
        xmlfile = doc.saveToLegacyXML(tdir)
    except Exception as e:
        # fail the test if saving/loading raises
        raise AssertionError('PagerData save/load failed: %s' % str(e))
    finally:
        shutil.rmtree(tdir)
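# The tdoc() helper called above is not shown in this excerpt. A minimal
# sketch of what such a helper might look like, assuming it mirrors the
# inline property checks in the PagerData test below; the checks here are
# illustrative, not the actual helper:
def tdoc(doc, shakegrid, impact1, impact2, expdict, struct_comment, hist_comment):
    # event metadata should round-trip from the ShakeMap grid
    eventinfo = doc.getEventInfo()
    assert eventinfo['mag'] == shakegrid.getEventDict()['magnitude']
    # impact comments should be returned unchanged
    imp1, imp2 = doc.getImpactComments()
    assert imp1 == impact1 and imp2 == impact2
    # total exposure should match the exposure dictionary
    exp = doc.getTotalExposure()
    assert np.isclose(np.array(exp), expdict['TotalExposure']).all()
    # structure and historical comments should be returned unchanged
    assert doc.getStructureComment() == struct_comment
    assert doc.getHistoricalComment() == hist_comment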
def test():
    homedir = os.path.dirname(os.path.abspath(__file__))  # where is this script?
    fatfile = os.path.join(homedir, '..', 'data', 'fatality.xml')
    ecofile = os.path.join(homedir, '..', 'data', 'economy.xml')
    cityfile = os.path.join(homedir, '..', 'data', 'cities1000.txt')
    event = 'northridge'
    shakefile = os.path.join(homedir, '..', 'data', 'eventdata', event,
                             '%s_grid.xml' % event)
    popfile = os.path.join(homedir, '..', 'data', 'eventdata', event,
                           '%s_gpw.flt' % event)
    isofile = os.path.join(homedir, '..', 'data', 'eventdata', event,
                           '%s_isogrid.bil' % event)
    urbanfile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge',
                             'northridge_urban.bil')
    oceanfile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge',
                             'northridge_ocean.json')
    invfile = os.path.join(homedir, '..', 'data', 'semi_inventory.hdf')
    colfile = os.path.join(homedir, '..', 'data', 'semi_collapse_mmi.hdf')
    casfile = os.path.join(homedir, '..', 'data', 'semi_casualty.hdf')
    workfile = os.path.join(homedir, '..', 'data', 'semi_workforce.hdf')

    # draw the exposure contour map
    tdir = tempfile.mkdtemp()
    outfile = os.path.join(tdir, 'output.pdf')
    pngfile, mapcities = draw_contour(shakefile, popfile, oceanfile, cityfile,
                                      outfile, make_png=True)
    shutil.rmtree(tdir)

    # semi-empirical fatalities
    popyear = 2012
    semi = SemiEmpiricalFatality.fromDefault()
    semi.setGlobalFiles(popfile, popyear, urbanfile, isofile)
    semiloss, resfat, nonresfat = semi.getLosses(shakefile)

    # empirical fatality and economic losses
    popgrowth = PopulationGrowth.fromDefault()
    econexp = EconExposure(popfile, 2012, isofile)
    fatmodel = EmpiricalLoss.fromDefaultFatality()
    expobject = Exposure(popfile, 2012, isofile, popgrowth)
    expdict = expobject.calcExposure(shakefile)
    fatdict = fatmodel.getLosses(expdict)
    econexpdict = econexp.calcExposure(shakefile)
    ecomodel = EmpiricalLoss.fromDefaultEconomic()
    ecodict = ecomodel.getLosses(expdict)
    shakegrid = econexp.getShakeGrid()
    pagerversion = 1
    cities = Cities.loadFromGeoNames(cityfile)
    impact1 = ('Red alert level for economic losses. Extensive damage is '
               'probable and the disaster is likely widespread. Estimated '
               'economic losses are less than 1% of GDP of Italy. Past events '
               'with this alert level have required a national or '
               'international level response.')
    impact2 = ('Orange alert level for shaking-related fatalities. '
               'Significant casualties are likely.')
    structcomment = ('Overall, the population in this region resides in '
                     'structures that are a mix of vulnerable and earthquake '
                     'resistant construction. The predominant vulnerable '
                     'building types are unreinforced brick with mud and '
                     'mid-rise nonductile concrete frame with infill '
                     'construction.')
    histeq = [1, 2, 3]
    struct_comment = ('Overall, the population in this region resides in '
                      'structures that are resistant to earthquake shaking, '
                      'though some vulnerable structures exist.')
    secondary_comment = ('Recent earthquakes in this area have caused '
                         'secondary hazards such as landslides that might '
                         'have contributed to losses.')
    hist_comment = ('A magnitude 7.1 earthquake 240 km east of this event '
                    'struck Reventador: Ecuador on March 6, 1987 (UTC), with '
                    'estimated population exposures of 14,000 at intensity '
                    'VIII and 2,000 at intensity IX or greater, resulting in '
                    'a reported 5,000 fatalities.')

    # populate and validate the PagerData object
    doc = PagerData()
    doc.setInputs(shakegrid, pagerversion, shakegrid.getEventDict()['event_id'])
    doc.setExposure(expdict, econexpdict)
    doc.setModelResults(fatmodel, ecomodel, fatdict, ecodict,
                        semiloss, resfat, nonresfat)
    doc.setComments(impact1, impact2, struct_comment, hist_comment,
                    secondary_comment)
    doc.setMapInfo(cityfile, mapcities)
    doc.validate()

    # test the property methods
    eventinfo = doc.getEventInfo()
    assert eventinfo['mag'] == shakegrid.getEventDict()['magnitude']

    imp1, imp2 = doc.getImpactComments()
    assert imp1 == impact1 and imp2 == impact2

    version = doc.getSoftwareVersion()
    elapsed = doc.getElapsed()

    exp = doc.getTotalExposure()
    assert np.isclose(np.array(exp), expdict['TotalExposure']).all()

    hist_table = doc.getHistoricalTable()
    assert hist_table[0]['EventID'] == '199206281505'

    scomm = doc.getStructureComment()
    assert scomm == struct_comment

    hcomm = doc.getHistoricalComment()
    assert hcomm == hist_comment

    citytable = doc.getCityTable()
    assert citytable.iloc[0]['name'] == 'Santa Clarita'

    summary = doc.getSummaryAlert()
    assert summary == 'yellow'
def test():
    event = 'northridge'
    homedir = os.path.dirname(os.path.abspath(__file__))  # where is this script?
    shakefile = os.path.join(homedir, '..', 'data', 'eventdata', event,
                             '%s_grid.xml' % event)
    popfile = os.path.join(homedir, '..', 'data', 'eventdata', event,
                           '%s_gpw.flt' % event)
    isofile = os.path.join(homedir, '..', 'data', 'eventdata', event,
                           '%s_isogrid.bil' % event)
    shapefile = os.path.join(homedir, '..', 'data', 'eventdata', event,
                             'City_BoundariesWGS84', 'City_Boundaries.shp')

    print('Test loading economic exposure from inputs...')
    econexp = EconExposure(popfile, 2012, isofile)
    print('Passed loading economic exposure from inputs...')

    print('Test loading empirical economic model from XML file...')
    ecomodel = EmpiricalLoss.fromDefaultEconomic()
    print('Passed loading empirical economic model from XML file.')

    print('Testing calculating probabilities for standard PAGER ranges...')
    expected = {'UK': 6819.883892 * 1e6, 'TotalDollars': 6819.883892 * 1e6}
    G = 2.5
    probs = ecomodel.getProbabilities(expected, G)
    testprobs = {
        '0-1': 0.00020696841425738358,
        '1-10': 0.0043200811319132086,
        '10-100': 0.041085446477813294,
        '100-1000': 0.17564981840854255,
        '1000-10000': 0.33957681768639003,
        '10000-100000': 0.29777890303065313,
        '100000-10000000': 0.14138196485040311
    }
    for key, value in probs.items():
        np.testing.assert_almost_equal(value, testprobs[key])
    print('Passed combining G values from all countries that '
          'contributed to losses...')

    print('Test retrieving economic model data from XML file...')
    model = ecomodel.getModel('af')
    testmodel = LognormalModel('dummy', 9.013810, 0.100000, 4.113200,
                               alpha=15.065400)
    assert model == testmodel
    print('Passed retrieving economic model data from XML file.')

    print('Testing with known exposures/losses for 1994 Northridge EQ...')
    exposure = {
        'xf': np.array([
            0, 0, 556171936.807, 718990717350.0, 2.40385709638e+12,
            2.47073141687e+12, 1.2576210799e+12, 698888019337.0,
            1913733716.16, 0.0
        ])
    }
    expodict = ecomodel.getLosses(exposure)
    testdict = {'xf': 25945225582}
    assert expodict['xf'] == testdict['xf']
    print('Passed testing with known exposures/losses for '
          '1994 Northridge EQ.')

    print('Testing calculating total economic losses for Northridge...')
    expdict = econexp.calcExposure(shakefile)
    ecomodel = EmpiricalLoss.fromDefaultEconomic()
    lossdict = ecomodel.getLosses(expdict)
    testdict = {'XF': 23172277187}
    assert lossdict['XF'] == testdict['XF']
    print('Passed calculating total economic losses for Northridge...')

    print('Testing creating an economic loss grid...')
    mmidata = econexp.getShakeGrid().getLayer('mmi').getData()
    popdata = econexp.getEconPopulationGrid().getData()
    isodata = econexp.getCountryGrid().getData()
    ecogrid = ecomodel.getLossGrid(mmidata, popdata, isodata)
    ecosum = 23172275857.094917
    assert np.nansum(ecogrid) == ecosum
    print('Passed creating an economic loss grid.')

    print('Testing assigning economic losses to polygons...')
    popdict = econexp.getPopulationGrid().getGeoDict()
    shapes = []
    with fiona.open(shapefile, 'r') as f:
        for row in f:
            shapes.append(row)
    ecoshapes, toteco = ecomodel.getLossByShapes(mmidata, popdata, isodata,
                                                 shapes, popdict)
    ecoshapes = sorted(ecoshapes,
                       key=lambda shape: shape['properties']['dollars_lost'],
                       reverse=True)
    lalosses = 17323352577
    for shape in ecoshapes:
        if shape['id'] == '312':  # Los Angeles
            cname = shape['properties']['CITY_NAME']
            dollars = shape['properties']['dollars_lost']
            assert lalosses == dollars
            assert cname == 'Los Angeles'
    print('Passed assigning economic losses to polygons...')
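# For reference: the empirical models evaluated by EmpiricalLoss above follow
# the two-parameter lognormal form described by Jaiswal and Wald, where the
# loss rate at shaking intensity S is Phi((1/beta) * ln(S/theta)) and Phi is
# the standard normal CDF. A minimal sketch of that formula; the theta/beta
# values in the usage comment are illustrative, not taken from the fitted
# model files:
import numpy as np
from scipy.stats import norm


def lognormal_loss_rate(mmi, theta, beta):
    """Return the lognormal loss rate at shaking intensity `mmi`."""
    return norm.cdf((1.0 / beta) * np.log(mmi / theta))


# usage: rates rise smoothly with intensity, e.g.
# lognormal_loss_rate(np.array([6.0, 7.0, 8.0, 9.0]), theta=9.0, beta=0.1)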
def main(pargs, config):
    # get the user's home directory
    homedir = os.path.expanduser("~")

    # handle cancel messages
    if pargs.cancel:
        # we presume that pargs.gridfile in this context is an event ID.
        msg = _cancel(pargs.gridfile, config)
        print(msg)
        return True

    # what kind of thing is gridfile?
    is_file = os.path.isfile(pargs.gridfile)
    is_url, url_gridfile = _is_url(pargs.gridfile)
    is_pdl, pdl_gridfile = _check_pdl(pargs.gridfile, config)
    if is_file:
        gridfile = pargs.gridfile
    elif is_url:
        gridfile = url_gridfile
    elif is_pdl:
        gridfile = pdl_gridfile
    else:
        print("ShakeMap Grid file %s does not exist." % pargs.gridfile)
        return False

    pager_folder = os.path.join(homedir, config["output_folder"])
    pager_archive = os.path.join(homedir, config["archive_folder"])

    admin = PagerAdmin(pager_folder, pager_archive)

    # stdout will now be logged as INFO, stderr will be logged as WARNING
    mail_host = config["mail_hosts"][0]
    mail_from = config["mail_from"]
    developers = config["developers"]
    logfile = os.path.join(pager_folder, "pager.log")
    plog = PagerLogger(logfile, developers, mail_from, mail_host,
                       debug=pargs.debug)
    logger = plog.getLogger()

    try:
        eid = None
        pager_version = None
        # get all the basic event information and print it, if requested
        shake_tuple = getHeaderData(gridfile)
        eid = shake_tuple[1]["event_id"].lower()
        etime = shake_tuple[1]["event_timestamp"]
        if not len(eid):
            eid = shake_tuple[0]["event_id"].lower()
        network = shake_tuple[1]["event_network"].lower()
        if network == "":
            network = "us"
        if not eid.startswith(network):
            eid = network + eid

        # create a ComCatInfo object to (hopefully) tell us a number of
        # things about this event
        try:
            ccinfo = ComCatInfo(eid)
            location = ccinfo.getLocation()
            tsunami = ccinfo.getTsunami()
            authid, allids = ccinfo.getAssociatedIds()
            authsource, othersources = ccinfo.getAssociatedSources()
        except Exception:
            # fail over to what we can determine locally
            location = shake_tuple[1]["event_description"]
            tsunami = shake_tuple[1]["magnitude"] >= TSUNAMI_MAG_THRESH
            authid = eid
            authsource = network
            allids = []

        # the location field can be empty (None), which breaks a bunch of things
        if location is None:
            location = ""

        # check to see if the user wanted to override the default tsunami criteria
        if pargs.tsunami != "auto":
            tsunami = pargs.tsunami == "on"

        # check to see if this event is a scenario
        is_scenario = False
        shakemap_type = shake_tuple[0]["shakemap_event_type"]
        if shakemap_type == "SCENARIO":
            is_scenario = True

        # if the event is NOT a scenario and the event time is in the future,
        # flag the event as a scenario and yell about it.
        if etime > datetime.datetime.utcnow():
            is_scenario = True
            logger.warning(
                "Event origin time is in the future! Flagging this as a scenario."
            )

        if is_scenario:
            if re.search("scenario", location.lower()) is None:
                location = "Scenario " + location

        # create the event directory (if it does not exist), and start logging there
        logger.info("Creating event directory")
        event_folder = admin.createEventFolder(authid, etime)

        # stop processing if there is a "stop" file in the event folder
        stopfile = os.path.join(event_folder, "stop")
        if os.path.isfile(stopfile):
            fmt = '"stop" file found in %s. Stopping processing, returning with 1.'
            logger.info(fmt % event_folder)
            return True

        pager_version = get_pager_version(event_folder)
        version_folder = os.path.join(event_folder,
                                      "version.%03d" % pager_version)
        os.makedirs(version_folder)
        event_logfile = os.path.join(version_folder, "event.log")
        # this will turn off the global rotating log file
        # and switch to the one in the version folder.
        plog.setVersionHandler(event_logfile)

        # copy the grid.xml file to the version folder.
        # sometimes (usually when testing) the input grid isn't called
        # grid.xml, so rename it here.
        version_grid = os.path.join(version_folder, "grid.xml")
        shutil.copyfile(gridfile, version_grid)

        # check to see if the tsunami flag has been previously set
        tsunami_toggle = {"on": 1, "off": 0}
        tsunami_file = os.path.join(event_folder, "tsunami")
        if os.path.isfile(tsunami_file):
            tsunami = tsunami_toggle[open(tsunami_file, "rt").read().strip()]

        # get the rest of the event info
        etime = shake_tuple[1]["event_timestamp"]
        elat = shake_tuple[1]["lat"]
        elon = shake_tuple[1]["lon"]
        emag = shake_tuple[1]["magnitude"]

        # get the year of the event
        event_year = shake_tuple[1]["event_timestamp"].year

        # find the population data collected most closely to the event_year
        pop_year, popfile = _get_pop_year(
            event_year, config["model_data"]["population_data"]
        )
        logger.info("Population year: %i Population file: %s\n"
                    % (pop_year, popfile))

        # get exposure results
        logger.info("Calculating population exposure.")
        isofile = config["model_data"]["country_grid"]
        expomodel = Exposure(popfile, pop_year, isofile)
        exposure = expomodel.calcExposure(gridfile)

        # incidentally grab the country code of the epicenter
        numcode = expomodel._isogrid.getValue(elat, elon)
        if np.isnan(numcode):
            cdict = None
        else:
            cdict = Country().getCountry(int(numcode))
        if cdict is None:
            ccode = "UK"
        else:
            ccode = cdict["ISO2"]
        logger.info("Country code at epicenter is %s" % ccode)

        # get fatality results, if requested
        logger.info("Calculating empirical fatalities.")
        fatmodel = EmpiricalLoss.fromDefaultFatality()
        fatdict = fatmodel.getLosses(exposure)

        # get economic results, if requested
        logger.info("Calculating economic exposure.")
        econexpmodel = EconExposure(popfile, pop_year, isofile)
        ecomodel = EmpiricalLoss.fromDefaultEconomic()
        econexposure = econexpmodel.calcExposure(gridfile)
        ecodict = ecomodel.getLosses(econexposure)
        shakegrid = econexpmodel.getShakeGrid()

        # get semi-empirical losses
        logger.info("Calculating semi-empirical fatalities.")
        urbanfile = config["model_data"]["urban_rural_grid"]
        if not os.path.isfile(urbanfile):
            raise PagerException("Urban-rural grid file %s does not exist."
                                 % urbanfile)

        semi = SemiEmpiricalFatality.fromDefault()
        semi.setGlobalFiles(popfile, pop_year, urbanfile, isofile)
        semiloss, resfat, nonresfat = semi.getLosses(gridfile)

        # get all of the other components of PAGER
        logger.info("Getting all comments.")
        # get the fatality and economic comments
        impact1, impact2 = get_impact_comments(
            fatdict, ecodict, econexposure, event_year, ccode
        )
        # get the comment describing vulnerable structures in the region
        struct_comment = get_structure_comment(resfat, nonresfat, semi)
        # get the comment describing historic secondary hazards
        secondary_comment = get_secondary_comment(elat, elon, emag)
        # get the comment describing historical earthquakes in the region
        historical_comment = get_historical_comment(elat, elon, emag,
                                                    exposure, fatdict)

        # generate the probability plots
        logger.info("Drawing probability plots.")
        fat_probs_file, eco_probs_file = _draw_probs(
            fatmodel, fatdict, ecomodel, ecodict, version_folder
        )

        # generate the exposure map
        exposure_base = os.path.join(version_folder, "exposure")
        logger.info("Generating exposure map...")
        oceanfile = config["model_data"]["ocean_vectors"]
        oceangrid = config["model_data"]["ocean_grid"]
        cityfile = config["model_data"]["city_file"]
        borderfile = config["model_data"]["border_vectors"]
        shake_grid = expomodel.getShakeGrid()
        pop_grid = expomodel.getPopulationGrid()
        pdf_file, png_file, mapcities = draw_contour(
            shake_grid,
            pop_grid,
            oceanfile,
            oceangrid,
            cityfile,
            exposure_base,
            borderfile,
            is_scenario=is_scenario,
        )
        logger.info("Generated exposure map %s" % pdf_file)

        # figure out whether this event has been "released".
        is_released = _get_release_status(
            pargs,
            config,
            fatmodel,
            fatdict,
            ecomodel,
            ecodict,
            shake_tuple,
            event_folder,
        )

        # create a data object to encapsulate everything we know about the
        # PAGER results, and then serialize it to disk as a number of JSON files.
        logger.info("Making PAGER Data object.")
        doc = PagerData()
        timezone_file = config["model_data"]["timezones_file"]
        elapsed = pargs.elapsed
        doc.setInputs(
            shakegrid,
            timezone_file,
            pager_version,
            shakegrid.getEventDict()["event_id"],
            authid,
            tsunami,
            location,
            is_released,
            elapsed=elapsed,
        )
        logger.info("Setting inputs.")
        doc.setExposure(exposure, econexposure)
        logger.info("Setting exposure.")
        doc.setModelResults(
            fatmodel, ecomodel, fatdict, ecodict, semiloss, resfat, nonresfat
        )
        logger.info("Setting comments.")
        doc.setComments(
            impact1, impact2, struct_comment, historical_comment, secondary_comment
        )
        logger.info("Setting map info.")
        doc.setMapInfo(cityfile, mapcities)
        logger.info("Validating.")
        doc.validate()

        # if we have determined that the event is a scenario (origin time is
        # in the future) and the ShakeMap is not flagged as such, set the
        # shakemap type in the PagerData object to 'SCENARIO'.
        if is_scenario:
            doc.setToScenario()

        json_folder = os.path.join(version_folder, "json")
        os.makedirs(json_folder)
        logger.info("Saving output to JSON.")
        doc.saveToJSON(json_folder)
        logger.info("Saving output to XML.")
        doc.saveToLegacyXML(version_folder)

        logger.info("Creating onePAGER pdf...")
        onepager_pdf, error = create_onepager(doc, version_folder)
        if onepager_pdf is None:
            raise PagerException("Could not create onePAGER output: \n%s" % error)

        # copy the contents.xml file to the version folder
        contentsfile = get_data_path("contents.xml")
        if contentsfile is None:
            raise PagerException("Could not find contents.xml file.")
        shutil.copy(contentsfile, version_folder)

        # send the pdf as an attachment to the internal team of PAGER users
        if not is_released and not is_scenario:
            message_pager(config, onepager_pdf, doc)

        # run transfer, as appropriate and as specified by config.
        # The PAGER product eventsource and eventsourcecode should match the
        # input ShakeMap settings for these properties. This can possibly
        # cause confusion if a regional ShakeMap is trumped by one from NEIC,
        # but this should happen less often than an NEIC origin being made
        # authoritative over a regional one.
        eventsource = network
        eventsourcecode = eid
        res, msg = transfer(
            config,
            doc,
            eventsourcecode,
            eventsource,
            version_folder,
            is_scenario=is_scenario,
        )
        logger.info(msg)
        if not res:
            logger.critical('Error transferring PAGER content. "%s"' % msg)

        print("Created onePAGER pdf %s" % onepager_pdf)
        logger.info("Created onePAGER pdf %s" % onepager_pdf)

        logger.info("Done.")
        return True
    except Exception as e:
        f = io.StringIO()
        traceback.print_exc(file=f)
        msg = "%s\n %s" % (str(e), f.getvalue())
        hostname = socket.gethostname()
        msg = msg + "\n" + "Error occurred on %s\n" % hostname
        if gridfile is not None:
            msg = msg + "\n" + "Error on file: %s\n" % gridfile
        if eid is not None:
            msg = msg + "\n" + "Error on event: %s\n" % eid
        if pager_version is not None:
            msg = msg + "\n" + "Error on version: %i\n" % pager_version
        f.close()
        logger.critical(msg)
        logger.info("Sent error to email")
        return False
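# A minimal sketch of the command-line wiring that main() above expects.
# The option names follow the pargs attributes used in the function
# (gridfile, cancel, debug, tsunami, elapsed); the real PAGER entry point
# may define them differently, and read_config() here is a hypothetical
# stand-in for however the config dictionary is actually loaded.
if __name__ == "__main__":
    import argparse
    import sys

    parser = argparse.ArgumentParser(description="Run PAGER on a ShakeMap grid.")
    parser.add_argument("gridfile",
                        help="path/URL to a ShakeMap grid.xml file, or an event ID")
    parser.add_argument("--cancel", action="store_true",
                        help="treat gridfile as an event ID and send a cancel message")
    parser.add_argument("--debug", action="store_true",
                        help="enable debug logging")
    parser.add_argument("--tsunami", choices=["auto", "on", "off"], default="auto",
                        help="override the default tsunami criteria")
    parser.add_argument("--elapsed", type=int, default=None,
                        help="override elapsed minutes since origin time")
    pargs = parser.parse_args()
    config = read_config()  # hypothetical config loader
    sys.exit(0 if main(pargs, config) else 1)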