Example 1
    def getEventsBeforeDate(self, beforedate):
        """Return the IDs of all archived events with origin time before beforedate."""
        events = []
        event_folders = self.getAllEventFolders()
        for event_folder in event_folders:
            # compare against the most recent version of each event
            version_folder = self.getLastVersion(event_folder)
            json_folder = os.path.join(version_folder, 'json')
            pdata = PagerData()
            pdata.loadFromJSON(json_folder)
            if pdata.time < beforedate:
                events.append(pdata.id)
        return events
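A minimal usage sketch, assuming this method lives on the PagerAdmin archive class that Example 6 constructs (the folder paths are placeholders):

import datetime

admin = PagerAdmin('/data/pager/output', '/data/pager/archive')  # hypothetical paths
cutoff = datetime.datetime(2017, 1, 1)
old_ids = admin.getEventsBeforeDate(cutoff)  # IDs of events with origin time before the cutoff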
Example 2
    def query(self, start_time=datetime.datetime(1800, 1, 1), end_time=None,
              mag_threshold=0.0, alert_threshold='green', version='last', eventid=None):
        # resolve end_time at call time; a utcnow() default in the signature
        # would be frozen at import time
        if end_time is None:
            end_time = datetime.datetime.utcnow()
        levels = {'green':0,
                  'yellow':1,
                  'orange':2,
                  'red':3}
        if eventid is not None:
            all_event_folders = [self.getEventFolder(eventid)]
            version = 'all'
        else:
            all_event_folders = self.getAllEventFolders()
        # include processing-time columns only when querying a single event
        do_process_time = eventid is not None

        df = pd.DataFrame(columns=PagerData.getSeriesColumns(processtime=do_process_time))
        jsonfolders = []
        for event_folder in all_event_folders:
            vnums = self.getVersionNumbers(event_folder)
            if version == 'first':
                vnum = vnums[0]
                jsonfolders.append(os.path.join(event_folder, 'version.%03d' % vnum, 'json'))
            elif version == 'last':
                vnum = vnums[-1]
                jsonfolders.append(os.path.join(event_folder, 'version.%03d' % vnum, 'json'))
            elif version == 'eight':
                # select the first version created more than EIGHT_HOURS after origin time
                for vnum in vnums:
                    jsonfolder = os.path.join(event_folder, 'version.%03d' % vnum, 'json')
                    pdata = PagerData()
                    try:
                        pdata.loadFromJSON(jsonfolder)
                    except Exception:
                        continue
                    if pdata.processing_time >= pdata.time + datetime.timedelta(seconds=EIGHT_HOURS):
                        jsonfolders.append(jsonfolder)
                        break
            elif version == 'all':
                for vnum in vnums:
                    jsonfolder = os.path.join(event_folder, 'version.%03d' % vnum, 'json')
                    jsonfolders.append(jsonfolder)
            else:
                raise PagerException('version option "%s" not supported.' % version)

        broken = []
        for jsonfolder in jsonfolders:
            pdata = PagerData()
            vnum = 1000  # sentinel: set to 0 on success, decremented on failure
            while vnum > 1:
                try:
                    pdata.loadFromJSON(jsonfolder)
                    vnum = 0
                except Exception:
                    # handle the case where the most recent version of the event has
                    # some sort of error causing it to fail to load; fall back to the
                    # previous version folder and retry
                    root, jsonfolder = os.path.split(jsonfolder)
                    root2, vfolder = os.path.split(root)
                    vt, vnums = vfolder.split('.')
                    vnum = int(vnums) - 1
                    jsonfolder = os.path.join(root2, '%s.%03d' % (vt, vnum), 'json')

            if not pdata._is_validated:
                # no loadable version was found; record it and skip the filters below
                broken.append(jsonfolder)
                continue
            meetsLevel = levels[pdata.summary_alert] >= levels[alert_threshold]
            meetsMag = pdata.magnitude >= mag_threshold
            if start_time <= pdata.time <= end_time and meetsLevel and meetsMag:
                row = pdata.toSeries(processtime=do_process_time)
                # DataFrame.append was removed in pandas 2.0; concat replaces it
                df = pd.concat([df, row.to_frame().T], ignore_index=True)
        df.Version = df.Version.astype(int)
        df.Elapsed = df.Elapsed.astype(int)
        df = df.sort_values('EventTime')
        df = df.set_index('EventID')
        return (df, broken)
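The version-fallback loop above rewrites a path such as <event_folder>/version.003/json into <event_folder>/version.002/json. A standalone sketch of just that path manipulation (the event folder name is hypothetical):

import os

jsonfolder = os.path.join('us2000abcd', 'version.003', 'json')
root, _ = os.path.split(jsonfolder)   # 'us2000abcd/version.003'
root2, vfolder = os.path.split(root)  # 'us2000abcd', 'version.003'
vt, vnums = vfolder.split('.')        # 'version', '003'
vnum = int(vnums) - 1                 # 2
print(os.path.join(root2, '%s.%03d' % (vt, vnum), 'json'))  # us2000abcd/version.002/json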
Example 3
def test():
    homedir = os.path.dirname(os.path.abspath(
        __file__))  # where is this script?
    fatfile = os.path.join(homedir, '..', 'data', 'fatality.xml')
    ecofile = os.path.join(homedir, '..', 'data', 'economy.xml')
    cityfile = os.path.join(homedir, '..', 'data', 'cities1000.txt')
    event = 'northridge'
    shakefile = os.path.join(homedir, '..', 'data',
                             'eventdata', event, '%s_grid.xml' % event)
    popfile = os.path.join(homedir, '..', 'data',
                           'eventdata', event, '%s_gpw.flt' % event)
    isofile = os.path.join(homedir, '..', 'data',
                           'eventdata', event, '%s_isogrid.bil' % event)
    urbanfile = os.path.join(homedir, '..', 'data',
                             'eventdata', 'northridge', 'northridge_urban.bil')
    oceanfile = os.path.join(
        homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_ocean.json')
    oceangridfile = os.path.join(
        homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_ocean.bil')
    timezonefile = os.path.join(
        homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_timezone.shp')

    invfile = os.path.join(homedir, '..', 'data', 'semi_inventory.hdf')
    colfile = os.path.join(homedir, '..', 'data', 'semi_collapse_mmi.hdf')
    casfile = os.path.join(homedir, '..', 'data', 'semi_casualty.hdf')
    workfile = os.path.join(homedir, '..', 'data', 'semi_workforce.hdf')

    tdir = tempfile.mkdtemp()
    basename = os.path.join(tdir, 'output')

    exp = Exposure(popfile, 2012, isofile)
    results = exp.calcExposure(shakefile)
    shakegrid = exp.getShakeGrid()
    popgrid = exp.getPopulationGrid()

    pdffile, pngfile, mapcities = draw_contour(
        shakegrid, popgrid, oceanfile, oceangridfile, cityfile, basename)
    shutil.rmtree(tdir)

    popyear = 2012

    shake_tuple = getHeaderData(shakefile)
    tsunami = shake_tuple[1]['magnitude'] >= TSUNAMI_MAG_THRESH

    semi = SemiEmpiricalFatality.fromDefault()
    semi.setGlobalFiles(popfile, popyear, urbanfile, isofile)
    semiloss, resfat, nonresfat = semi.getLosses(shakefile)

    popgrowth = PopulationGrowth.fromDefault()
    econexp = EconExposure(popfile, 2012, isofile)
    fatmodel = EmpiricalLoss.fromDefaultFatality()
    expobject = Exposure(popfile, 2012, isofile, popgrowth)

    expdict = expobject.calcExposure(shakefile)
    fatdict = fatmodel.getLosses(expdict)
    econexpdict = econexp.calcExposure(shakefile)
    ecomodel = EmpiricalLoss.fromDefaultEconomic()
    ecodict = ecomodel.getLosses(expdict)
    shakegrid = econexp.getShakeGrid()
    pagerversion = 1
    cities = Cities.loadFromGeoNames(cityfile)
    impact1 = '''Red alert level for economic losses. Extensive damage is probable 
    and the disaster is likely widespread. Estimated economic losses are less 
    than 1% of GDP of Italy. Past events with this alert level have required 
    a national or international level response.'''
    impact2 = '''Orange alert level for shaking-related fatalities. Significant 
    casualties are likely.'''
    structcomment = '''Overall, the population in this region resides in structures 
    that are a mix of vulnerable and earthquake resistant construction. The predominant 
    vulnerable building types are unreinforced brick with mud and mid-rise nonductile 
    concrete frame with infill construction.'''
    histeq = [1, 2, 3]
    struct_comment = '''Overall, the population in this region resides
    in structures that are resistant to earthquake
    shaking, though some vulnerable structures
    exist.'''
    secondary_comment = '''Recent earthquakes in this area have caused secondary hazards 
    such as landslides that might have contributed to losses.'''
    hist_comment = '''A magnitude 7.1 earthquake 240 km east of this event struck Reventador: Ecuador 
    on March 6, 1987 (UTC), with estimated population exposures of 14,000 at intensity VIII and 2,000 
    at intensity IX or greater, resulting in a reported 5,000 fatalities.'''.replace('\n', '')

    location = 'At the top of the world.'
    is_released = True

    doc = PagerData()
    eventcode = shakegrid.getEventDict()['event_id']
    versioncode = eventcode
    doc.setInputs(shakegrid, timezonefile, pagerversion,
                  versioncode, eventcode, tsunami, location, is_released)
    doc.setExposure(expdict, econexpdict)
    doc.setModelResults(fatmodel, ecomodel,
                        fatdict, ecodict,
                        semiloss, resfat, nonresfat)
    doc.setComments(impact1, impact2, struct_comment,
                    hist_comment, secondary_comment)
    doc.setMapInfo(cityfile, mapcities)
    doc.validate()

    # let's test the property methods
    tdoc(doc, shakegrid, impact1, impact2,
         expdict, struct_comment, hist_comment)

    # see if we can save this to a bunch of files then read them back in
    tdir = tempfile.mkdtemp()
    try:
        doc.saveToJSON(tdir)
        newdoc = PagerData()
        newdoc.loadFromJSON(tdir)
        tdoc(newdoc, shakegrid, impact1, impact2,
             expdict, struct_comment, hist_comment)

        # test the xml saving method
        xmlfile = doc.saveToLegacyXML(tdir)
    finally:
        # let failures propagate with their tracebacks instead of masking them
        # behind a bare assert; cleanup still runs either way
        shutil.rmtree(tdir)
Example 4
    def query(self, start_time=datetime.datetime(1800, 1, 1), end_time=None,
              mag_threshold=0.0, alert_threshold='green', version='last', eventid=None):
        """Query PAGER file for events matching input parameters.

        :param start_time:
          Datetime indicating the minimum date/time for the search.
        :param end_time:
          Datetime indicating the maximum date/time for the search
          (defaults to the current UTC time, resolved at call time).
        :param mag_threshold:
          Minimum magnitude threshold.
        :param alert_threshold:
          Minimum alert level threshold ('green','yellow','orange','red').
        :param version:
          Which version(s) to select from events:
            - 'all' Get all versions.
            - 'first' Get first version.
            - 'last' Get last version.
            - 'eight' Get first version that was created more than 8 hours after origin time.
        :param eventid:
          Return version(s) for specific event ID.
        :returns:
          Tuple of (dataframe, broken), where broken is a list of JSON folders
          that could not be loaded, and the dataframe contains columns:
            - 'EventID' - event ID
            - 'Impacted Country ($)' Country with largest dollar losses.
            - 'Version' - Version number
            - 'EventTime' - Origin Time
            - 'Lat' - Origin latitude.
            - 'Lon' - Origin longitude.
            - 'Depth' - Origin depth.
            - 'Mag' - Event magnitude.
            - 'MaxMMI' - Maximum MMI value (felt by at least 1000 people)
            - 'FatalityAlert' - Fatality alert level ('green','yellow','orange','red')
            - 'EconomicAlert' - Economic alert level ('green','yellow','orange','red')
            - 'SummaryAlert' - Summary alert level ('green','yellow','orange','red')
            - 'Elapsed' - Elapsed time (minutes) between origin time and version.
        """
        # resolve end_time at call time; a utcnow() default in the signature
        # would be frozen at import time
        if end_time is None:
            end_time = datetime.datetime.utcnow()
        levels = {'green': 0,
                  'yellow': 1,
                  'orange': 2,
                  'red': 3}
        if eventid is not None:
            all_event_folders = [self.getEventFolder(eventid)]
            version = 'all'
        else:
            all_event_folders = self.getAllEventFolders()
        # include processing-time columns only when querying a single event
        do_process_time = eventid is not None

        df = pd.DataFrame(columns=PagerData.getSeriesColumns(
            processtime=do_process_time))
        jsonfolders = []
        for event_folder in all_event_folders:
            vnums = self.getVersionNumbers(event_folder)
            if version == 'first':
                vnum = vnums[0]
                jsonfolders.append(os.path.join(
                    event_folder, 'version.%03d' % vnum, 'json'))
            elif version == 'last':
                vnum = vnums[-1]
                jsonfolders.append(os.path.join(
                    event_folder, 'version.%03d' % vnum, 'json'))
            elif version == 'eight':
                # select the first version created more than EIGHT_HOURS after origin time
                for vnum in vnums:
                    jsonfolder = os.path.join(
                        event_folder, 'version.%03d' % vnum, 'json')
                    pdata = PagerData()
                    try:
                        pdata.loadFromJSON(jsonfolder)
                    except Exception:
                        continue
                    if pdata.processing_time >= pdata.time + datetime.timedelta(seconds=EIGHT_HOURS):
                        jsonfolders.append(jsonfolder)
                        break
            elif version == 'all':
                for vnum in vnums:
                    jsonfolder = os.path.join(
                        event_folder, 'version.%03d' % vnum, 'json')
                    jsonfolders.append(jsonfolder)
            else:
                raise PagerException(
                    'version option "%s" not supported.' % version)

        broken = []
        for jsonfolder in jsonfolders:
            pdata = PagerData()
            vnum = 1000  # sentinel: set to 0 on success, decremented on failure
            while vnum > 1:
                try:
                    pdata.loadFromJSON(jsonfolder)
                    vnum = 0
                except Exception:
                    # handle the case where the most recent version of the event has
                    # some sort of error causing it to fail to load; fall back to the
                    # previous version folder and retry
                    root, jsonfolder = os.path.split(jsonfolder)
                    root2, vfolder = os.path.split(root)
                    vt, vnums = vfolder.split('.')
                    vnum = int(vnums) - 1
                    jsonfolder = os.path.join(
                        root2, '%s.%03d' % (vt, vnum), 'json')

            if not pdata._is_validated:
                # no loadable version was found; record it and skip the filters below
                broken.append(jsonfolder)
                continue
            meetsLevel = levels[pdata.summary_alert] >= levels[alert_threshold]
            meetsMag = pdata.magnitude >= mag_threshold
            if start_time <= pdata.time <= end_time and meetsLevel and meetsMag:
                row = pdata.toSeries(processtime=do_process_time)
                # DataFrame.append was removed in pandas 2.0; concat replaces it
                df = pd.concat([df, row.to_frame().T], ignore_index=True)
        df.Version = df.Version.astype(int)
        df.Elapsed = df.Elapsed.astype(int)
        df = df.sort_values('EventTime')
        df = df.set_index('EventID')
        return (df, broken)
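A minimal sketch of calling query() with the documented parameters, assuming it is a method on the PagerAdmin class constructed as in Example 6 (the folder paths are placeholders):

import datetime

admin = PagerAdmin('/data/pager/output', '/data/pager/archive')  # hypothetical paths
df, broken = admin.query(start_time=datetime.datetime(2016, 1, 1),
                         end_time=datetime.datetime(2017, 1, 1),
                         mag_threshold=6.0,
                         alert_threshold='yellow',
                         version='last')
# df is indexed by EventID and sorted by EventTime; broken lists folders that failed to load
print(df[['Mag', 'SummaryAlert', 'Elapsed']])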
Example 5
def test():
    homedir = os.path.dirname(os.path.abspath(__file__)) #where is this script?
    fatfile = os.path.join(homedir,'..','data','fatality.xml')
    ecofile = os.path.join(homedir,'..','data','economy.xml')
    cityfile = os.path.join(homedir,'..','data','cities1000.txt')
    event = 'northridge'
    shakefile = os.path.join(homedir,'..','data','eventdata',event,'%s_grid.xml' % event)
    popfile = os.path.join(homedir,'..','data','eventdata',event,'%s_gpw.flt' % event)
    isofile = os.path.join(homedir,'..','data','eventdata',event,'%s_isogrid.bil' % event)
    urbanfile = os.path.join(homedir,'..','data','eventdata','northridge','northridge_urban.bil')
    oceanfile = os.path.join(homedir,'..','data','eventdata','northridge','northridge_ocean.json')
    
    invfile = os.path.join(homedir,'..','data','semi_inventory.hdf')
    colfile = os.path.join(homedir,'..','data','semi_collapse_mmi.hdf')
    casfile = os.path.join(homedir,'..','data','semi_casualty.hdf')
    workfile = os.path.join(homedir,'..','data','semi_workforce.hdf')

    tdir = tempfile.mkdtemp()
    outfile = os.path.join(tdir,'output.pdf')
    pngfile,mapcities = draw_contour(shakefile,popfile,oceanfile,cityfile,outfile,make_png=True)
    shutil.rmtree(tdir)
    
    popyear = 2012

    semi = SemiEmpiricalFatality.fromDefault()
    semi.setGlobalFiles(popfile,popyear,urbanfile,isofile)
    semiloss,resfat,nonresfat = semi.getLosses(shakefile)
    
    popgrowth = PopulationGrowth.fromDefault()
    econexp = EconExposure(popfile,2012,isofile)
    fatmodel = EmpiricalLoss.fromDefaultFatality()
    expobject = Exposure(popfile,2012,isofile,popgrowth)
    
    expdict = expobject.calcExposure(shakefile)
    fatdict = fatmodel.getLosses(expdict)
    econexpdict = econexp.calcExposure(shakefile)
    ecomodel = EmpiricalLoss.fromDefaultEconomic()
    ecodict = ecomodel.getLosses(expdict)
    shakegrid = econexp.getShakeGrid()
    pagerversion = 1
    cities = Cities.loadFromGeoNames(cityfile)
    impact1 = '''Red alert level for economic losses. Extensive damage is probable 
    and the disaster is likely widespread. Estimated economic losses are less 
    than 1% of GDP of Italy. Past events with this alert level have required 
    a national or international level response.'''
    impact2 = '''Orange alert level for shaking-related fatalities. Significant 
    casualties are likely.'''
    structcomment = '''Overall, the population in this region resides in structures 
    that are a mix of vulnerable and earthquake resistant construction. The predominant 
    vulnerable building types are unreinforced brick with mud and mid-rise nonductile 
    concrete frame with infill construction.'''
    histeq = [1,2,3]
    struct_comment = '''Overall, the population in this region resides
    in structures that are resistant to earthquake
    shaking, though some vulnerable structures
    exist.'''
    secondary_comment = '''Recent earthquakes in this area have caused secondary hazards 
    such as landslides that might have contributed to losses.'''
    hist_comment = '''A magnitude 7.1 earthquake 240 km east of this event struck Reventador: Ecuador 
    on March 6, 1987 (UTC), with estimated population exposures of 14,000 at intensity VIII and 2,000 
    at intensity IX or greater, resulting in a reported 5,000 fatalities.'''.replace('\n','')
    doc = PagerData()
    doc.setInputs(shakegrid,pagerversion,shakegrid.getEventDict()['event_id'])
    doc.setExposure(expdict,econexpdict)
    doc.setModelResults(fatmodel,ecomodel,
                        fatdict,ecodict,
                        semiloss,resfat,nonresfat)
    doc.setComments(impact1,impact2,struct_comment,hist_comment,secondary_comment)
    doc.setMapInfo(cityfile,mapcities)
    doc.validate()

    eventinfo = doc.getEventInfo()
    assert eventinfo['mag'] == shakegrid.getEventDict()['magnitude']
    
    imp1,imp2 = doc.getImpactComments()
    assert imp1 == impact1 and imp2 == impact2

    version = doc.getSoftwareVersion()
    elapsed = doc.getElapsed()

    exp = doc.getTotalExposure()
    assert np.isclose(np.array(exp),expdict['TotalExposure']).all()

    hist_table = doc.getHistoricalTable()
    assert hist_table[0]['EventID'] == '199206281505'

    scomm = doc.getStructureComment()
    assert scomm == struct_comment
    
    hcomm = doc.getHistoricalComment()
    assert hcomm == hist_comment

    citytable = doc.getCityTable()
    assert citytable.iloc[0]['name'] == 'Santa Clarita'

    summary = doc.getSummaryAlert()
    assert summary == 'yellow'
Example 6
def main(pargs, config):
    # get the users home directory
    homedir = os.path.expanduser("~")

    # handle cancel messages
    if pargs.cancel:
        # we presume that pargs.gridfile in this context is an event ID.
        msg = _cancel(pargs.gridfile, config)
        print(msg)
        return True

    # what kind of thing is gridfile?
    is_file = os.path.isfile(pargs.gridfile)
    is_url, url_gridfile = _is_url(pargs.gridfile)
    is_pdl, pdl_gridfile = _check_pdl(pargs.gridfile, config)
    if is_file:
        gridfile = pargs.gridfile
    elif is_url:
        gridfile = url_gridfile
    elif is_pdl:
        gridfile = pdl_gridfile
    else:
        print("ShakeMap Grid file %s does not exist." % pargs.gridfile)
        return False

    pager_folder = os.path.join(homedir, config["output_folder"])
    pager_archive = os.path.join(homedir, config["archive_folder"])

    admin = PagerAdmin(pager_folder, pager_archive)

    # stdout will now be logged as INFO, stderr will be logged as WARNING
    mail_host = config["mail_hosts"][0]
    mail_from = config["mail_from"]
    developers = config["developers"]
    logfile = os.path.join(pager_folder, "pager.log")
    plog = PagerLogger(logfile, developers, mail_from, mail_host, debug=pargs.debug)
    logger = plog.getLogger()

    try:
        eid = None
        pager_version = None
        # get all the basic event information and print it, if requested
        shake_tuple = getHeaderData(gridfile)
        eid = shake_tuple[1]["event_id"].lower()
        etime = shake_tuple[1]["event_timestamp"]
        if not len(eid):
            eid = shake_tuple[0]["event_id"].lower()
        network = shake_tuple[1]["event_network"].lower()
        if network == "":
            network = "us"
        if not eid.startswith(network):
            eid = network + eid

        # Create a ComcatInfo object to hopefully tell us a number of things about this event
        try:
            ccinfo = ComCatInfo(eid)
            location = ccinfo.getLocation()
            tsunami = ccinfo.getTsunami()
            authid, allids = ccinfo.getAssociatedIds()
            authsource, othersources = ccinfo.getAssociatedSources()
        except Exception:  # fail over to what we can determine locally
            location = shake_tuple[1]["event_description"]
            tsunami = shake_tuple[1]["magnitude"] >= TSUNAMI_MAG_THRESH
            authid = eid
            authsource = network
            allids = []

        # location field can be empty (None), which breaks a bunch of things
        if location is None:
            location = ""

        # Check to see if user wanted to override default tsunami criteria
        if pargs.tsunami != "auto":
            if pargs.tsunami == "on":
                tsunami = True
            else:
                tsunami = False

        # check to see if this event is a scenario
        is_scenario = False
        shakemap_type = shake_tuple[0]["shakemap_event_type"]
        if shakemap_type == "SCENARIO":
            is_scenario = True

        # if event is NOT a scenario and event time is in the future,
        # flag the event as a scenario and yell about it.
        if etime > datetime.datetime.utcnow():
            is_scenario = True
            logger.warning(
                "Event origin time is in the future! Flagging this as a scenario."
            )

        if is_scenario:
            if re.search("scenario", location.lower()) is None:
                location = "Scenario " + location

        # create the event directory (if it does not exist), and start logging there
        logger.info("Creating event directory")
        event_folder = admin.createEventFolder(authid, etime)

        # Stop processing if there is a "stop" file in the event folder
        stopfile = os.path.join(event_folder, "stop")
        if os.path.isfile(stopfile):
            fmt = '"stop" file found in %s.  Stopping processing, returning with 1.'
            logger.info(fmt % (event_folder))
            return True

        pager_version = get_pager_version(event_folder)
        version_folder = os.path.join(event_folder, "version.%03d" % pager_version)
        os.makedirs(version_folder)
        event_logfile = os.path.join(version_folder, "event.log")

        # this will turn off the global rotating log file
        # and switch to the one in the version folder.
        plog.setVersionHandler(event_logfile)

        # Copy the grid.xml file to the version folder
        # sometimes (usu when testing) the input grid isn't called grid.xml.  Rename it here.
        version_grid = os.path.join(version_folder, "grid.xml")
        shutil.copyfile(gridfile, version_grid)

        # Check to see if the tsunami flag has been previously set
        tsunami_toggle = {"on": 1, "off": 0}
        tsunami_file = os.path.join(event_folder, "tsunami")
        if os.path.isfile(tsunami_file):
            with open(tsunami_file, "rt") as f:
                tsunami = tsunami_toggle[f.read().strip()]

        # get the rest of the event info
        etime = shake_tuple[1]["event_timestamp"]
        elat = shake_tuple[1]["lat"]
        elon = shake_tuple[1]["lon"]
        emag = shake_tuple[1]["magnitude"]

        # get the year of the event
        event_year = shake_tuple[1]["event_timestamp"].year

        # find the population data collected most closely to the event_year
        pop_year, popfile = _get_pop_year(
            event_year, config["model_data"]["population_data"]
        )
        logger.info("Population year: %i Population file: %s\n" % (pop_year, popfile))

        # Get exposure results
        logger.info("Calculating population exposure.")
        isofile = config["model_data"]["country_grid"]
        expomodel = Exposure(popfile, pop_year, isofile)
        exposure = expomodel.calcExposure(gridfile)

        # incidentally grab the country code of the epicenter
        numcode = expomodel._isogrid.getValue(elat, elon)
        if np.isnan(numcode):
            cdict = None
        else:
            cdict = Country().getCountry(int(numcode))
        if cdict is None:
            ccode = "UK"
        else:
            ccode = cdict["ISO2"]

        logger.info("Country code at epicenter is %s" % ccode)

        # get fatality results, if requested
        logger.info("Calculating empirical fatalities.")
        fatmodel = EmpiricalLoss.fromDefaultFatality()
        fatdict = fatmodel.getLosses(exposure)

        # get economic results, if requested
        logger.info("Calculating economic exposure.")
        econexpmodel = EconExposure(popfile, pop_year, isofile)
        ecomodel = EmpiricalLoss.fromDefaultEconomic()
        econexposure = econexpmodel.calcExposure(gridfile)
        ecodict = ecomodel.getLosses(econexposure)
        shakegrid = econexpmodel.getShakeGrid()

        # Get semi-empirical losses
        logger.info("Calculating semi-empirical fatalities.")
        urbanfile = config["model_data"]["urban_rural_grid"]
        if not os.path.isfile(urbanfile):
            raise PagerException("Urban-rural grid file %s does not exist." % urbanfile)

        semi = SemiEmpiricalFatality.fromDefault()
        semi.setGlobalFiles(popfile, pop_year, urbanfile, isofile)
        semiloss, resfat, nonresfat = semi.getLosses(gridfile)

        # get all of the other components of PAGER
        logger.info("Getting all comments.")
        # get the fatality and economic comments
        impact1, impact2 = get_impact_comments(
            fatdict, ecodict, econexposure, event_year, ccode
        )
        # get comment describing vulnerable structures in the region.
        struct_comment = get_structure_comment(resfat, nonresfat, semi)
        # get the comment describing historic secondary hazards
        secondary_comment = get_secondary_comment(elat, elon, emag)
        # get the comment describing historical comments in the region
        historical_comment = get_historical_comment(elat, elon, emag, exposure, fatdict)

        # generate the probability plots
        logger.info("Drawing probability plots.")
        fat_probs_file, eco_probs_file = _draw_probs(
            fatmodel, fatdict, ecomodel, ecodict, version_folder
        )

        # generate the exposure map
        exposure_base = os.path.join(version_folder, "exposure")
        logger.info("Generating exposure map...")
        oceanfile = config["model_data"]["ocean_vectors"]
        oceangrid = config["model_data"]["ocean_grid"]
        cityfile = config["model_data"]["city_file"]
        borderfile = config["model_data"]["border_vectors"]
        shake_grid = expomodel.getShakeGrid()
        pop_grid = expomodel.getPopulationGrid()
        pdf_file, png_file, mapcities = draw_contour(
            shake_grid,
            pop_grid,
            oceanfile,
            oceangrid,
            cityfile,
            exposure_base,
            borderfile,
            is_scenario=is_scenario,
        )
        logger.info("Generated exposure map %s" % pdf_file)

        # figure out whether this event has been "released".
        is_released = _get_release_status(
            pargs,
            config,
            fatmodel,
            fatdict,
            ecomodel,
            ecodict,
            shake_tuple,
            event_folder,
        )

        # Create a data object to encapsulate everything we know about the PAGER
        # results, and then serialize that to disk in the form of a number of JSON files.
        logger.info("Making PAGER Data object.")
        doc = PagerData()
        timezone_file = config["model_data"]["timezones_file"]
        elapsed = pargs.elapsed
        doc.setInputs(
            shakegrid,
            timezone_file,
            pager_version,
            shakegrid.getEventDict()["event_id"],
            authid,
            tsunami,
            location,
            is_released,
            elapsed=elapsed,
        )
        logger.info("Setting inputs.")
        doc.setExposure(exposure, econexposure)
        logger.info("Setting exposure.")
        doc.setModelResults(
            fatmodel, ecomodel, fatdict, ecodict, semiloss, resfat, nonresfat
        )
        logger.info("Setting comments.")
        doc.setComments(
            impact1, impact2, struct_comment, historical_comment, secondary_comment
        )
        logger.info("Setting map info.")
        doc.setMapInfo(cityfile, mapcities)
        logger.info("Validating.")
        doc.validate()

        # if we have determined that the event is a scenario (origin time is in the future)
        # and the shakemap is not flagged as such, set the shakemap type in the
        # pagerdata object to be 'SCENARIO'.
        if is_scenario:
            doc.setToScenario()

        json_folder = os.path.join(version_folder, "json")
        os.makedirs(json_folder)
        logger.info("Saving output to JSON.")
        doc.saveToJSON(json_folder)
        logger.info("Saving output to XML.")
        doc.saveToLegacyXML(version_folder)

        logger.info("Creating onePAGER pdf...")
        onepager_pdf, error = create_onepager(doc, version_folder)
        if onepager_pdf is None:
            raise PagerException("Could not create onePAGER output: \n%s" % error)

        # copy the contents.xml file to the version folder
        contentsfile = get_data_path("contents.xml")
        if contentsfile is None:
            raise PagerException("Could not find contents.xml file.")
        shutil.copy(contentsfile, version_folder)

        # send pdf as attachment to internal team of PAGER users
        if not is_released and not is_scenario:
            message_pager(config, onepager_pdf, doc)

        # run transfer, as appropriate and as specified by config
        # the PAGER product eventsource and eventsourcecode should
        # match the input ShakeMap settings for these properties.
        # This can possibly cause confusion if a regional ShakeMap is
        # trumped with one from NEIC, but this should happen less often
        # than an NEIC origin being made authoritative over a regional one.
        eventsource = network
        eventsourcecode = eid
        res, msg = transfer(
            config,
            doc,
            eventsourcecode,
            eventsource,
            version_folder,
            is_scenario=is_scenario,
        )
        logger.info(msg)
        if not res:
            logger.critical('Error transferring PAGER content. "%s"' % msg)

        print("Created onePAGER pdf %s" % onepager_pdf)
        logger.info("Created onePAGER pdf %s" % onepager_pdf)

        logger.info("Done.")
        return True
    except Exception as e:
        f = io.StringIO()
        traceback.print_exc(file=f)
        msg = "%s\n %s" % (str(e), f.getvalue())
        hostname = socket.gethostname()
        msg = msg + "\n" + "Error occurred on %s\n" % (hostname)
        if gridfile is not None:
            msg = msg + "\n" + "Error on file: %s\n" % (gridfile)
        if eid is not None:
            msg = msg + "\n" + "Error on event: %s\n" % (eid)
        if pager_version is not None:
            msg = msg + "\n" + "Error on version: %i\n" % (pager_version)
        f.close()
        logger.critical(msg)
        logger.info("Sent error to email")
        return False
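main() leans on several helpers that are not shown here. As one illustration, a minimal sketch of what _get_pop_year could look like, assuming config['model_data']['population_data'] is a list of dicts with 'population_year' and 'population_grid' keys (the key names and structure are assumptions, not the project's actual implementation):

def _get_pop_year(event_year, popdata):
    # choose the population grid collected closest in time to the event year
    best = min(popdata, key=lambda entry: abs(entry['population_year'] - event_year))
    return best['population_year'], best['population_grid']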