Example #1
def model_test_real():
    # test with real data
    popyear = 2012
    homedir = os.path.dirname(
        os.path.abspath(__file__))  # where is this script?
    invfile = os.path.join(homedir, '..', 'data', 'semi_inventory.hdf')
    colfile = os.path.join(homedir, '..', 'data', 'semi_collapse_mmi.hdf')
    fatfile = os.path.join(homedir, '..', 'data', 'semi_casualty.hdf')
    workfile = os.path.join(homedir, '..', 'data', 'semi_workforce.hdf')
    growthfile = os.path.join(homedir, '..', 'data',
                              'WPP2015_POP_F02_POPULATION_GROWTH_RATE.xls')
    popfile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge',
                           'northridge_gpw.flt')
    shakefile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge',
                             'northridge_grid.xml')

    isofile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge',
                           'northridge_isogrid.bil')
    urbanfile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge',
                             'northridge_urban.bil')
    semi = SemiEmpiricalFatality.fromDefault()
    semi.setGlobalFiles(popfile, popyear, urbanfile, isofile)

    print('Testing semi-empirical losses...')
    losses, resfat, nonresfat = semi.getLosses(shakefile)
    testlosses = 539
    assert testlosses == losses
    print('Passed.')
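Distilled from the test above, a minimal sketch of the semi-empirical workflow. This assumes the losspager package is installed; the import path and the grid file names here are assumptions, not values from the test.

# a minimal sketch, assuming losspager is installed and these
# (hypothetical) grid files exist
from losspager.models.semimodel import SemiEmpiricalFatality

semi = SemiEmpiricalFatality.fromDefault()
# point the model at global population, urban/rural, and country-code grids
semi.setGlobalFiles('gpw_population.flt', 2012, 'urban_rural.bil', 'iso_codes.bil')
# total fatalities plus per-country residential/non-residential breakdowns
losses, resfat, nonresfat = semi.getLosses('shakemap_grid.xml')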
Example #2
def test_structure():
    resfat = {'IN': {'A1': 434, 'A2': 837}, 'NP': {'UFB': 200, 'W1': 100}}
    nonresfat = {'IN': {'A1': 434, 'A2': 837}, 'NP': {'UFB': 200, 'W1': 100}}
    semimodel = SemiEmpiricalFatality.fromDefault()
    structure_comment = get_structure_comment(resfat, nonresfat, semimodel)
    cmpstr = 'Overall, the population in this region resides in structures that are vulnerable to earthquake shaking, though resistant structures exist.  The predominant vulnerable building type is adobe block with light roof construction.'
    assert structure_comment == cmpstr
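For context, resfat and nonresfat are nested {country code: {building type: fatalities}} dictionaries. A self-contained toy showing how they reduce to per-country totals, mirroring the summation loop inside get_structure_comment (Example #5):

# toy illustration (no losspager required)
resfat = {'IN': {'A1': 434, 'A2': 837}, 'NP': {'UFB': 200, 'W1': 100}}
nonresfat = {'IN': {'A1': 434, 'A2': 837}, 'NP': {'UFB': 200, 'W1': 100}}
totals = {cc: sum(resfat[cc].values()) + sum(nonresfat[cc].values())
          for cc in resfat}
print(totals)  # {'IN': 2542, 'NP': 600}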
Example #3
def test_manual_calcs():
    # where is this script?
    homedir = os.path.dirname(os.path.abspath(__file__))
    popfile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge',
                           'northridge_gpw.flt')
    urbfile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge',
                           'northridge_urban.bil')
    isofile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge',
                           'northridge_isogrid.bil')
    ccode = 'ID'
    timeofday = 'day'
    density = URBAN
    pop = 100000
    mmi = 8.5
    popyear = 2016
    semi = SemiEmpiricalFatality.fromDefault()
    semi.setGlobalFiles(popfile, popyear, urbfile, isofile)

    # let's do the calculations "manually" by getting all of the data and doing our own multiplications
    workforce = semi.getWorkforce(ccode)
    res, nonres, outside = pop_dist(pop, workforce, timeofday, density)
    resinv, nonresinv = semi.getInventories(ccode, density)
    res_collapse = semi.getCollapse(ccode, mmi, resinv)
    nonres_collapse = semi.getCollapse(ccode, mmi, nonresinv)
    res_fat_rates = semi.getFatalityRates(ccode, timeofday, resinv)
    nonres_fat_rates = semi.getFatalityRates(ccode, timeofday, nonresinv)
    res_fats = res * resinv * res_collapse * res_fat_rates
    nonres_fats = nonres * nonresinv * nonres_collapse * nonres_fat_rates
    # print(res_fats)
    # print(nonres_fats)
    fatsum = int(res_fats.sum() + nonres_fats.sum())
    print('Testing that "manual" calculations achieve tested result...')
    assert fatsum == 383
    print('Passed.')
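The fatality lines above (res * resinv * res_collapse * res_fat_rates) are elementwise products of pandas Series indexed by building type. A self-contained toy with hypothetical building types and rates:

import pandas as pd

btypes = ['A1', 'W1']  # hypothetical building types
pop = pd.Series([5000., 5000.], index=btypes)        # people per type
inventory = pd.Series([0.6, 0.4], index=btypes)      # fraction of inventory
collapse = pd.Series([0.20, 0.02], index=btypes)     # collapse fraction at MMI
fatality_rate = pd.Series([0.1, 0.01], index=btypes)
fatalities = pop * inventory * collapse * fatality_rate
print(int(fatalities.sum()))  # 60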
Example #4
def model_test_single():
    homedir = os.path.dirname(
        os.path.abspath(__file__))  # where is this script?
    invfile = os.path.join(homedir, '..', 'data', 'semi_inventory.hdf')
    colfile = os.path.join(homedir, '..', 'data', 'semi_collapse_mmi.hdf')
    fatfile = os.path.join(homedir, '..', 'data', 'semi_casualty.hdf')
    workfile = os.path.join(homedir, '..', 'data', 'semi_workforce.hdf')
    growthfile = os.path.join(homedir, '..', 'data',
                              'WPP2015_POP_F02_POPULATION_GROWTH_RATE.xls')
    popfile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge',
                           'northridge_gpw.flt')
    urbfile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge',
                           'northridge_urban.bil')
    isofile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge',
                           'northridge_isogrid.bil')
    ccode = 'ID'
    timeofday = 'day'
    density = URBAN
    pop = 100000
    mmi = 8.5
    popyear = 2016
    semi = SemiEmpiricalFatality.fromDefault()
    semi.setGlobalFiles(popfile, popyear, urbfile, isofile)

    # let's do the calculations "manually" by getting all of the data and doing our own multiplications
    workforce = semi.getWorkforce(ccode)
    res, nonres, outside = pop_dist(pop, workforce, timeofday, density)
    resinv, nonresinv = semi.getInventories(ccode, density)
    res_collapse = semi.getCollapse(ccode, mmi, resinv)
    nonres_collapse = semi.getCollapse(ccode, mmi, nonresinv)
    res_fat_rates = semi.getFatalityRates(ccode, timeofday, resinv)
    nonres_fat_rates = semi.getFatalityRates(ccode, timeofday, nonresinv)
    res_fats = res * resinv * res_collapse * res_fat_rates
    nonres_fats = nonres * nonresinv * nonres_collapse * nonres_fat_rates
    # print(res_fats)
    # print(nonres_fats)
    fatsum = int(res_fats.sum() + nonres_fats.sum())
    print('Testing that "manual" calculations achieve tested result...')
    assert fatsum == 383
    print('Passed.')

    loss, resfat, nresfat = make_test_semi_model(ccode, timeofday, density,
                                                 pop, mmi)
    print(
        'Testing that "manual" calculations achieve same results as grid calculations...'
    )
    assert fatsum == loss
    print('Passed.')
Example #5
def get_structure_comment(resfat, nonresfat, semimodel):
    """Create a paragraph describing the vulnerability of buildings in the most impacted country.

    :param resfat:
      Dictionary of losses by building type in residential areas.
    :param nonresfat:
      Dictionary of losses by building type in non-residential areas.
    :param semimodel:
      Instance of SemiEmpiricalFatality class.
    :returns:
      Paragraph of text describing the vulnerability of buildings in the most impacted country.
    """
    maxccode = ''
    maxfat = 0
    ccodes = resfat.keys()
    if not len(ccodes):
        return 'There are likely to be no affected structures in this region.'
    for ccode in ccodes:
        resfatdict = resfat[ccode]
        nonresfatdict = nonresfat[ccode]
        resfatsum = np.array(list(resfatdict.values())).sum()
        nonresfatsum = np.array(list(nonresfatdict.values())).sum()
        fatsum = resfatsum + nonresfatsum
        if fatsum >= maxfat:
            maxfat = fatsum
            maxccode = ccode

    if maxfat == 0:  # no fatalities predicted in any affected country
        RURAL = 1
        URBAN = 2
        semi = SemiEmpiricalFatality.fromDefault()
        # get the inventories in this ccode for both densities and
        # both residency classes
        res_urban_inv, non_res_urban_inv = semi.getInventories(maxccode, URBAN)
        res_rural_inv, non_res_rural_inv = semi.getInventories(maxccode, RURAL)
        # find unique building types from these four Series
        urban_inv_keys = set(res_urban_inv.index).union(
            set(non_res_urban_inv.index))
        rural_inv_keys = set(res_rural_inv.index).union(
            set(non_res_rural_inv.index))
        inv_keys = set(urban_inv_keys).union(set(rural_inv_keys))
        null_inventory = pd.Series(dict.fromkeys(inv_keys, 0.0))
        collapse_by_btype = pd.Series(dict.fromkeys(inv_keys, 0.0))
        for mmi in np.arange(6.0, 9.5, 0.5):
            collapse = semi.getCollapse(maxccode, mmi, null_inventory)
            collapse_by_btype += collapse

        collapse_by_btype.sort_values(ascending=False, inplace=True)
        stypes = collapse_by_btype[0:2]
    else:
        # get a pandas Series of all the unique building types in the
        # country of greatest impact, sorted by losses (high to low).
        stypes = _add_dicts(resfat[maxccode], nonresfat[maxccode])

    pregions = PagerRegions()
    regioncode = pregions.getRegion(maxccode)
    default = pregions.getComment(regioncode)
    if len(stypes) == 0:
        if default != '':
            return default
        else:
            return 'There are likely to be no affected structures in this region.'

    tstarts = [
        'W*', 'S*', 'C*', 'P*', 'RM*', 'MH', 'M*', 'A*', 'RE', 'RS*', 'DS*',
        'UFB*', 'UCB', 'MS', 'TU', 'INF', 'UNK'
    ]
    categories = []
    btypes = []
    for stype in stypes.index:
        if stype in tstarts:
            btypes.append(stype)
            categories.append(stype)
        else:
            nc = 1
            while nc <= len(stype):
                ns = stype[0:nc] + '*'
                if ns in tstarts and ns not in categories:
                    btypes.append(stype)
                    categories.append(ns)
                    break
                nc += 1
        if len(btypes) == 2:
            break

    fmt1 = 'The predominant vulnerable building type is %s construction.'
    fmt2 = 'The predominant vulnerable building types are %s and %s construction.'
    if len(btypes) == 2:
        b1 = semimodel.getBuildingDesc(btypes[0])
        b2 = semimodel.getBuildingDesc(btypes[1])
        if b1.strip() == b2.strip():
            regtext = fmt1 % b1
        else:
            regtext = fmt2 % (b1, b2)
    else:
        b1 = semimodel.getBuildingDesc(btypes[0])
        regtext = fmt1 % b1
    return default + '  ' + regtext
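A self-contained sketch of the prefix matching performed in the tstarts loop above, which maps a specific building type code onto its wildcard category by growing the prefix one character at a time (the de-duplication of already-seen categories is omitted; the type codes are illustrative):

tstarts = ['W*', 'S*', 'C*', 'RM*', 'A*']

def match_category(stype, categories=tstarts):
    # exact matches win outright
    if stype in categories:
        return stype
    # otherwise try 'R*', 'RM*', ... until a category matches
    for nc in range(1, len(stype) + 1):
        candidate = stype[:nc] + '*'
        if candidate in categories:
            return candidate
    return None

print(match_category('RM1L'))  # RM*
print(match_category('A1'))    # A*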
Example #6
def model_test_simple():
    A = 4  # ISO numeric country code for Afghanistan
    J = 392  # ISO numeric country code for Japan
    R = 1  # rural code
    U = 2  # urban code
    # create a 5x5 population data set with 1000 people in each cell
    popdata = np.ones((5, 5)) * 1000.0
    # create a mixed grid of Afghanistan and Japan (which have very different
    # inventory, collapse, and fatality rates)
    isodata = np.array([[A, A, A, A, A], [A, A, A, A, A], [A, A, A, J, J],
                        [J, J, J, J, J], [J, J, J, J, J]],
                       dtype=np.int16)
    # make a mix of urban and rural cells
    urbdata = np.array([[R, R, R, R, R], [R, U, U, U, R], [R, U, U, U, U],
                        [U, U, U, R, R], [R, R, R, R, R]],
                       dtype=np.int16)
    mmidata = np.array([[6, 7, 8, 9, 6], [7, 8, 9, 6, 7], [8, 9, 6, 6, 7],
                        [8, 9, 6, 7, 8], [9, 6, 7, 8, 9]],
                       dtype=np.float32)
    homedir = os.path.dirname(
        os.path.abspath(__file__))  # where is this script?
    invfile = os.path.join(homedir, '..', 'data', 'semi_inventory.hdf')
    colfile = os.path.join(homedir, '..', 'data', 'semi_collapse_mmi.hdf')
    fatfile = os.path.join(homedir, '..', 'data', 'semi_casualty.hdf')
    workfile = os.path.join(homedir, '..', 'data', 'semi_workforce.hdf')
    growthfile = os.path.join(homedir, '..', 'data',
                              'WPP2015_POP_F02_POPULATION_GROWTH_RATE.xls')
    geodict = GeoDict({
        'xmin': 0.5,
        'xmax': 4.5,
        'ymin': 0.5,
        'ymax': 4.5,
        'dx': 1.0,
        'dy': 1.0,
        'nx': 5,
        'ny': 5
    })

    popgrid = GMTGrid(popdata, geodict)
    isogrid = GMTGrid(isodata, geodict)
    urbgrid = GMTGrid(urbdata, geodict)
    popyear = 2016
    layers = {'mmi': mmidata}
    eventdict = {
        'event_id': '1234',
        'magnitude': 7.5,
        'lat': 34.2,
        'lon': 118.2,
        'depth': 10.0,
        'event_timestamp': datetime(2016, 1, 1, 0, 0, 0),
        'event_description': 'test data',
        'event_network': 'us'
    }
    shakedict = {
        'event_id': '1234',
        'shakemap_id': '1234',
        'shakemap_version': 1,
        'code_version': '1.0',
        'process_timestamp': datetime.utcnow(),
        'shakemap_originator': 'us',
        'map_status': 'RELEASED',
        'shakemap_event_type': 'SCENARIO'
    }
    uncdict = {'mmi': (1.0, 1)}
    mmigrid = ShakeGrid(layers, geodict, eventdict, shakedict, uncdict)

    popfile = isofile = urbfile = shakefile = ''
    try:
        # make some temporary files
        f, popfile = tempfile.mkstemp()
        os.close(f)
        f, isofile = tempfile.mkstemp()
        os.close(f)
        f, urbfile = tempfile.mkstemp()
        os.close(f)
        f, shakefile = tempfile.mkstemp()
        os.close(f)

        popgrid.save(popfile)
        isogrid.save(isofile)
        urbgrid.save(urbfile)
        mmigrid.save(shakefile)

        semi = SemiEmpiricalFatality.fromDefault()
        losses, resfat, nonresfat = semi.getLosses(shakefile)
        assert losses == 85
        print('Semi-empirical model calculations appear to be done correctly.')
    except Exception as e:
        print('There is an error attempting to do semi-empirical loss '
              'calculations: %s' % str(e))
    finally:
        files = [popfile, isofile, urbfile, shakefile]
        for fname in files:
            if os.path.isfile(fname):
                os.remove(fname)
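A minimal sketch of the grid construction used above. GeoDict and GMTGrid come from the mapio package; the import paths and the output file name are assumptions:

# a minimal sketch, assuming the mapio package is installed
import numpy as np
from mapio.geodict import GeoDict
from mapio.gmt import GMTGrid

# 5x5 cells, one degree each, cell centers from (0.5, 0.5) to (4.5, 4.5)
geodict = GeoDict({'xmin': 0.5, 'xmax': 4.5, 'ymin': 0.5, 'ymax': 4.5,
                   'dx': 1.0, 'dy': 1.0, 'nx': 5, 'ny': 5})
popgrid = GMTGrid(np.ones((5, 5)) * 1000.0, geodict)
popgrid.save('population.grd')  # hypothetical output path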
Example #7
def test():
    homedir = os.path.dirname(os.path.abspath(
        __file__))  # where is this script?
    fatfile = os.path.join(homedir, '..', 'data', 'fatality.xml')
    ecofile = os.path.join(homedir, '..', 'data', 'economy.xml')
    cityfile = os.path.join(homedir, '..', 'data', 'cities1000.txt')
    event = 'northridge'
    shakefile = os.path.join(homedir, '..', 'data',
                             'eventdata', event, '%s_grid.xml' % event)
    popfile = os.path.join(homedir, '..', 'data',
                           'eventdata', event, '%s_gpw.flt' % event)
    isofile = os.path.join(homedir, '..', 'data',
                           'eventdata', event, '%s_isogrid.bil' % event)
    urbanfile = os.path.join(homedir, '..', 'data',
                             'eventdata', 'northridge', 'northridge_urban.bil')
    oceanfile = os.path.join(
        homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_ocean.json')
    oceangridfile = os.path.join(
        homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_ocean.bil')
    timezonefile = os.path.join(
        homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_timezone.shp')

    invfile = os.path.join(homedir, '..', 'data', 'semi_inventory.hdf')
    colfile = os.path.join(homedir, '..', 'data', 'semi_collapse_mmi.hdf')
    casfile = os.path.join(homedir, '..', 'data', 'semi_casualty.hdf')
    workfile = os.path.join(homedir, '..', 'data', 'semi_workforce.hdf')

    tdir = tempfile.mkdtemp()
    basename = os.path.join(tdir, 'output')

    exp = Exposure(popfile, 2012, isofile)
    results = exp.calcExposure(shakefile)
    shakegrid = exp.getShakeGrid()
    popgrid = exp.getPopulationGrid()

    pdffile, pngfile, mapcities = draw_contour(
        shakegrid, popgrid, oceanfile, oceangridfile, cityfile, basename)
    shutil.rmtree(tdir)

    popyear = 2012

    shake_tuple = getHeaderData(shakefile)
    tsunami = shake_tuple[1]['magnitude'] >= TSUNAMI_MAG_THRESH

    semi = SemiEmpiricalFatality.fromDefault()
    semi.setGlobalFiles(popfile, popyear, urbanfile, isofile)
    semiloss, resfat, nonresfat = semi.getLosses(shakefile)

    popgrowth = PopulationGrowth.fromDefault()
    econexp = EconExposure(popfile, 2012, isofile)
    fatmodel = EmpiricalLoss.fromDefaultFatality()
    expobject = Exposure(popfile, 2012, isofile, popgrowth)

    expdict = expobject.calcExposure(shakefile)
    fatdict = fatmodel.getLosses(expdict)
    econexpdict = econexp.calcExposure(shakefile)
    ecomodel = EmpiricalLoss.fromDefaultEconomic()
    ecodict = ecomodel.getLosses(expdict)
    shakegrid = econexp.getShakeGrid()
    pagerversion = 1
    cities = Cities.loadFromGeoNames(cityfile)
    impact1 = '''Red alert level for economic losses. Extensive damage is probable 
    and the disaster is likely widespread. Estimated economic losses are less 
    than 1% of GDP of Italy. Past events with this alert level have required 
    a national or international level response.'''
    impact2 = '''Orange alert level for shaking-related fatalities. Significant 
    casualties are likely.'''
    structcomment = '''Overall, the population in this region resides in structures 
    that are a mix of vulnerable and earthquake resistant construction. The predominant 
    vulnerable building types are unreinforced brick with mud and mid-rise nonductile 
    concrete frame with infill construction.'''
    histeq = [1, 2, 3]
    struct_comment = '''Overall, the population in this region resides
    in structures that are resistant to earthquake
    shaking, though some vulnerable structures
    exist.'''
    secondary_comment = '''Recent earthquakes in this area have caused secondary hazards 
    such as landslides that might have contributed to losses.'''
    hist_comment = '''A magnitude 7.1 earthquake 240 km east of this event struck Reventador: Ecuador 
    on March 6, 1987 (UTC), with estimated population exposures of 14,000 at intensity VIII and 2,000 
    at intensity IX or greater, resulting in a reported 5,000 fatalities.'''.replace('\n', '')

    location = 'At the top of the world.'
    is_released = True

    doc = PagerData()
    eventcode = shakegrid.getEventDict()['event_id']
    versioncode = eventcode
    doc.setInputs(shakegrid, timezonefile, pagerversion,
                  versioncode, eventcode, tsunami, location, is_released)
    doc.setExposure(expdict, econexpdict)
    doc.setModelResults(fatmodel, ecomodel,
                        fatdict, ecodict,
                        semiloss, resfat, nonresfat)
    doc.setComments(impact1, impact2, struct_comment,
                    hist_comment, secondary_comment)
    doc.setMapInfo(cityfile, mapcities)
    doc.validate()

    # let's test the property methods
    tdoc(doc, shakegrid, impact1, impact2,
         expdict, struct_comment, hist_comment)

    # see if we can save this to a bunch of files then read them back in
    try:
        tdir = tempfile.mkdtemp()
        doc.saveToJSON(tdir)
        newdoc = PagerData()
        newdoc.loadFromJSON(tdir)
        tdoc(newdoc, shakegrid, impact1, impact2,
             expdict, struct_comment, hist_comment)

        # test the xml saving method
        xmlfile = doc.saveToLegacyXML(tdir)
    except Exception:
        assert 1 == 2, 'Saving/loading PagerData failed.'
    finally:
        shutil.rmtree(tdir)
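The serialization check above follows a common pattern: write to a throwaway directory, read back, compare, and always clean up. A self-contained sketch with plain JSON standing in for PagerData:

import json
import os
import shutil
import tempfile

tdir = tempfile.mkdtemp()
try:
    path = os.path.join(tdir, 'doc.json')
    with open(path, 'wt') as f:
        json.dump({'alert': 'yellow'}, f)
    with open(path, 'rt') as f:
        assert json.load(f) == {'alert': 'yellow'}
finally:
    shutil.rmtree(tdir)  # cleanup runs even if the assert fails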
Example #8
def test():
    homedir = os.path.dirname(os.path.abspath(__file__))  # where is this script?
    fatfile = os.path.join(homedir, '..', 'data', 'fatality.xml')
    ecofile = os.path.join(homedir, '..', 'data', 'economy.xml')
    cityfile = os.path.join(homedir, '..', 'data', 'cities1000.txt')
    event = 'northridge'
    shakefile = os.path.join(homedir, '..', 'data', 'eventdata', event, '%s_grid.xml' % event)
    popfile = os.path.join(homedir, '..', 'data', 'eventdata', event, '%s_gpw.flt' % event)
    isofile = os.path.join(homedir, '..', 'data', 'eventdata', event, '%s_isogrid.bil' % event)
    urbanfile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_urban.bil')
    oceanfile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_ocean.json')
    
    invfile = os.path.join(homedir, '..', 'data', 'semi_inventory.hdf')
    colfile = os.path.join(homedir, '..', 'data', 'semi_collapse_mmi.hdf')
    casfile = os.path.join(homedir, '..', 'data', 'semi_casualty.hdf')
    workfile = os.path.join(homedir, '..', 'data', 'semi_workforce.hdf')

    tdir = tempfile.mkdtemp()
    outfile = os.path.join(tdir, 'output.pdf')
    pngfile, mapcities = draw_contour(shakefile, popfile, oceanfile, cityfile, outfile, make_png=True)
    shutil.rmtree(tdir)
    
    popyear = 2012

    semi = SemiEmpiricalFatality.fromDefault()
    semi.setGlobalFiles(popfile, popyear, urbanfile, isofile)
    semiloss, resfat, nonresfat = semi.getLosses(shakefile)

    popgrowth = PopulationGrowth.fromDefault()
    econexp = EconExposure(popfile, 2012, isofile)
    fatmodel = EmpiricalLoss.fromDefaultFatality()
    expobject = Exposure(popfile, 2012, isofile, popgrowth)

    expdict = expobject.calcExposure(shakefile)
    fatdict = fatmodel.getLosses(expdict)
    econexpdict = econexp.calcExposure(shakefile)
    ecomodel = EmpiricalLoss.fromDefaultEconomic()
    ecodict = ecomodel.getLosses(expdict)
    shakegrid = econexp.getShakeGrid()
    pagerversion = 1
    cities = Cities.loadFromGeoNames(cityfile)
    impact1 = '''Red alert level for economic losses. Extensive damage is probable 
    and the disaster is likely widespread. Estimated economic losses are less 
    than 1% of GDP of Italy. Past events with this alert level have required 
    a national or international level response.'''
    impact2 = '''Orange alert level for shaking-related fatalities. Significant 
    casualties are likely.'''
    structcomment = '''Overall, the population in this region resides in structures 
    that are a mix of vulnerable and earthquake resistant construction. The predominant 
    vulnerable building types are unreinforced brick with mud and mid-rise nonductile 
    concrete frame with infill construction.'''
    histeq = [1, 2, 3]
    struct_comment = '''Overall, the population in this region resides
    in structures that are resistant to earthquake
    shaking, though some vulnerable structures
    exist.'''
    secondary_comment = '''Recent earthquakes in this area have caused secondary hazards 
    such as landslides that might have contributed to losses.'''
    hist_comment = '''A magnitude 7.1 earthquake 240 km east of this event struck Reventador: Ecuador 
    on March 6, 1987 (UTC), with estimated population exposures of 14,000 at intensity VIII and 2,000 
    at intensity IX or greater, resulting in a reported 5,000 fatalities.'''.replace('\n', '')
    doc = PagerData()
    doc.setInputs(shakegrid, pagerversion, shakegrid.getEventDict()['event_id'])
    doc.setExposure(expdict, econexpdict)
    doc.setModelResults(fatmodel, ecomodel,
                        fatdict, ecodict,
                        semiloss, resfat, nonresfat)
    doc.setComments(impact1, impact2, struct_comment, hist_comment, secondary_comment)
    doc.setMapInfo(cityfile, mapcities)
    doc.validate()

    eventinfo = doc.getEventInfo()
    assert eventinfo['mag'] == shakegrid.getEventDict()['magnitude']
    
    imp1, imp2 = doc.getImpactComments()
    assert imp1 == impact1 and imp2 == impact2

    version = doc.getSoftwareVersion()
    elapsed = doc.getElapsed()

    exp = doc.getTotalExposure()
    assert np.isclose(np.array(exp), expdict['TotalExposure']).all()

    hist_table = doc.getHistoricalTable()
    assert hist_table[0]['EventID'] == '199206281505'

    scomm = doc.getStructureComment()
    assert scomm == struct_comment
    
    hcomm = doc.getHistoricalComment()
    assert hcomm == hist_comment

    citytable = doc.getCityTable()
    assert citytable.iloc[0]['name'] == 'Santa Clarita'

    summary = doc.getSummaryAlert()
    assert summary == 'yellow'
Example #9
def main(pargs, config):
    # get the users home directory
    homedir = os.path.expanduser("~")

    # handle cancel messages
    if pargs.cancel:
        # we presume that pargs.gridfile in this context is an event ID.
        msg = _cancel(pargs.gridfile, config)
        print(msg)
        return True

    # what kind of thing is gridfile?
    is_file = os.path.isfile(pargs.gridfile)
    is_url, url_gridfile = _is_url(pargs.gridfile)
    is_pdl, pdl_gridfile = _check_pdl(pargs.gridfile, config)
    if is_file:
        gridfile = pargs.gridfile
    elif is_url:
        gridfile = url_gridfile
    elif is_pdl:
        gridfile = pdl_gridfile
    else:
        print("ShakeMap Grid file %s does not exist." % pargs.gridfile)
        return False

    pager_folder = os.path.join(homedir, config["output_folder"])
    pager_archive = os.path.join(homedir, config["archive_folder"])

    admin = PagerAdmin(pager_folder, pager_archive)

    # stdout will now be logged as INFO, stderr will be logged as WARNING
    mail_host = config["mail_hosts"][0]
    mail_from = config["mail_from"]
    developers = config["developers"]
    logfile = os.path.join(pager_folder, "pager.log")
    plog = PagerLogger(logfile, developers, mail_from, mail_host, debug=pargs.debug)
    logger = plog.getLogger()

    try:
        eid = None
        pager_version = None
        # get all the basic event information and print it, if requested
        shake_tuple = getHeaderData(gridfile)
        eid = shake_tuple[1]["event_id"].lower()
        etime = shake_tuple[1]["event_timestamp"]
        if not len(eid):
            eid = shake_tuple[0]["event_id"].lower()
        network = shake_tuple[1]["event_network"].lower()
        if network == "":
            network = "us"
        if not eid.startswith(network):
            eid = network + eid

        # Create a ComcatInfo object to hopefully tell us a number of things about this event
        try:
            ccinfo = ComCatInfo(eid)
            location = ccinfo.getLocation()
            tsunami = ccinfo.getTsunami()
            authid, allids = ccinfo.getAssociatedIds()
            authsource, othersources = ccinfo.getAssociatedSources()
        except Exception:  # fail over to what we can determine locally
            location = shake_tuple[1]["event_description"]
            tsunami = shake_tuple[1]["magnitude"] >= TSUNAMI_MAG_THRESH
            authid = eid
            authsource = network
            allids = []

        # location field can be empty (None), which breaks a bunch of things
        if location is None:
            location = ""

        # Check to see if user wanted to override default tsunami criteria
        if pargs.tsunami != "auto":
            if pargs.tsunami == "on":
                tsunami = True
            else:
                tsunami = False

        # check to see if this event is a scenario
        is_scenario = False
        shakemap_type = shake_tuple[0]["shakemap_event_type"]
        if shakemap_type == "SCENARIO":
            is_scenario = True

        # if event is NOT a scenario and event time is in the future,
        # flag the event as a scenario and yell about it.
        if etime > datetime.datetime.utcnow():
            is_scenario = True
            logger.warning(
                "Event origin time is in the future! Flagging this as a scenario."
            )

        if is_scenario:
            if re.search("scenario", location.lower()) is None:
                location = "Scenario " + location

        # create the event directory (if it does not exist), and start logging there
        logger.info("Creating event directory")
        event_folder = admin.createEventFolder(authid, etime)

        # Stop processing if there is a "stop" file in the event folder
        stopfile = os.path.join(event_folder, "stop")
        if os.path.isfile(stopfile):
            fmt = '"stop" file found in %s.  Stopping processing, returning with 1.'
            logger.info(fmt % (event_folder))
            return True

        pager_version = get_pager_version(event_folder)
        version_folder = os.path.join(event_folder, "version.%03d" % pager_version)
        os.makedirs(version_folder)
        event_logfile = os.path.join(version_folder, "event.log")

        # this will turn off the global rotating log file
        # and switch to the one in the version folder.
        plog.setVersionHandler(event_logfile)

        # Copy the grid.xml file to the version folder
        # sometimes (usu when testing) the input grid isn't called grid.xml.  Rename it here.
        version_grid = os.path.join(version_folder, "grid.xml")
        shutil.copyfile(gridfile, version_grid)

        # Check to see if the tsunami flag has been previously set
        tsunami_toggle = {"on": 1, "off": 0}
        tsunami_file = os.path.join(event_folder, "tsunami")
        if os.path.isfile(tsunami_file):
            with open(tsunami_file, "rt") as f:
                tsunami = tsunami_toggle[f.read().strip()]

        # get the rest of the event info
        etime = shake_tuple[1]["event_timestamp"]
        elat = shake_tuple[1]["lat"]
        elon = shake_tuple[1]["lon"]
        emag = shake_tuple[1]["magnitude"]

        # get the year of the event
        event_year = shake_tuple[1]["event_timestamp"].year

        # find the population data collected most closely to the event_year
        pop_year, popfile = _get_pop_year(
            event_year, config["model_data"]["population_data"]
        )
        logger.info("Population year: %i Population file: %s\n" % (pop_year, popfile))

        # Get exposure results
        logger.info("Calculating population exposure.")
        isofile = config["model_data"]["country_grid"]
        expomodel = Exposure(popfile, pop_year, isofile)
        exposure = expomodel.calcExposure(gridfile)

        # incidentally grab the country code of the epicenter
        numcode = expomodel._isogrid.getValue(elat, elon)
        if np.isnan(numcode):
            cdict = None
        else:
            cdict = Country().getCountry(int(numcode))
        if cdict is None:
            ccode = "UK"
        else:
            ccode = cdict["ISO2"]

        logger.info("Country code at epicenter is %s" % ccode)

        # get fatality results, if requested
        logger.info("Calculating empirical fatalities.")
        fatmodel = EmpiricalLoss.fromDefaultFatality()
        fatdict = fatmodel.getLosses(exposure)

        # get economic results, if requested
        logger.info("Calculating economic exposure.")
        econexpmodel = EconExposure(popfile, pop_year, isofile)
        ecomodel = EmpiricalLoss.fromDefaultEconomic()
        econexposure = econexpmodel.calcExposure(gridfile)
        ecodict = ecomodel.getLosses(econexposure)
        shakegrid = econexpmodel.getShakeGrid()

        # Get semi-empirical losses
        logger.info("Calculating semi-empirical fatalities.")
        urbanfile = config["model_data"]["urban_rural_grid"]
        if not os.path.isfile(urbanfile):
            raise PagerException("Urban-rural grid file %s does not exist." % urbanfile)

        semi = SemiEmpiricalFatality.fromDefault()
        semi.setGlobalFiles(popfile, pop_year, urbanfile, isofile)
        semiloss, resfat, nonresfat = semi.getLosses(gridfile)

        # get all of the other components of PAGER
        logger.info("Getting all comments.")
        # get the fatality and economic comments
        impact1, impact2 = get_impact_comments(
            fatdict, ecodict, econexposure, event_year, ccode
        )
        # get comment describing vulnerable structures in the region.
        struct_comment = get_structure_comment(resfat, nonresfat, semi)
        # get the comment describing historic secondary hazards
        secondary_comment = get_secondary_comment(elat, elon, emag)
        # get the comment describing historical comments in the region
        historical_comment = get_historical_comment(elat, elon, emag, exposure, fatdict)

        # generate the probability plots
        logger.info("Drawing probability plots.")
        fat_probs_file, eco_probs_file = _draw_probs(
            fatmodel, fatdict, ecomodel, ecodict, version_folder
        )

        # generate the exposure map
        exposure_base = os.path.join(version_folder, "exposure")
        logger.info("Generating exposure map...")
        oceanfile = config["model_data"]["ocean_vectors"]
        oceangrid = config["model_data"]["ocean_grid"]
        cityfile = config["model_data"]["city_file"]
        borderfile = config["model_data"]["border_vectors"]
        shake_grid = expomodel.getShakeGrid()
        pop_grid = expomodel.getPopulationGrid()
        pdf_file, png_file, mapcities = draw_contour(
            shake_grid,
            pop_grid,
            oceanfile,
            oceangrid,
            cityfile,
            exposure_base,
            borderfile,
            is_scenario=is_scenario,
        )
        logger.info("Generated exposure map %s" % pdf_file)

        # figure out whether this event has been "released".
        is_released = _get_release_status(
            pargs,
            config,
            fatmodel,
            fatdict,
            ecomodel,
            ecodict,
            shake_tuple,
            event_folder,
        )

        # Create a data object to encapsulate everything we know about the PAGER
        # results, and then serialize that to disk in the form of a number of JSON files.
        logger.info("Making PAGER Data object.")
        doc = PagerData()
        timezone_file = config["model_data"]["timezones_file"]
        elapsed = pargs.elapsed
        doc.setInputs(
            shakegrid,
            timezone_file,
            pager_version,
            shakegrid.getEventDict()["event_id"],
            authid,
            tsunami,
            location,
            is_released,
            elapsed=elapsed,
        )
        logger.info("Setting inputs.")
        doc.setExposure(exposure, econexposure)
        logger.info("Setting exposure.")
        doc.setModelResults(
            fatmodel, ecomodel, fatdict, ecodict, semiloss, resfat, nonresfat
        )
        logger.info("Setting comments.")
        doc.setComments(
            impact1, impact2, struct_comment, historical_comment, secondary_comment
        )
        logger.info("Setting map info.")
        doc.setMapInfo(cityfile, mapcities)
        logger.info("Validating.")
        doc.validate()

        # if we have determined that the event is a scenario (origin time is in the future)
        # and the shakemap is not flagged as such, set the shakemap type in the
        # pagerdata object to be 'SCENARIO'.
        if is_scenario:
            doc.setToScenario()

        json_folder = os.path.join(version_folder, "json")
        os.makedirs(json_folder)
        logger.info("Saving output to JSON.")
        doc.saveToJSON(json_folder)
        logger.info("Saving output to XML.")
        doc.saveToLegacyXML(version_folder)

        logger.info("Creating onePAGER pdf...")
        onepager_pdf, error = create_onepager(doc, version_folder)
        if onepager_pdf is None:
            raise PagerException("Could not create onePAGER output: \n%s" % error)

        # copy the contents.xml file to the version folder
        contentsfile = get_data_path("contents.xml")
        if contentsfile is None:
            raise PagerException("Could not find contents.xml file.")
        shutil.copy(contentsfile, version_folder)

        # send pdf as attachment to internal team of PAGER users
        if not is_released and not is_scenario:
            message_pager(config, onepager_pdf, doc)

        # run transfer, as appropriate and as specified by config
        # the PAGER product eventsource and eventsourcecode should
        # match the input ShakeMap settings for these properties.
        # This can possibly cause confusion if a regional ShakeMap is
        # trumped with one from NEIC, but this should happen less often
        # than an NEIC origin being made authoritative over a regional one.
        eventsource = network
        eventsourcecode = eid
        res, msg = transfer(
            config,
            doc,
            eventsourcecode,
            eventsource,
            version_folder,
            is_scenario=is_scenario,
        )
        logger.info(msg)
        if not res:
            logger.critical('Error transferring PAGER content. "%s"' % msg)

        print("Created onePAGER pdf %s" % onepager_pdf)
        logger.info("Created onePAGER pdf %s" % onepager_pdf)

        logger.info("Done.")
        return True
    except Exception as e:
        f = io.StringIO()
        traceback.print_exc(file=f)
        msg = "%s\n %s" % (str(e), f.getvalue())
        hostname = socket.gethostname()
        msg = msg + "\n" + "Error occurred on %s\n" % (hostname)
        if gridfile is not None:
            msg = msg + "\n" + "Error on file: %s\n" % (gridfile)
        if eid is not None:
            msg = msg + "\n" + "Error on event: %s\n" % (eid)
        if pager_version is not None:
            msg = msg + "\n" + "Error on version: %i\n" % (pager_version)
        f.close()
        logger.critical(msg)
        logger.info("Sent error to email")
        return False
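A minimal sketch of the exposure-to-empirical-loss chain at the heart of the pipeline above, assuming losspager is installed; the import paths and file names are assumptions:

# a minimal sketch, assuming losspager and these (hypothetical) grid files
from losspager.models.exposure import Exposure
from losspager.models.emploss import EmpiricalLoss

# population exposure per intensity bin, per country
expomodel = Exposure('gpw_population.flt', 2016, 'iso_codes.bil')
exposure = expomodel.calcExposure('grid.xml')
# empirical (country-rate) fatality estimate from that exposure
fatmodel = EmpiricalLoss.fromDefaultFatality()
fatdict = fatmodel.getLosses(exposure)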