def test_godt_2008():
    """Regression test: run the Godt (2008) landslide model on the Loma
    Prieta event and compare the output grid to the stored target."""
    # Build the model config, forcing divfactor to 1 so the run matches
    # the stored target data.
    config_path = os.path.join(upone, 'defaultconfigfiles', 'models',
                               'godt_2008.ini')
    config = ConfigObj(config_path)
    config['godt_2008']['divfactor'] = '1.'
    inputs_dir = os.path.join(datadir, 'loma_prieta', 'model_inputs')
    config = correct_config_filepaths(inputs_dir, config)

    grid_xml = os.path.join(datadir, 'loma_prieta', 'grid.xml')
    maplayers = godt2008(grid_xml, config)

    model_grid = maplayers['model']['grid']
    test_data = model_grid.getData()

    target_path = os.path.join(datadir, 'loma_prieta', 'targets',
                               'godt_2008.grd')
    if changetarget:
        # Regenerate the target file from the current model output.
        GMTGrid(model_grid.getData(), model_grid.getGeoDict()).save(target_path)

    # Load the target and compare.
    target_data = GMTGrid.load(target_path).getData()
    np.testing.assert_allclose(target_data, test_data, rtol=1e-3)
def test_zhu2015():
    """Regression test: run the Zhu (2015) liquefaction logistic model on
    the Loma Prieta event and compare the output grid to the stored target."""
    config_path = os.path.join(upone, 'defaultconfigfiles', 'models',
                               'zhu_2015.ini')
    config = ConfigObj(config_path)
    inputs_dir = os.path.join(datadir, 'loma_prieta', 'model_inputs')
    # Check slopefile trimming
    config['zhu_2015']['slopefile'] = 'global_gted_maxslope_30c.flt'
    config = correct_config_filepaths(inputs_dir, config)
    # Run with divfactor of 1
    config['zhu_2015']['divfactor'] = '1.'

    grid_xml = os.path.join(datadir, 'loma_prieta', 'grid.xml')
    model = LM.LogisticModel(grid_xml, config, saveinputs=True)
    maplayers = model.calculate()

    model_grid = maplayers['model']['grid']
    test_data = model_grid.getData()

    target_path = os.path.join(datadir, 'loma_prieta', 'targets',
                               'zhu2015.grd')
    if changetarget:
        # Regenerate the target file from the current model output.
        GMTGrid(model_grid.getData(), model_grid.getGeoDict()).save(target_path)

    # Load the target and compare.
    target_data = GMTGrid.load(target_path).getData()
    np.testing.assert_allclose(target_data, test_data, rtol=1e-3)
def test_jessee_2018():
    """Regression test: run the Jessee (2018) model with an uncertainty
    file and compare both the model and std grids to stored targets."""
    config_path = os.path.join(upone, 'defaultconfigfiles', 'models',
                               'jessee_2018.ini')
    config = ConfigObj(config_path)
    inputs_dir = os.path.join(datadir, 'loma_prieta', 'model_inputs')
    config = correct_config_filepaths(inputs_dir, config)
    grid_xml = os.path.join(datadir, 'loma_prieta', 'grid.xml')
    uncertainty_file = os.path.join(datadir, 'loma_prieta', 'uncertainty.xml')

    model = LM.LogisticModel(grid_xml,
                             config,
                             saveinputs=True,
                             uncertfile=uncertainty_file)
    maplayers = model.calculate()

    model_grid = maplayers['model']['grid']
    std_grid = maplayers['std']['grid']
    test_data = model_grid.getData()
    test_data_std = std_grid.getData()

    model_target = os.path.join(datadir, 'loma_prieta', 'targets',
                                'jessee_2018.grd')
    std_target = os.path.join(datadir, 'loma_prieta', 'targets',
                              'jessee_2018_std.grd')
    if changetarget:
        # Regenerate both target files from the current model output.
        GMTGrid(model_grid.getData(), model_grid.getGeoDict()).save(model_target)
        GMTGrid(std_grid.getData(), std_grid.getGeoDict()).save(std_target)

    # Load the targets and compare both grids.
    target_data = GMTGrid.load(model_target).getData()
    target_data_std = GMTGrid.load(std_target).getData()

    np.testing.assert_allclose(target_data, test_data, rtol=1e-3)
    np.testing.assert_allclose(target_data_std, test_data_std, rtol=1e-3)
# Example #4
def basic_test():
    """End-to-end smoke test of the empirical fatality model.

    Builds a tiny 5x5 ShakeMap/population/country-code grid straddling
    Afghanistan (ISO 4) and China (ISO 156), computes exposure, overrides
    the loss models with simple rates, and checks scalar and gridded
    fatality totals. Also exercises rate modification on a Chile model.
    """
    # 5x5 MMI field, intensities 5-10.
    mmidata = np.array([[7, 8, 8, 8, 7], [8, 9, 9, 9, 8], [8, 9, 10, 9, 8],
                        [8, 9, 9, 8, 8], [7, 8, 8, 6, 5]],
                       dtype=np.float32)
    # Uniform population of 10 million per cell.
    popdata = np.ones_like(mmidata) * 1e7
    # Country codes: 4 = Afghanistan, 156 = China.
    isodata = np.array(
        [[4, 4, 4, 4, 4], [4, 4, 4, 4, 4], [4, 4, 156, 156, 156],
         [156, 156, 156, 156, 156], [156, 156, 156, 156, 156]],
        dtype=np.int32)

    # Temporary files to hold the shake, population, and country grids.
    shakefile = get_temp_file_name()
    popfile = get_temp_file_name()
    isofile = get_temp_file_name()
    geodict = GeoDict({
        'xmin': 0.5,
        'xmax': 4.5,
        'ymin': 0.5,
        'ymax': 4.5,
        'dx': 1.0,
        'dy': 1.0,
        'nx': 5,
        'ny': 5
    })
    layers = OrderedDict([
        ('mmi', mmidata),
    ])
    # Minimal event/shakemap metadata required by ShakeGrid.
    event_dict = {
        'event_id': 'us12345678',
        'magnitude': 7.8,
        'depth': 10.0,
        'lat': 34.123,
        'lon': -118.123,
        'event_timestamp': datetime.utcnow(),
        'event_description': 'foo',
        'event_network': 'us'
    }
    shake_dict = {
        'event_id': 'us12345678',
        'shakemap_id': 'us12345678',
        'shakemap_version': 1,
        'code_version': '4.5',
        'process_timestamp': datetime.utcnow(),
        'shakemap_originator': 'us',
        'map_status': 'RELEASED',
        'shakemap_event_type': 'ACTUAL'
    }
    unc_dict = {'mmi': (1, 1)}
    shakegrid = ShakeGrid(layers, geodict, event_dict, shake_dict, unc_dict)
    shakegrid.save(shakefile)
    popgrid = GMTGrid(popdata, geodict.copy())
    isogrid = GMTGrid(isodata, geodict.copy())
    popgrid.save(popfile)
    isogrid.save(isofile)

    # Simple per-country population growth rates by time interval.
    ratedict = {
        4: {
            'start': [2010, 2012, 2014, 2016],
            'end': [2012, 2014, 2016, 2018],
            'rate': [0.01, 0.02, 0.03, 0.04]
        },
        156: {
            'start': [2010, 2012, 2014, 2016],
            'end': [2012, 2014, 2016, 2018],
            'rate': [0.02, 0.03, 0.04, 0.05]
        }
    }

    popgrowth = PopulationGrowth(ratedict)
    popyear = datetime.utcnow().year
    exposure = Exposure(popfile, popyear, isofile, popgrowth=popgrowth)
    expdict = exposure.calcExposure(shakefile)

    modeldict = [
        LognormalModel('AF', 11.613073, 0.180683, 1.0),
        LognormalModel('CN', 10.328811, 0.100058, 1.0)
    ]
    fatmodel = EmpiricalLoss(modeldict)

    # for the purposes of this test, let's override the rates
    # for Afghanistan and China with simpler numbers.
    fatmodel.overrideModel(
        'AF',
        np.array([0, 0, 0, 0, 1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 0],
                 dtype=np.float32))
    fatmodel.overrideModel(
        'CN',
        np.array([0, 0, 0, 0, 1e-7, 1e-6, 1e-5, 1e-4, 1e-3, 0],
                 dtype=np.float32))

    print('Testing very basic fatality calculation...')
    fatdict = fatmodel.getLosses(expdict)
    # strictly speaking, the Afghanistan fatalities should be 462,000 but floating point precision dictates otherwise.
    testdict = {'CN': 46111, 'AF': 461999, 'TotalFatalities': 508110}
    for key, value in fatdict.items():
        assert value == testdict[key]
    print('Passed very basic fatality calculation...')

    print('Testing grid fatality calculations...')
    # Per-cell fatality grid should sum to (roughly) the scalar totals.
    mmidata = exposure.getShakeGrid().getLayer('mmi').getData()
    popdata = exposure.getPopulationGrid().getData()
    isodata = exposure.getCountryGrid().getData()
    fatgrid = fatmodel.getLossGrid(mmidata, popdata, isodata)

    assert np.nansum(fatgrid) == 508111
    print('Passed grid fatality calculations...')

    # Testing modifying rates and stuffing them back in...
    chile = LognormalModel('CL', 19.786773, 0.259531, 0.0)
    rates = chile.getLossRates(np.arange(5, 10))
    modrates = rates * 2  # does this make event twice as deadly?

    # roughly the exposures from 2015-9-16 CL event
    expo_pop = np.array(
        [0, 0, 0, 1047000, 7314000, 1789000, 699000, 158000, 0, 0])
    mmirange = np.arange(5, 10)
    # expo_pop[4:9] are the populations exposed at MMI 5 through 9.
    chile_deaths = chile.getLosses(expo_pop[4:9], mmirange)
    chile_double_deaths = chile.getLosses(expo_pop[4:9],
                                          mmirange,
                                          rates=modrates)
    print('Chile model fatalities: %f' % chile_deaths)
    print('Chile model x2 fatalities: %f' % chile_double_deaths)
# Example #5
def model_test_simple():
    """Smoke test of the semi-empirical fatality model on a tiny mixed
    Afghanistan/Japan grid.

    Builds 5x5 population, country-code, urban/rural, and MMI grids,
    saves them to temporary files, runs the default semi-empirical model
    on the ShakeMap, and checks the total loss count.
    """
    A = 4  #ccode for afghanistan
    J = 392  #ccode for japan
    R = 1  #rural code
    U = 2  #urban code
    #create a 5x5 population data set with 1000 people in each cell
    popdata = np.ones((5, 5)) * 1000.0
    #create a mixed grid of afghanistan and japan (have very different inventory,collapse, and fatality rates.)
    isodata = np.array([[A, A, A, A, A], [A, A, A, A, A], [A, A, A, J, J],
                        [J, J, J, J, J], [J, J, J, J, J]],
                       dtype=np.int16)
    #make a mix of urban and rural cells
    urbdata = np.array([[R, R, R, R, R], [R, U, U, U, R], [R, U, U, U, U],
                        [U, U, U, R, R], [R, R, R, R, R]],
                       dtype=np.int16)
    mmidata = np.array([[6, 7, 8, 9, 6], [7, 8, 9, 6, 7], [8, 9, 6, 6, 7],
                        [8, 9, 6, 7, 8], [9, 6, 7, 8, 9]],
                       dtype=np.float32)
    homedir = os.path.dirname(
        os.path.abspath(__file__))  #where is this script?
    invfile = os.path.join(homedir, '..', 'data', 'semi_inventory.hdf')
    colfile = os.path.join(homedir, '..', 'data', 'semi_collapse_mmi.hdf')
    fatfile = os.path.join(homedir, '..', 'data', 'semi_casualty.hdf')
    workfile = os.path.join(homedir, '..', 'data', 'semi_workforce.hdf')
    growthfile = os.path.join(homedir, '..', 'data',
                              'WPP2015_POP_F02_POPULATION_GROWTH_RATE.xls')
    geodict = GeoDict({
        'xmin': 0.5,
        'xmax': 4.5,
        'ymin': 0.5,
        'ymax': 4.5,
        'dx': 1.0,
        'dy': 1.0,
        'nx': 5,
        'ny': 5
    })

    popgrid = GMTGrid(popdata, geodict)
    isogrid = GMTGrid(isodata, geodict)
    urbgrid = GMTGrid(urbdata, geodict)
    popyear = 2016
    layers = {'mmi': mmidata}
    # Minimal event/shakemap metadata required by ShakeGrid.
    eventdict = {
        'event_id': '1234',
        'magnitude': 7.5,
        'lat': 34.2,
        'lon': 118.2,
        'depth': 10.0,
        'event_timestamp': datetime(2016, 1, 1, 0, 0, 0),
        'event_description': 'test data',
        'event_network': 'us'
    }
    shakedict = {
        'event_id': '1234',
        'shakemap_id': '1234',
        'shakemap_version': 1,
        'code_version': '1.0',
        'process_timestamp': datetime.utcnow(),
        'shakemap_originator': 'us',
        'map_status': 'RELEASED',
        'shakemap_event_type': 'SCENARIO'
    }
    uncdict = {'mmi': (1.0, 1)}
    mmigrid = ShakeGrid(layers, geodict, eventdict, shakedict, uncdict)

    popfile = isofile = urbfile = shakefile = ''
    try:
        #make some temporary files
        f, popfile = tempfile.mkstemp()
        os.close(f)
        f, isofile = tempfile.mkstemp()
        os.close(f)
        f, urbfile = tempfile.mkstemp()
        os.close(f)
        f, shakefile = tempfile.mkstemp()
        os.close(f)

        popgrid.save(popfile)
        isogrid.save(isofile)
        urbgrid.save(urbfile)
        mmigrid.save(shakefile)

        semi = SemiEmpiricalFatality.fromDefault()
        losses, resfat, nonresfat = semi.getLosses(shakefile)
        assert losses == 85
        print('Semi-empirical model calculations appear to be done correctly.')
    except Exception as e:
        # Catch Exception (not a bare except) so KeyboardInterrupt and
        # SystemExit still propagate, and report what actually failed
        # instead of discarding the error.
        print(
            'There is an error attempting to do semi-empirical loss calculations.'
        )
        print('Error: %s' % str(e))
    finally:
        # Always clean up the temporary grid files.
        files = [popfile, isofile, urbfile, shakefile]
        for fname in files:
            if os.path.isfile(fname):
                os.remove(fname)
# Example #6
def make_test_semi_model(ccode, timeofday, density, popvalue, mmi):
    """Run the semi-empirical model for a single value of input.  Intended for testing purposes.

    :param ccode:
      Two letter ISO country code ('US', 'JP', etc.) to be used to extract inventory, collapse rates, etc.
    :param timeofday:
      One of 'day','night' - used to determine residential/non-residental population distribution and casualty rates.
    :param density:
      One of semimodel.URBAN (2) or semimodel.RURAL (1).
    :param popvalue:
      Scalar population value to multiply by inventory, collapse, and fatality rates.
    :param mmi:
      MMI value used to extract collapse rates in given country code.
    :returns:
      Tuple of:
        1) Total number of fatalities
        2) Dictionary of residential fatalities per building type, per country.
        3) Dictionary of non-residential fatalities per building type, per country.
    """
    country = Country()
    cdict = country.getCountry(ccode)
    ucode = cdict['ISON']  # numeric ISO code for the country grid
    geodict = GeoDict({
        'xmin': 0.5,
        'xmax': 4.5,
        'ymin': 0.5,
        'ymax': 4.5,
        'dx': 1.0,
        'dy': 1.0,
        'nx': 5,
        'ny': 5
    })
    # Event time controls the day/night/transit casualty-rate selection.
    if timeofday == 'day':
        etime = datetime(2016, 1, 1, 12, 0, 0)  #noon
    elif timeofday == 'transit':
        etime = datetime(2016, 1, 1, 18, 0, 0)  #6 pm
    else:
        etime = datetime(2016, 1, 1, 0, 0, 0)  #midnight
    eventdict = {
        'event_id': '1234',
        'magnitude': 7.5,
        'lat': 0.0,
        'lon': 0.0,
        'depth': 10.0,
        'event_timestamp': etime,
        'event_description': 'test data',
        'event_network': 'us'
    }
    shakedict = {
        'event_id': '1234',
        'shakemap_id': '1234',
        'shakemap_version': 1,
        'code_version': '1.0',
        'process_timestamp': datetime.utcnow(),
        'shakemap_originator': 'us',
        'map_status': 'RELEASED',
        'shakemap_event_type': 'SCENARIO'
    }
    uncdict = {'mmi': (1.0, 1)}
    # 2x2 uniform grids: total population popvalue split across 4 cells.
    popdata = np.ones((2, 2), dtype=np.float32) * (popvalue) / 4
    isodata = np.ones((2, 2), dtype=np.int16) * ucode
    urbdata = np.ones((2, 2), dtype=np.int16) * density
    mmidata = np.ones((2, 2), dtype=np.float32) * mmi
    # 2x2 geodict overrides the 5x5 one defined above.
    geodict = GeoDict({
        'xmin': 0.5,
        'xmax': 1.5,
        'ymin': 0.5,
        'ymax': 1.5,
        'dx': 1.0,
        'dy': 1.0,
        'nx': 2,
        'ny': 2
    })
    popgrid = GMTGrid(popdata, geodict)
    isogrid = GMTGrid(isodata, geodict)
    urbgrid = GMTGrid(urbdata, geodict)
    popyear = 2016
    layers = {'mmi': mmidata}
    mmigrid = ShakeGrid(layers, geodict, eventdict, shakedict, uncdict)
    popfile = isofile = urbfile = shakefile = ''
    # Defensive defaults in case the try body fails part way through.
    # NOTE: this was 'newnresfat = None' (typo) - the function returns
    # newnonresfat, so that is the name that must be initialized.
    popsum = None
    newresfat = None
    newnonresfat = None
    try:
        #make some temporary files
        f, popfile = tempfile.mkstemp()
        os.close(f)
        f, isofile = tempfile.mkstemp()
        os.close(f)
        f, urbfile = tempfile.mkstemp()
        os.close(f)
        f, shakefile = tempfile.mkstemp()
        os.close(f)

        popgrid.save(popfile)
        isogrid.save(isofile)
        urbgrid.save(urbfile)
        mmigrid.save(shakefile)

        semi = SemiEmpiricalFatality.fromDefault()
        semi.setGlobalFiles(popfile, popyear, urbfile, isofile)
        t, resfat, nonresfat = semi.getLosses(shakefile)
        # Per-cell values are 1/4 of the country totals (4 equal cells);
        # sub-unity values are floored to zero before summing.
        popsum = 0
        newresfat = {ccode: {}}
        newnonresfat = {ccode: {}}
        for key, value in resfat[ccode].items():
            if value < 1:
                value = np.floor(value)
            newresfat[ccode][key] = value / 4.0
            popsum += value / 4.0
        for key, value in nonresfat[ccode].items():
            newnonresfat[ccode][key] = value / 4.0
            if value < 1:
                value = np.floor(value)
            popsum += value / 4.0
        popsum = int(popsum)
    finally:
        # Always clean up the temporary grid files.
        files = [popfile, isofile, urbfile, shakefile]
        for fname in files:
            if os.path.isfile(fname):
                os.remove(fname)
    return (popsum, newresfat, newnonresfat)