Example #1
0
def test_interpolate():
    """Check that ShakeGrid.interpolateToGrid() linearly resamples a layer.

    Builds a 6x7 'pga' grid, resamples a 2x2 interior window with linear
    interpolation, and compares against hand-computed expected values.
    """
    print('Testing ShakeGrid interpolate() method...')
    # Source grid: 7 columns x 6 rows covering lon 0.5-6.5, lat 1.5-6.5.
    source_geodict = GeoDict({'xmin': 0.5, 'xmax': 6.5,
                              'ymin': 1.5, 'ymax': 6.5,
                              'dx': 1.0, 'dy': 1.0,
                              'ny': 6, 'nx': 7})
    pga_data = np.arange(14, 56).reshape(6, 7)
    grid_layers = OrderedDict([('pga', pga_data)])
    shake_meta = {'event_id': 'usabcd1234',
                  'shakemap_id': 'usabcd1234',
                  'shakemap_version': 1,
                  'code_version': '4.0',
                  'process_timestamp': datetime.utcnow(),
                  'shakemap_originator': 'us',
                  'map_status': 'RELEASED',
                  'shakemap_event_type': 'ACTUAL'}
    event_meta = {'event_id': 'usabcd1234',
                  'magnitude': 7.6,
                  'depth': 1.4,
                  'lat': 2.0,
                  'lon': 2.0,
                  'event_timestamp': datetime.utcnow(),
                  'event_network': 'us',
                  'event_description': 'sample event'}
    unc_meta = {'pga': (0.0, 0)}
    source_grid = ShakeGrid(grid_layers, source_geodict, event_meta,
                            shake_meta, unc_meta)
    # Target grid: 2x2 window fully inside the source grid, so the linear
    # interpolation result can be stated exactly.
    target_geodict = GeoDict({'xmin': 3.0, 'xmax': 4.0,
                              'ymin': 3.0, 'ymax': 4.0,
                              'dx': 1.0, 'dy': 1.0,
                              'ny': 2, 'nx': 2})
    resampled = source_grid.interpolateToGrid(target_geodict, method='linear')
    expected = np.array([[34., 35.], [41., 42.]])
    np.testing.assert_almost_equal(expected,
                                   resampled.getLayer('pga').getData())
    print('Passed test of ShakeGrid interpolate() method.')
Example #2
0
def test_modify():
    """Verify that ShakeGrid.setLayer() replaces a layer's data in place.

    Creates a grid with a known 'pga' layer, overwrites it with random
    data via setLayer(), and checks that getLayer() round-trips the new
    array.
    """
    # Fixed: the original message said "interpolate() method", a
    # copy-paste from test_interpolate(); this test exercises setLayer().
    print('Testing ShakeGrid setLayer() method...')
    geodict = GeoDict({
        'xmin': 0.5,
        'xmax': 6.5,
        'ymin': 1.5,
        'ymax': 6.5,
        'dx': 1.0,
        'dy': 1.0,
        'ny': 6,
        'nx': 7
    })
    data = np.arange(14, 56).reshape(6, 7)
    layers = OrderedDict()
    layers['pga'] = data
    shakeDict = {
        'event_id': 'usabcd1234',
        'shakemap_id': 'usabcd1234',
        'shakemap_version': 1,
        'code_version': '4.0',
        'process_timestamp': datetime.utcnow(),
        'shakemap_originator': 'us',
        'map_status': 'RELEASED',
        'shakemap_event_type': 'ACTUAL'
    }
    eventDict = {
        'event_id': 'usabcd1234',
        'magnitude': 7.6,
        'depth': 1.4,
        'lat': 2.0,
        'lon': 2.0,
        'event_timestamp': datetime.utcnow(),
        'event_network': 'us',
        'event_description': 'sample event'
    }
    uncDict = {'pga': (0.0, 0)}
    shake = ShakeGrid(layers, geodict, eventDict, shakeDict, uncDict)
    # Overwrite the layer and confirm the stored data matches exactly.
    rdata = np.random.rand(data.shape[0], data.shape[1])
    shake.setLayer('pga', rdata)
    newdata = shake.getLayer('pga').getData()
    np.testing.assert_almost_equal(rdata, newdata)
    print('Passed test of ShakeGrid setLayer() method.')
Example #3
0
    def execute(self):
        """Create grid.xml and uncertainty.xml files.

        Reads the shake_result.hdf container for this event, converts the
        mean ('grid') and standard-deviation ('uncertainty') IMT layers to
        legacy ShakeMap XML units, and writes one XML file per type into
        the event's products directory.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the the shake_result HDF file does not
                exist.
            NotImplementedError: When the container holds point data rather
                than gridded data.
        """
        logger = logging.getLogger(__name__)
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # get all of the grid layers and the geodict
        if container.getDataType() != 'grid':
            raise NotImplementedError('gridxml module can only function on '
                                      'gridded data, not sets of points')
        gridnames = container.getIMTs(COMPONENT)
        xml_types = ['grid', 'uncertainty']
        for xml_type in xml_types:
            layers = OrderedDict()
            field_keys = OrderedDict()
            for gridname in gridnames:
                imt_field = _oq_to_gridxml(gridname)
                imtdict = container.getIMTGrids(gridname, COMPONENT)
                # Pick the mean grid for grid.xml, the standard deviation
                # grid for uncertainty.xml.
                if xml_type == 'grid':
                    grid = imtdict['mean']
                    metadata = imtdict['mean_metadata']
                elif xml_type == 'uncertainty':
                    grid = imtdict['std']
                    metadata = imtdict['std_metadata']

                units = metadata['units']
                digits = metadata['digits']
                grid_data = grid.getData()
                # convert from HDF units to legacy grid.xml units
                # (natural-log values become linear cm/s or %g).
                if xml_type == 'grid':
                    if units == 'ln(cm/s)':
                        grid_data = np.exp(grid_data)
                        units = 'cm/s'
                    elif units == 'ln(g)':
                        grid_data = np.exp(grid_data) * 100
                        units = '%g'
                    else:
                        pass

                # Uncertainty layers are prefixed with 'STD' in legacy XML.
                if xml_type == 'grid':
                    layers[imt_field] = grid_data
                    field_keys[imt_field] = (units, digits)
                else:
                    layers['STD' + imt_field] = grid_data
                    field_keys['STD' + imt_field] = (units, digits)

            # All IMT grids share the same geodict; use the last one read.
            geodict = grid.getGeoDict()

            config = container.getConfig()

            # event dictionary
            info = container.getMetadata()
            event_info = info['input']['event_information']
            event_dict = {}
            event_dict['event_id'] = event_info['event_id']
            event_dict['magnitude'] = float(event_info['magnitude'])
            event_dict['depth'] = float(event_info['depth'])
            event_dict['lat'] = float(event_info['latitude'])
            event_dict['lon'] = float(event_info['longitude'])
            event_dict['event_timestamp'] = datetime.strptime(
                event_info['origin_time'], TIMEFMT)
            event_dict['event_description'] = event_info['location']
            event_dict['event_network'] = \
                info['input']['event_information']['eventsource']

            # shake dictionary
            shake_dict = {}
            shake_dict['event_id'] = event_dict['event_id']
            shake_dict['shakemap_id'] = event_dict['event_id']
            shake_dict['shakemap_version'] = \
                info['processing']['shakemap_versions']['map_version']
            shake_dict['code_version'] = shakemap.__version__
            ptime = info['processing']['shakemap_versions']['process_time']
            shake_dict['process_timestamp'] = datetime.strptime(ptime, TIMEFMT)
            shake_dict['shakemap_originator'] = \
                config['system']['source_network']
            shake_dict['map_status'] = config['system']['map_status']
            shake_dict['shakemap_event_type'] = 'ACTUAL'
            # Scenario events are identified by an '_se' id suffix.
            if event_dict['event_id'].endswith('_se'):
                shake_dict['shakemap_event_type'] = 'SCENARIO'

            shake_grid = ShakeGrid(
                layers, geodict, event_dict,
                shake_dict, {}, field_keys=field_keys)
            fname = os.path.join(datadir, '%s.xml' % xml_type)
            logger.debug('Saving IMT grids to %s' % fname)
            shake_grid.save(fname)  # TODO - set grid version number

        container.close()
Example #4
0
def test_save():
    """Round-trip ShakeGrid save/load including subsetting, padding and
    resampling through the samplegeodict argument.

    Writes a multi-layer grid to a temporary XML file, then reloads it
    under several sampling scenarios and compares against expected arrays.
    The temporary directory is always removed.
    """
    tdir = tempfile.mkdtemp()
    testfile = os.path.join(tdir, 'test.xml')
    try:
        print('Testing save/read functionality for shakemap grids...')
        pga = np.arange(0, 16, dtype=np.float32).reshape(4, 4)
        pgv = np.arange(1, 17, dtype=np.float32).reshape(4, 4)
        mmi = np.arange(2, 18, dtype=np.float32).reshape(4, 4)
        geodict = GeoDict({
            'xmin': 0.5,
            'xmax': 3.5,
            'ymin': 0.5,
            'ymax': 3.5,
            'dx': 1.0,
            'dy': 1.0,
            'ny': 4,
            'nx': 4
        })
        layers = OrderedDict()
        layers['pga'] = pga
        layers['pgv'] = pgv
        layers['mmi'] = mmi
        shakeDict = {
            'event_id': 'usabcd1234',
            'shakemap_id': 'usabcd1234',
            'shakemap_version': 1,
            'code_version': '4.0',
            'process_timestamp': datetime.utcnow(),
            'shakemap_originator': 'us',
            'map_status': 'RELEASED',
            'shakemap_event_type': 'ACTUAL'
        }
        eventDict = {
            'event_id': 'usabcd1234',
            'magnitude': 7.6,
            'depth': 1.4,
            'lat': 2.0,
            'lon': 2.0,
            'event_timestamp': datetime.utcnow(),
            'event_network': 'us',
            'event_description': 'sample event'
        }
        uncDict = {'pga': (0.0, 0), 'pgv': (0.0, 0), 'mmi': (0.0, 0)}
        shake = ShakeGrid(layers, geodict, eventDict, shakeDict, uncDict)

        print('Testing save/read functionality...')
        shake.save(testfile, version=3)
        shake2 = ShakeGrid.load(testfile)
        for layer in ['pga', 'pgv', 'mmi']:
            tdata = shake2.getLayer(layer).getData()
            np.testing.assert_almost_equal(tdata, layers[layer])

        print('Passed save/read functionality for shakemap grids.')

        print('Testing getFileGeoDict method...')
        fgeodict = ShakeGrid.getFileGeoDict(testfile)
        print('Passed save/read functionality for shakemap grids.')

        print('Testing loading with bounds (no resampling or padding)...')
        sampledict = GeoDict({
            'xmin': -0.5,
            'xmax': 3.5,
            'ymin': -0.5,
            'ymax': 3.5,
            'dx': 1.0,
            'dy': 1.0,
            'ny': 5,
            'nx': 5
        })
        shake3 = ShakeGrid.load(testfile,
                                samplegeodict=sampledict,
                                resample=False,
                                doPadding=False,
                                padValue=np.nan)
        tdata = shake3.getLayer('pga').getData()
        np.testing.assert_almost_equal(tdata, layers['pga'])

        print('Passed loading with bounds (no resampling or padding)...')

        print('Testing loading shakemap with padding, no resampling...')
        newdict = GeoDict({
            'xmin': -0.5,
            'xmax': 4.5,
            'ymin': -0.5,
            'ymax': 4.5,
            'dx': 1.0,
            'dy': 1.0,
            'ny': 6,
            'nx': 6
        })
        shake4 = ShakeGrid.load(testfile,
                                samplegeodict=newdict,
                                resample=False,
                                doPadding=True,
                                padValue=np.nan)
        # One-cell NaN border around the original 4x4 data.
        output = np.array([[np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
                           [np.nan, 0.0, 1.0, 2.0, 3.0, np.nan],
                           [np.nan, 4.0, 5.0, 6.0, 7.0, np.nan],
                           [np.nan, 8.0, 9.0, 10.0, 11.0, np.nan],
                           [np.nan, 12.0, 13.0, 14.0, 15.0, np.nan],
                           [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan]])
        tdata = shake4.getLayer('pga').getData()
        np.testing.assert_almost_equal(tdata, output)
        print('Passed loading shakemap with padding, no resampling...')

        #make a bigger grid
        pga = np.arange(0, 36, dtype=np.float32).reshape(6, 6)
        pgv = np.arange(1, 37, dtype=np.float32).reshape(6, 6)
        mmi = np.arange(2, 38, dtype=np.float32).reshape(6, 6)
        layers = OrderedDict()
        layers['pga'] = pga
        layers['pgv'] = pgv
        layers['mmi'] = mmi
        geodict = GeoDict({
            'xmin': 0.5,
            'xmax': 5.5,
            'ymin': 0.5,
            'ymax': 5.5,
            'dx': 1.0,
            'dy': 1.0,
            'ny': 6,
            'nx': 6
        })
        shake = ShakeGrid(layers, geodict, eventDict, shakeDict, uncDict)
        shake.save(testfile, version=3)

        print('Testing resampling, no padding...')
        littledict = GeoDict({
            'xmin': 2.0,
            'xmax': 4.0,
            'ymin': 2.0,
            'ymax': 4.0,
            'dx': 1.0,
            'dy': 1.0,
            'ny': 3,
            'nx': 3
        })
        shake5 = ShakeGrid.load(testfile,
                                samplegeodict=littledict,
                                resample=True,
                                doPadding=False,
                                padValue=np.nan)
        # Cell-center offset of 0.5 in both axes -> averages of 2x2 blocks.
        output = np.array([[10.5, 11.5, 12.5], [16.5, 17.5, 18.5],
                           [22.5, 23.5, 24.5]])
        tdata = shake5.getLayer('pga').getData()
        np.testing.assert_almost_equal(tdata, output)
        print('Passed resampling, no padding...')

        print('Testing resampling and padding...')
        pga = np.arange(0, 16, dtype=np.float32).reshape(4, 4)
        pgv = np.arange(1, 17, dtype=np.float32).reshape(4, 4)
        mmi = np.arange(2, 18, dtype=np.float32).reshape(4, 4)
        geodict = GeoDict({
            'xmin': 0.5,
            'ymax': 3.5,
            'ymin': 0.5,
            'xmax': 3.5,
            'dx': 1.0,
            'dy': 1.0,
            'ny': 4,
            'nx': 4
        })
        layers = OrderedDict()
        layers['pga'] = pga
        layers['pgv'] = pgv
        layers['mmi'] = mmi
        shake = ShakeGrid(layers, geodict, eventDict, shakeDict, uncDict)
        shake.save(testfile, version=3)
        bigdict = GeoDict({
            'xmin': 0.0,
            'xmax': 4.0,
            'ymin': 0.0,
            'ymax': 4.0,
            'dx': 1.0,
            'dy': 1.0,
            'ny': 5,
            'nx': 5
        })
        shake6 = ShakeGrid.load(testfile,
                                samplegeodict=bigdict,
                                resample=True,
                                doPadding=True,
                                padValue=np.nan)
        tdata = shake6.getLayer('pga').getData()
        output = np.array([[np.nan, np.nan, np.nan, np.nan, np.nan],
                           [np.nan, 2.5, 3.5, 4.5, np.nan],
                           [np.nan, 6.5, 7.5, 8.5, np.nan],
                           [np.nan, 10.5, 11.5, 12.5, np.nan],
                           [np.nan, np.nan, np.nan, np.nan, np.nan]])
        np.testing.assert_almost_equal(tdata, output)
        print('Passed resampling and padding...')
    except Exception as error:
        # Fixed: the original handler referenced an undefined name
        # 'xmlfile', so any failure raised a NameError that masked the
        # real error; report the actual file and re-raise to preserve
        # the original traceback (instead of 'assert 0 == 1').
        print('Failed to read grid.xml format file "%s". Error "%s".' %
              (testfile, str(error)))
        raise
    finally:
        if os.path.isdir(tdir):
            shutil.rmtree(tdir)
Example #5
0
def basic_test():
    """End-to-end check of the empirical fatality model.

    Builds a small MMI/population/country-code grid set, runs Exposure and
    EmpiricalLoss with overridden (simplified) loss rates for AF and CN, and
    asserts the dictionary and grid fatality totals. Also demonstrates
    passing custom rates into LognormalModel.getLosses().
    """
    # 5x5 MMI grid spanning intensities 5-10.
    mmidata = np.array([[7, 8, 8, 8, 7], [8, 9, 9, 9, 8], [8, 9, 10, 9, 8],
                        [8, 9, 9, 8, 8], [7, 8, 8, 6, 5]],
                       dtype=np.float32)
    # Uniform population: 10 million people per cell.
    popdata = np.ones_like(mmidata) * 1e7
    # ISO numeric country codes: 4 = Afghanistan, 156 = China.
    isodata = np.array(
        [[4, 4, 4, 4, 4], [4, 4, 4, 4, 4], [4, 4, 156, 156, 156],
         [156, 156, 156, 156, 156], [156, 156, 156, 156, 156]],
        dtype=np.int32)

    shakefile = get_temp_file_name()
    popfile = get_temp_file_name()
    isofile = get_temp_file_name()
    geodict = GeoDict({
        'xmin': 0.5,
        'xmax': 4.5,
        'ymin': 0.5,
        'ymax': 4.5,
        'dx': 1.0,
        'dy': 1.0,
        'nx': 5,
        'ny': 5
    })
    layers = OrderedDict([
        ('mmi', mmidata),
    ])
    event_dict = {
        'event_id': 'us12345678',
        'magnitude': 7.8,
        'depth': 10.0,
        'lat': 34.123,
        'lon': -118.123,
        'event_timestamp': datetime.utcnow(),
        'event_description': 'foo',
        'event_network': 'us'
    }
    shake_dict = {
        'event_id': 'us12345678',
        'shakemap_id': 'us12345678',
        'shakemap_version': 1,
        'code_version': '4.5',
        'process_timestamp': datetime.utcnow(),
        'shakemap_originator': 'us',
        'map_status': 'RELEASED',
        'shakemap_event_type': 'ACTUAL'
    }
    unc_dict = {'mmi': (1, 1)}
    shakegrid = ShakeGrid(layers, geodict, event_dict, shake_dict, unc_dict)
    shakegrid.save(shakefile)
    popgrid = Grid2D(popdata, geodict.copy())
    isogrid = Grid2D(isodata, geodict.copy())
    write(popgrid, popfile, 'netcdf')
    write(isogrid, isofile, 'netcdf')

    # Simple per-country growth rates so the exposure calculation can
    # project the population to the current year.
    ratedict = {
        4: {
            'start': [2010, 2012, 2014, 2016],
            'end': [2012, 2014, 2016, 2018],
            'rate': [0.01, 0.02, 0.03, 0.04]
        },
        156: {
            'start': [2010, 2012, 2014, 2016],
            'end': [2012, 2014, 2016, 2018],
            'rate': [0.02, 0.03, 0.04, 0.05]
        }
    }

    popgrowth = PopulationGrowth(ratedict)
    popyear = datetime.utcnow().year
    exposure = Exposure(popfile, popyear, isofile, popgrowth=popgrowth)
    expdict = exposure.calcExposure(shakefile)

    modeldict = [
        LognormalModel('AF', 11.613073, 0.180683, 1.0),
        LognormalModel('CN', 10.328811, 0.100058, 1.0)
    ]
    fatmodel = EmpiricalLoss(modeldict)

    # for the purposes of this test, let's override the rates
    # for Afghanistan and China with simpler numbers.
    # (index i = fatality rate at MMI i+1; nonzero only for MMI 5-9)
    fatmodel.overrideModel(
        'AF',
        np.array([0, 0, 0, 0, 1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 0],
                 dtype=np.float32))
    fatmodel.overrideModel(
        'CN',
        np.array([0, 0, 0, 0, 1e-7, 1e-6, 1e-5, 1e-4, 1e-3, 0],
                 dtype=np.float32))

    print('Testing very basic fatality calculation...')
    fatdict = fatmodel.getLosses(expdict)
    # strictly speaking, the Afghanistan fatalities should be 462,000 but
    # floating point precision dictates otherwise.
    testdict = {'CN': 46111, 'AF': 461999, 'TotalFatalities': 508110}
    for key, value in fatdict.items():
        assert value == testdict[key]
    print('Passed very basic fatality calculation...')

    print('Testing grid fatality calculations...')
    mmidata = exposure.getShakeGrid().getLayer('mmi').getData()
    popdata = exposure.getPopulationGrid().getData()
    isodata = exposure.getCountryGrid().getData()
    fatgrid = fatmodel.getLossGrid(mmidata, popdata, isodata)

    # Grid total differs from the dict total by 1 due to rounding per cell.
    assert np.nansum(fatgrid) == 508111
    print('Passed grid fatality calculations...')

    # Testing modifying rates and stuffing them back in...
    chile = LognormalModel('CL', 19.786773, 0.259531, 0.0)
    rates = chile.getLossRates(np.arange(5, 10))
    modrates = rates * 2  # does this make event twice as deadly?

    # roughly the exposures from 2015-9-16 CL event
    expo_pop = np.array(
        [0, 0, 0, 1047000, 7314000, 1789000, 699000, 158000, 0, 0])
    mmirange = np.arange(5, 10)
    chile_deaths = chile.getLosses(expo_pop[4:9], mmirange)
    chile_double_deaths = chile.getLosses(expo_pop[4:9],
                                          mmirange,
                                          rates=modrates)
    print('Chile model fatalities: %f' % chile_deaths)
    print('Chile model x2 fatalities: %f' % chile_double_deaths)
Example #6
0
def model_test_simple():
    """Run the semi-empirical fatality model on a small synthetic scenario.

    Builds 5x5 population/country/urban/MMI grids mixing Afghanistan and
    Japan cells, saves them to temporary files, and checks the total loss
    computed by SemiEmpiricalFatality. Temporary files are always removed.
    """
    A = 4  #ccode for afghanistan
    J = 392  #ccode for japan
    R = 1  #rural code
    U = 2  #urban code
    #create a 5x5 population data set with 1000 people in each cell
    popdata = np.ones((5, 5)) * 1000.0
    #create a mixed grid of afghanistan and japan (have very different inventory,collapse, and fatality rates.)
    isodata = np.array([[A, A, A, A, A], [A, A, A, A, A], [A, A, A, J, J],
                        [J, J, J, J, J], [J, J, J, J, J]],
                       dtype=np.int16)
    #make a mix of urban and rural cells
    urbdata = np.array([[R, R, R, R, R], [R, U, U, U, R], [R, U, U, U, U],
                        [U, U, U, R, R], [R, R, R, R, R]],
                       dtype=np.int16)
    mmidata = np.array([[6, 7, 8, 9, 6], [7, 8, 9, 6, 7], [8, 9, 6, 6, 7],
                        [8, 9, 6, 7, 8], [9, 6, 7, 8, 9]],
                       dtype=np.float32)
    homedir = os.path.dirname(
        os.path.abspath(__file__))  #where is this script?
    invfile = os.path.join(homedir, '..', 'data', 'semi_inventory.hdf')
    colfile = os.path.join(homedir, '..', 'data', 'semi_collapse_mmi.hdf')
    fatfile = os.path.join(homedir, '..', 'data', 'semi_casualty.hdf')
    workfile = os.path.join(homedir, '..', 'data', 'semi_workforce.hdf')
    growthfile = os.path.join(homedir, '..', 'data',
                              'WPP2015_POP_F02_POPULATION_GROWTH_RATE.xls')
    geodict = GeoDict({
        'xmin': 0.5,
        'xmax': 4.5,
        'ymin': 0.5,
        'ymax': 4.5,
        'dx': 1.0,
        'dy': 1.0,
        'nx': 5,
        'ny': 5
    })

    popgrid = GMTGrid(popdata, geodict)
    isogrid = GMTGrid(isodata, geodict)
    urbgrid = GMTGrid(urbdata, geodict)
    popyear = 2016
    layers = {'mmi': mmidata}
    eventdict = {
        'event_id': '1234',
        'magnitude': 7.5,
        'lat': 34.2,
        'lon': 118.2,
        'depth': 10.0,
        'event_timestamp': datetime(2016, 1, 1, 0, 0, 0),
        'event_description': 'test data',
        'event_network': 'us'
    }
    shakedict = {
        'event_id': '1234',
        'shakemap_id': '1234',
        'shakemap_version': 1,
        'code_version': '1.0',
        'process_timestamp': datetime.utcnow(),
        'shakemap_originator': 'us',
        'map_status': 'RELEASED',
        'shakemap_event_type': 'SCENARIO'
    }
    uncdict = {'mmi': (1.0, 1)}
    mmigrid = ShakeGrid(layers, geodict, eventdict, shakedict, uncdict)

    popfile = isofile = urbfile = shakefile = ''
    try:
        #make some temporary files
        f, popfile = tempfile.mkstemp()
        os.close(f)
        f, isofile = tempfile.mkstemp()
        os.close(f)
        f, urbfile = tempfile.mkstemp()
        os.close(f)
        f, shakefile = tempfile.mkstemp()
        os.close(f)

        popgrid.save(popfile)
        isogrid.save(isofile)
        urbgrid.save(urbfile)
        mmigrid.save(shakefile)

        semi = SemiEmpiricalFatality.fromDefault()
        losses, resfat, nonresfat = semi.getLosses(shakefile)
        assert losses == 85
        print('Semi-empirical model calculations appear to be done correctly.')
    except Exception:
        # Fixed: the original bare 'except:' swallowed every exception --
        # including the AssertionError above -- so this test could never
        # fail.  Report and re-raise instead.
        print(
            'There is an error attempting to do semi-empirical loss calculations.'
        )
        raise
    finally:
        files = [popfile, isofile, urbfile, shakefile]
        for fname in files:
            if os.path.isfile(fname):
                os.remove(fname)
Example #7
0
def make_test_semi_model(ccode, timeofday, density, popvalue, mmi):
    """Run the semi-empirical model for a single value of input.  Intended for testing purposes.

    :param ccode:
      Two letter ISO country code ('US', 'JP', etc.) to be used to extract inventory, collapse rates, etc.
    :param timeofday:
      One of 'day','night' - used to determine residential/non-residental population distribution and casualty rates.
    :param density:
      One of semimodel.URBAN (2) or semimodel.RURAL (1).
    :param popvalue:
      Scalar population value to multiply by inventory, collapse, and fatality rates.
    :param mmi:
      MMI value used to extract collapse rates in given country code.
    :returns:
      Tuple of:
        1) Total number of fatalities
        2) Dictionary of residential fatalities per building type, per country.
        3) Dictionary of non-residential fatalities per building type, per country.
    """
    country = Country()
    cdict = country.getCountry(ccode)
    # Numeric ISO code used to fill the country-code grid.
    ucode = cdict['ISON']
    # (Removed a dead 5x5 GeoDict here that was unconditionally overwritten
    # by the 2x2 GeoDict below before any use.)
    if timeofday == 'day':
        etime = datetime(2016, 1, 1, 12, 0, 0)  #noon
    elif timeofday == 'transit':
        etime = datetime(2016, 1, 1, 18, 0, 0)  #6 pm
    else:
        etime = datetime(2016, 1, 1, 0, 0, 0)  #midnight
    eventdict = {
        'event_id': '1234',
        'magnitude': 7.5,
        'lat': 0.0,
        'lon': 0.0,
        'depth': 10.0,
        'event_timestamp': etime,
        'event_description': 'test data',
        'event_network': 'us'
    }
    shakedict = {
        'event_id': '1234',
        'shakemap_id': '1234',
        'shakemap_version': 1,
        'code_version': '1.0',
        'process_timestamp': datetime.utcnow(),
        'shakemap_originator': 'us',
        'map_status': 'RELEASED',
        'shakemap_event_type': 'SCENARIO'
    }
    uncdict = {'mmi': (1.0, 1)}
    # 2x2 grids: total population popvalue split evenly over four cells,
    # uniform country, density, and MMI.
    popdata = np.ones((2, 2), dtype=np.float32) * (popvalue) / 4
    isodata = np.ones((2, 2), dtype=np.int16) * ucode
    urbdata = np.ones((2, 2), dtype=np.int16) * density
    mmidata = np.ones((2, 2), dtype=np.float32) * mmi
    geodict = GeoDict({
        'xmin': 0.5,
        'xmax': 1.5,
        'ymin': 0.5,
        'ymax': 1.5,
        'dx': 1.0,
        'dy': 1.0,
        'nx': 2,
        'ny': 2
    })
    popgrid = GMTGrid(popdata, geodict)
    isogrid = GMTGrid(isodata, geodict)
    urbgrid = GMTGrid(urbdata, geodict)
    popyear = 2016
    layers = {'mmi': mmidata}
    mmigrid = ShakeGrid(layers, geodict, eventdict, shakedict, uncdict)
    popfile = isofile = urbfile = shakefile = ''
    popsum = None
    newresfat = None
    # Fixed: this was initialized as the misspelled 'newnresfat', leaving
    # 'newnonresfat' (assigned and returned below) undefined on that path.
    newnonresfat = None
    try:
        #make some temporary files
        f, popfile = tempfile.mkstemp()
        os.close(f)
        f, isofile = tempfile.mkstemp()
        os.close(f)
        f, urbfile = tempfile.mkstemp()
        os.close(f)
        f, shakefile = tempfile.mkstemp()
        os.close(f)

        popgrid.save(popfile)
        isogrid.save(isofile)
        urbgrid.save(urbfile)
        mmigrid.save(shakefile)

        semi = SemiEmpiricalFatality.fromDefault()
        semi.setGlobalFiles(popfile, popyear, urbfile, isofile)
        t, resfat, nonresfat = semi.getLosses(shakefile)
        # Divide per-building-type fatalities by 4 (population was spread
        # over 4 cells); floor sub-1 values before summing, matching the
        # model's per-cell rounding behavior.
        popsum = 0
        newresfat = {ccode: {}}
        newnonresfat = {ccode: {}}
        for key, value in resfat[ccode].items():
            if value < 1:
                value = np.floor(value)
            newresfat[ccode][key] = value / 4.0
            popsum += value / 4.0
        for key, value in nonresfat[ccode].items():
            newnonresfat[ccode][key] = value / 4.0
            if value < 1:
                value = np.floor(value)
            popsum += value / 4.0
        popsum = int(popsum)
    finally:
        files = [popfile, isofile, urbfile, shakefile]
        for fname in files:
            if os.path.isfile(fname):
                os.remove(fname)
    return (popsum, newresfat, newnonresfat)
Example #8
0
def makeTestData():
    """Build the grid layers, ShakeMap XML files, and ConfigObj .ini files
    used by the ground-failure test suite.

    Writes its outputs into the current working directory:
    ``test_*.bil`` layer grids, ``test_precip/`` and
    ``test_slope_quantiles_godt/`` directories, ``test_shakegrid.xml``,
    ``test_uncert.xml``, ``test_shakegrid_bounds.xml``, and one
    ``testconfig_*.ini`` per model (logistic, hazus, classic, godt, map).

    NOTE(review): relies on module-level globals defined elsewhere in this
    file (``friction``, ``slope``, ``vs30``, ``cti1``, ``precip``,
    ``cohesion``, ``watertable``, ``susceptibility``, ``slopeGodt``,
    ``slopequants``, ``slopequantnames``, ``pga``, ``pgv``, ``stdpga``,
    ``stdpgv``, ``pga1``, ``pgv1``, ``geodict``, ``geodict1``,
    ``susgeodict``, ``units``) — it resolves layer arrays by name via
    ``eval()`` (test-only code; do not copy this pattern for untrusted
    input).
    """
    # make test layers and config files (model, and mapping)
    Xmod = ['friction', 'slope', 'vs30', 'cti1', 'precip']
    terms = ['friction', 'slope/100.', 'log(vs30)', 'cti1', 'precipMONTH']
    coefficients = [0.3, 2.1, -0.5, 0.01, 1.0]
    Xhaz = ['susceptibility']
    Xgod = ['cohesion', 'friction', 'slope']
    Xcla = ['cohesion', 'friction', 'slope', 'watertable']

    configModel = OrderedDict()
    configHazus = OrderedDict()
    configGodt = OrderedDict()
    configClassic = OrderedDict()

    # Work on model configs

    #LOGISTIC
    configModel.setdefault('test_model', {})
    configModel['test_model'].setdefault('shortref', 'Name et al. year')
    configModel['test_model'].setdefault('longref', 'full reference')
    configModel['test_model'].setdefault('layers', {})
    configModel['test_model'].setdefault('interpolations', {})
    configModel['test_model'].setdefault('terms', {})
    configModel['test_model'].setdefault('coefficients', {})['b0'] = '3.5'
    configModel['test_model'].setdefault(
        'display_options', {
            'lims': {
                'model': 'np.linspace(0., 0.4, 10)'
            },
            'colors': {
                'default': 'cm.inferno',
                'alpha': '0.7',
                'model': 'cm.jet'
            },
            'logscale': {
                'model': 'False'
            },
            'maskthresholds': {
                'model': '0.001'
            }
        })
    # NOTE(review): 'gfeype' looks like a typo for 'gfetype', but it is used
    # consistently for every model config here — confirm against the
    # consuming code before renaming the key.
    configModel['test_model']['gfeype'] = 'landslide'
    configModel['test_model']['baselayer'] = 'slope'
    configModel['test_model']['slopemin'] = 5.
    configModel['test_model']['slopemax'] = 90.
    configModel['test_model']['display_options'].setdefault(
        'pga', {
            'lims': 'None',
            'colors': 'cm.jet',
            'logscale': 'True'
        })
    configModel['test_model']['display_options'].setdefault(
        'pgv', {
            'lims': 'None',
            'colors': 'cm.jet',
            'logscale': 'False'
        })

    # HAZUS
    configHazus.setdefault('hazus', {})
    configHazus['hazus'].setdefault('shortref', 'Name et al. year')
    configHazus['hazus'].setdefault('longref', 'full reference')
    configHazus['hazus']['gfeype'] = 'landslide'
    configHazus['hazus'].setdefault('layers', {})
    configHazus['hazus'].setdefault('parameters', {'dnthresh': 5.})
    configHazus['hazus'].setdefault(
        'display_options', {
            'lims': {
                'model': 'np.linspace(0., 0.4, 10)'
            },
            'colors': {
                'default': 'cm.inferno',
                'alpha': '0.7',
                'model': 'cm.jet'
            },
            'logscale': {
                'model': 'False'
            },
            'maskthresholds': {
                'model': '0.001'
            }
        })
    configHazus['hazus']['display_options'].setdefault('susceptibility', {
        'lims': 'None',
        'colors': 'cm.jet',
        'logscale': 'True'
    })

    # GODT
    configGodt.setdefault('godt_2008', {})
    configGodt['godt_2008'].setdefault('shortref', 'Name et al. year')
    configGodt['godt_2008'].setdefault('longref', 'full reference')
    configGodt['godt_2008']['gfeype'] = 'landslide'
    configGodt['godt_2008'].setdefault('layers', {})
    configGodt['godt_2008'].setdefault(
        'parameters', {
            'thick': '2.4',
            'uwt': '15.7',
            'nodata_cohesion': '1.0',
            'nodata_friction': '26.',
            'dnthresh': '5.',
            'fsthresh': '1.01',
            'acthresh': '0.05'
        })
    configGodt['godt_2008'].setdefault(
        'display_options', {
            'lims': {
                'model': 'np.linspace(0., 0.4, 10)'
            },
            'colors': {
                'default': 'cm.inferno',
                'alpha': '0.7',
                'model': 'cm.jet'
            },
            'logscale': {
                'model': 'False'
            },
            'maskthresholds': {
                'model': '0.001'
            }
        })
    configGodt['godt_2008']['display_options'].setdefault(
        'pga', {
            'lims': 'None',
            'colors': 'cm.jet',
            'logscale': 'True'
        })

    # NEWMARK
    configClassic.setdefault('classic_newmark', {})
    configClassic['classic_newmark'].setdefault('shortref', 'Name et al. year')
    configClassic['classic_newmark'].setdefault('longref', 'full reference')
    configClassic['classic_newmark']['gfeype'] = 'landslide'
    configClassic['classic_newmark'].setdefault('layers', {})
    configClassic['classic_newmark'].setdefault(
        'parameters', {
            'thick': '2.4',
            'uwt': '15.7',
            'nodata_cohesion': '1.0',
            'nodata_friction': '26.',
            'dnthresh': '5.',
            'fsthresh': '1.01',
            'acthresh': '0.05',
            'slopethresh': '5.',
            'm': '0.5'
        })
    configClassic['classic_newmark'].setdefault(
        'display_options', {
            'lims': {
                'model': 'np.linspace(0., 0.4, 10)'
            },
            'colors': {
                'default': 'cm.inferno',
                'alpha': '0.7',
                'model': 'cm.jet'
            },
            'logscale': {
                'model': 'False'
            },
            'maskthresholds': {
                'model': '0.001'
            }
        })
    configClassic['classic_newmark']['display_options'].setdefault(
        'pga', {
            'lims': 'None',
            'colors': 'cm.jet',
            'logscale': 'True'
        })

    # Save each logistic-model layer grid and register it in the config.
    for k, items in enumerate(Xmod):
        coef = 'b%1d' % (k + 1)
        # make a GDALGrid object; eval() resolves the module-level array
        testgrid = GDALGrid(eval(items), geodict)
        # Save the file
        if items == 'precip':
            # exist_ok avoids the old bare `except: pass` which swallowed
            # every exception, not just "directory already exists"
            os.makedirs('test_precip', exist_ok=True)
            filename = 'test_precip/prec_Jan.bil'  # Only make January for testing
        else:
            filename = 'test_%s.bil' % (items)
        testgrid.save(filename, format='EHdr')

        # add to test config file
        configModel['test_model']['layers'].update({
            items: {
                'file': filename.split('/')[0],
                'units': units[items],
                'longref': 'longref',
                'shortref': 'shortref'
            }
        })
        configModel['test_model']['interpolations'].update({items: 'nearest'})
        configModel['test_model']['terms'].update({coef: terms[k]})
        configModel['test_model']['coefficients'].update(
            {coef: coefficients[k]})
        configModel['test_model']['display_options']['lims'].update(
            {items: 'None'})
        configModel['test_model']['display_options']['colors'].update(
            {items: 'None'})
        configModel['test_model']['display_options']['logscale'].update(
            {items: 'False'})
        configModel['test_model']['display_options']['maskthresholds'].update(
            {items: 'None'})

    # HAZUS uses only the susceptibility layer (on its own geodict).
    for items in Xhaz:
        # Save the file if not already saved
        filename = 'test_%s.bil' % (items)
        if items == 'susceptibility':
            testgrid = GDALGrid(eval(items), susgeodict)
            testgrid.save(filename, format='EHdr')

        # add to test config file
        configHazus['hazus']['layers'].update({
            items: {
                'file': filename,
                'units': units[items],
                'longref': 'longref',
                'shortref': 'shortref'
            }
        })
        configHazus['hazus']['display_options']['lims'].update({items: 'None'})
        configHazus['hazus']['display_options']['colors'].update(
            {items: 'None'})
        configHazus['hazus']['display_options']['logscale'].update(
            {items: 'False'})
        configHazus['hazus']['display_options']['maskthresholds'].update(
            {items: 'None'})

    # Godt layers: slope is a directory of quantile grids, the others are
    # single files (cohesion saved here, friction reused from the Xmod loop).
    for items in Xgod:
        # make a GDALGrid object
        if items == 'slope':
            testgrid = GDALGrid(eval('slopeGodt'), geodict)
        else:
            testgrid = GDALGrid(eval(items), geodict)
        # Save the file
        filename = 'test_%s.bil' % (items)
        word = 'file'
        if items == 'slope':
            # slope is registered under 'filepath' and points at a directory
            word = 'filepath'
            os.makedirs('test_slope_quantiles_godt', exist_ok=True)
            for j, slp in enumerate(slopequants):
                filename = 'test_slope_quantiles_godt/slope%s.bil' % (
                    slopequantnames[j])  # one grid per slope quantile
                temp = GDALGrid(eval('slopeGodt') * slp, geodict)
                temp.save(filename, format='EHdr')
            filename = 'test_slope_quantiles_godt'
        elif items == 'cohesion':
            testgrid.save(filename, format='EHdr')

        # add to test config file
        configGodt['godt_2008']['layers'].update({
            items: {
                word: filename,
                'units': units[items],
                'longref': 'longref',
                'shortref': 'shortref'
            }
        })
        configGodt['godt_2008']['display_options']['lims'].update(
            {items: 'None'})
        configGodt['godt_2008']['display_options']['colors'].update(
            {items: 'None'})
        configGodt['godt_2008']['display_options']['logscale'].update(
            {items: 'False'})
        configGodt['godt_2008']['display_options']['maskthresholds'].update(
            {items: 'None'})

    # Classic Newmark layers; only watertable needs saving here, the rest
    # were written by earlier loops.
    for items in Xcla:
        # make a GDALGrid object
        testgrid = GDALGrid(eval(items), geodict)
        # Save the file
        filename = 'test_%s.bil' % (items)
        if items == 'watertable':
            testgrid.save(filename, format='EHdr')

        # add to test config file
        configClassic['classic_newmark']['layers'].update({
            items: {
                'file': filename,
                'units': units[items],
                'longref': 'longref',
                'shortref': 'shortref'
            }
        })
        configClassic['classic_newmark']['display_options']['lims'].update(
            {items: 'None'})
        configClassic['classic_newmark']['display_options']['colors'].update(
            {items: 'None'})
        configClassic['classic_newmark']['display_options']['logscale'].update(
            {items: 'False'})
        configClassic['classic_newmark']['display_options'][
            'maskthresholds'].update({items: 'None'})

    # Make test_shakegrid and test_uncert
    eventDict = OrderedDict([
        ('event_id', 'test'), ('lon', 0.5), ('lat', 0.5),
        ('event_timestamp', datetime(2000, 1, 5, 0, 30, 55)),
        ('event_network', 'na'), ('magnitude', 6.0),
        ('event_description', 'Test event'), ('depth', 5.0)
    ])
    shakeDict = OrderedDict([('process_timestamp',
                              datetime(2000, 1, 6, 20, 38, 19)),
                             ('event_id', 'test'), ('shakemap_version', 2),
                             ('code_version', '1 billion'),
                             ('shakemap_event_type', 'TEST'),
                             ('map_status', 'TEST'), ('shakemap_id', 'test'),
                             ('shakemap_originator', 'na')])
    uncertaintyDict = {}

    layers1 = {'pga': pga, 'pgv': pgv}
    shakegrid = ShakeGrid(layers1, geodict, eventDict, shakeDict,
                          uncertaintyDict)
    shakegrid.save('test_shakegrid.xml')

    layers2 = {'stdpga': stdpga, 'stdpgv': stdpgv}
    uncertgrid = ShakeGrid(layers2, geodict, eventDict, shakeDict,
                           uncertaintyDict)
    uncertgrid.save('test_uncert.xml')

    layers3 = {'pga': pga1, 'pgv': pgv1}
    shakegrid = ShakeGrid(layers3, geodict1, eventDict, shakeDict,
                          uncertaintyDict)
    shakegrid.save('test_shakegrid_bounds.xml')

    # Write each config dict out as a .ini file via ConfigObj.
    C = ConfigObj(configModel)
    C.filename = 'testconfig_logimodel.ini'
    C.write()

    C = ConfigObj(configHazus)
    C.filename = 'testconfig_hazus.ini'
    C.write()

    C = ConfigObj(configClassic)
    C.filename = 'testconfig_classic.ini'
    C.write()

    C = ConfigObj(configGodt)
    C.filename = 'testconfig_godt.ini'
    C.write()

    configMap = OrderedDict({
        'dem': {
            'file': 'None'
        },
        'roads': {
            'file': 'None'
        },
        'cities': {
            'file': 'None'
        },
        'ocean': {
            'file': 'None'
        },
        'colors': {
            'roadcolor': '808080',
            'countrycolor': '474747',
            'watercolor': 'B8EEFF'
        }
    })
    C = ConfigObj(configMap)
    C.filename = 'testconfig_map.ini'
    C.write()
Beispiel #9
0
    def execute(self) -> None:
        """Create grid.xml and uncertainty.xml files.

        Reads ``shake_result.hdf`` from the event's ``current/products``
        directory and, for every intensity-measure component in the
        container, writes a legacy-format ``grid.xml`` and
        ``uncertainty.xml`` (file names are suffixed with the component
        for anything other than GREATER_OF_TWO_HORIZONTAL), then registers
        each file with ``self.contents``.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
            NotImplementedError: When the container holds point data rather
                than gridded data.
        """
        logger = logging.getLogger(__name__)
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # get all of the grid layers and the geodict
        if container.getDataType() != 'grid':
            raise NotImplementedError('gridxml module can only function on '
                                      'gridded data, not sets of points')

        components = container.getComponents()
        for component in components:
            # One pass builds grid.xml (means), the other uncertainty.xml
            # (standard deviations) for the same component.
            xml_types = ['grid', 'uncertainty']
            for xml_type in xml_types:
                layers = OrderedDict()
                field_keys = OrderedDict()
                gridnames = container.getIMTs(component)
                for gridname in gridnames:
                    imt_field = _oq_to_gridxml(gridname)
                    imtdict = container.getIMTGrids(gridname, component)
                    if xml_type == 'grid':
                        grid_data = imtdict['mean']
                        metadata = imtdict['mean_metadata']
                    elif xml_type == 'uncertainty':
                        grid_data = imtdict['std']
                        metadata = imtdict['std_metadata']

                    units = metadata['units']
                    digits = metadata['digits']
                    # convert from HDF units to legacy grid.xml units
                    # (means only; uncertainties stay in natural-log units)
                    if xml_type == 'grid':
                        if units == 'ln(cm/s)':
                            # e.g. PGV: log space -> cm/s
                            grid_data = np.exp(grid_data)
                            units = 'cm/s'
                        elif units == 'ln(g)':
                            # e.g. PGA/SA: log g -> percent-g ('%g' is the
                            # legacy unit label, not a printf format)
                            grid_data = np.exp(grid_data) * 100
                            units = '%g'
                        else:
                            pass

                    if xml_type == 'grid':
                        layers[imt_field] = grid_data
                        field_keys[imt_field] = (units, digits)
                    else:
                        # uncertainty layers get the legacy 'STD' prefix
                        layers['STD' + imt_field] = grid_data
                        field_keys['STD' + imt_field] = (units, digits)

                # NOTE(review): 'metadata' (and 'digits') below are whatever
                # the LAST IMT in the loop above left bound; if gridnames
                # were empty this raises NameError — presumably every
                # component has at least one IMT. Confirm with container API.
                if xml_type == 'grid':
                    # grid.xml also carries the site-condition (Vs30) layer
                    grid_data, _ = container.getArray([], 'vs30')
                    units = 'm/s'
                    digits = metadata['digits']
                    layers['SVEL'] = grid_data
                    field_keys['SVEL'] = (units, digits)

                geodict = GeoDict(metadata)

                config = container.getConfig()

                # event dictionary
                info = container.getMetadata()
                event_info = info['input']['event_information']
                event_dict = {}
                event_dict['event_id'] = event_info['event_id']
                event_dict['magnitude'] = float(event_info['magnitude'])
                event_dict['depth'] = float(event_info['depth'])
                event_dict['lat'] = float(event_info['latitude'])
                event_dict['lon'] = float(event_info['longitude'])
                # origin_time may be in either of two supported formats
                try:
                    event_dict['event_timestamp'] = datetime.strptime(
                        event_info['origin_time'], constants.TIMEFMT)
                except ValueError:
                    event_dict['event_timestamp'] = datetime.strptime(
                        event_info['origin_time'], constants.ALT_TIMEFMT)
                event_dict['event_description'] = event_info['location']
                event_dict['event_network'] = \
                    info['input']['event_information']['eventsource']
                event_dict['intensity_observations'] =\
                    info['input']['event_information']['intensity_observations']
                event_dict['seismic_stations'] =\
                    info['input']['event_information']['seismic_stations']
                # 'Origin' fault reference means no finite fault was used
                if info['input']['event_information']['fault_ref'] == 'Origin':
                    event_dict['point_source'] = 'True'
                else:
                    event_dict['point_source'] = 'False'

                # shake dictionary
                shake_dict = {}
                shake_dict['event_id'] = event_dict['event_id']
                shake_dict['shakemap_id'] = event_dict['event_id']
                shake_dict['shakemap_version'] = \
                    info['processing']['shakemap_versions']['map_version']
                shake_dict['code_version'] = shakemap.__version__
                ptime = info['processing']['shakemap_versions']['process_time']
                try:
                    shake_dict['process_timestamp'] = datetime.strptime(
                        ptime, constants.TIMEFMT)
                except ValueError:
                    shake_dict['process_timestamp'] = datetime.strptime(
                        ptime, constants.ALT_TIMEFMT)

                shake_dict['shakemap_originator'] = \
                    config['system']['source_network']
                shake_dict['map_status'] = config['system']['map_status']
                # scenario events are flagged by the '_se' event-id suffix
                shake_dict['shakemap_event_type'] = 'ACTUAL'
                if event_dict['event_id'].endswith('_se'):
                    shake_dict['shakemap_event_type'] = 'SCENARIO'

                shake_grid = ShakeGrid(layers,
                                       geodict,
                                       event_dict,
                                       shake_dict, {},
                                       field_keys=field_keys)
                if component == 'GREATER_OF_TWO_HORIZONTAL':
                    # canonical component gets the unsuffixed legacy name
                    fname = os.path.join(datadir, '%s.xml' % xml_type)
                else:
                    fname = os.path.join(datadir,
                                         '%s_%s.xml' % (xml_type, component))
                logger.debug('Saving IMT grids to %s' % fname)
                shake_grid.save(fname)  # TODO - set grid version number
                cname = os.path.split(fname)[1]

                # register the product file with the contents manifest
                if xml_type == 'grid':
                    self.contents.addFile(
                        'xmlGrids', 'XML Grid',
                        'XML grid of %s ground motions' % component, cname,
                        'text/xml')
                else:
                    self.contents.addFile(
                        'uncertaintyGrids', 'Uncertainty Grid',
                        'XML grid of %s uncertainties' % component, cname,
                        'text/xml')

        container.close()