Example #1
def test_rasterize():
    geodict = GeoDict({'xmin':0.5,'xmax':3.5,
                       'ymin':0.5,'ymax':3.5,
                       'dx':1.0,'dy':1.0,
                       'ny':4,'nx':4})
    print('Testing rasterizeFromGeometry() burning in values from a polygon sequence...')
    #Define two simple polygons and assign them to shapes
    poly1 = [(0.25,3.75),(1.25,3.25),(1.25,2.25)]
    poly2 = [(2.25,3.75),(3.25,3.75),(3.75,2.75),(3.75,1.50),(3.25,0.75),(2.25,2.25)]
    shape1 = {'properties':{'value':5},'geometry':mapping(Polygon(poly1))}
    shape2 = {'properties':{'value':7},'geometry':mapping(Polygon(poly2))}
    shapes = [shape1,shape2]
    print('Testing burning in values where polygons need not contain pixel centers...')
    grid = Grid2D.rasterizeFromGeometry(shapes,geodict,fillValue=0,attribute='value',mustContainCenter=False)
    output = np.array([[5,5,7,7],
                       [5,5,7,7],
                       [0,0,7,7],
                       [0,0,0,7]])
    np.testing.assert_almost_equal(grid.getData(),output)
    print('Passed burning in values where polygons need not contain pixel centers.')

    print('Testing burning in values where polygons must contain pixel centers...')
    grid2 = Grid2D.rasterizeFromGeometry(shapes,geodict,fillValue=0,attribute='value',mustContainCenter=True)
    output = np.array([[5,0,7,0],
                       [0,0,7,7],
                       [0,0,0,7],
                       [0,0,0,0]])
    np.testing.assert_almost_equal(grid2.getData(),output)
    print('Passed burning in values where polygons must contain pixel centers.')
Example #2
def test_rasterize():
    geodict = GeoDict({'xmin': 0.5, 'xmax': 3.5,
                       'ymin': 0.5, 'ymax': 3.5,
                       'dx': 1.0, 'dy': 1.0,
                       'ny': 4, 'nx': 4})
    print('Testing rasterizeFromGeometry() burning in values from a polygon sequence...')
    # Define two simple polygons and assign them to shapes
    poly1 = [(0.25, 3.75), (1.25, 3.25), (1.25, 2.25)]
    poly2 = [(2.25, 3.75), (3.25, 3.75), (3.75, 2.75),
             (3.75, 1.50), (3.25, 0.75), (2.25, 2.25)]
    shape1 = {'properties': {'value': 5}, 'geometry': mapping(Polygon(poly1))}
    shape2 = {'properties': {'value': 7}, 'geometry': mapping(Polygon(poly2))}
    shapes = [shape1, shape2]
    print('Testing burning in values where polygons need not contain pixel centers...')
    grid = Grid2D.rasterizeFromGeometry(
        shapes, geodict, fillValue=0, attribute='value', mustContainCenter=False)
    output = np.array([[5, 5, 7, 7],
                       [5, 5, 7, 7],
                       [0, 0, 7, 7],
                       [0, 0, 0, 7]])
    np.testing.assert_almost_equal(grid.getData(), output)
    print('Passed burning in values where polygons need not contain pixel centers.')

    print('Testing burning in values where polygons must contain pixel centers...')
    grid2 = Grid2D.rasterizeFromGeometry(
        shapes, geodict, fillValue=0, attribute='value', mustContainCenter=True)
    output = np.array([[5, 0, 7, 0],
                       [0, 0, 7, 7],
                       [0, 0, 0, 7],
                       [0, 0, 0, 0]])
    np.testing.assert_almost_equal(grid2.getData(), output)
    print('Passed burning in values where polygons must contain pixel centers.')
Example #3
def test_grid_hdf_container():
    f,fname = tempfile.mkstemp()
    os.close(f)
    try:
        #test grid container
        container = GridHDFContainer.create(fname)

        # before we put anything in here, let's make sure we get empty lists from
        # all of the methods that are supposed to return lists of stuff.
        assert container.getGrids() == []
        
        #test grid2d
        geodict = GeoDict.createDictFromBox(-118.5,-114.5,32.1,36.7,0.01,0.02)
        nrows,ncols = geodict.ny,geodict.nx
        data = np.random.rand(nrows,ncols)
        metadata = {'name':'Gandalf',
                    'color':'white',
                    'powers':'magic'}
        grid = Grid2D(data,geodict)
        container.setGrid('testgrid',grid,metadata=metadata)
        outgrid,outmetadata = container.getGrid('testgrid')
        np.testing.assert_array_equal(outgrid.getData(),data)
        assert outgrid.getGeoDict() == geodict
        assert outmetadata == metadata

        #set another grid without compression
        geodict = GeoDict.createDictFromBox(-119.5,-115.5,32.3,37.7,0.01,0.02)
        nrows,ncols = geodict.ny,geodict.nx
        data = np.random.rand(nrows,ncols)
        metadata = {'name':'Legolas',
                    'color':'green',
                    'powers':'stealth'}
        grid2 = Grid2D(data,geodict)
        container.setGrid('testgrid2',grid2,metadata=metadata,compression=False)
        outgrid2,outmetadata2 = container.getGrid('testgrid2')
        np.testing.assert_array_equal(outgrid2.getData(),data)
        assert outgrid2.getGeoDict() == geodict
        assert outmetadata2 == metadata
        
        #test getGrids()
        names = container.getGrids()
        assert sorted(names) == ['testgrid','testgrid2']

        #test looking for a grid that does not exist
        try:
            container.getGrid('foo')
        except LookupError as le:
            pass

        #test dropping a grid
        container.dropGrid('testgrid2')

        container.close()
        container2 = GridHDFContainer.load(fname)
        names = container2.getGrids()
        assert sorted(names) == ['testgrid']
    except:
        assert 1==2
    finally:
        os.remove(fname)
Example #4
def _trim_grid(ingrid):
    outgrid = Grid2D.copyFromGrid(ingrid)
    while np.isnan(outgrid._data).any():
        nrows, ncols = outgrid._data.shape
        top = outgrid._data[0, :]
        bottom = outgrid._data[-1, :]
        left = outgrid._data[:, 0]
        right = outgrid._data[:, -1]
        ftop = np.isnan(top).sum() / ncols
        fbottom = np.isnan(bottom).sum() / ncols
        fleft = np.isnan(left).sum() / nrows
        fright = np.isnan(right).sum() / nrows
        side = np.argmax([ftop, fbottom, fleft, fright])
        gdict = outgrid.getGeoDict().asDict()
        if side == 0:  # removing top row
            outgrid._data = outgrid._data[1:, :]
            gdict['ymax'] -= gdict['dy']
            gdict['ny'] -= 1
        elif side == 1:  # removing bottom row
            outgrid._data = outgrid._data[0:-1, :]
            gdict['ymin'] += gdict['dy']
            gdict['ny'] -= 1
        elif side == 2:  # removing left column
            outgrid._data = outgrid._data[:, 1:]
            gdict['xmin'] += gdict['dx']
            gdict['nx'] -= 1
        elif side == 3:  # removing right column
            outgrid._data = outgrid._data[:, 0:-1]
            gdict['xmax'] -= gdict['dx']
            gdict['nx'] -= 1
        geodict = GeoDict(gdict)
        outgrid = Grid2D(data=outgrid._data, geodict=geodict)

    return outgrid
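A minimal sketch of the trimming behavior above, assuming GeoDict and Grid2D are imported as in the surrounding examples and that _trim_grid() is in scope; the data values are illustrative:

import numpy as np

geodict = GeoDict({'xmin': 0.5, 'xmax': 3.5,
                   'ymin': 0.5, 'ymax': 3.5,
                   'dx': 1.0, 'dy': 1.0,
                   'ny': 4, 'nx': 4})
data = np.arange(0, 16).astype(np.float32).reshape(4, 4)
data[:, 0] = np.nan               # an all-NaN left column
grid = Grid2D(data, geodict)
trimmed = _trim_grid(grid)        # the left column has the largest NaN fraction, so it is removed first
print(trimmed.getData().shape)    # expected: (4, 3)
print(trimmed.getGeoDict().xmin)  # expected: 1.5 (xmin advanced by dx)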
Example #5
    def getIMTGrids(self, imt_name, component):
        """
        Retrieve a Grid2D object and any associated metadata from the
        container.

        Args:
            imt_name (str):
                The name of the IMT stored in the container.
            component (str):
                The IMT component stored in the container.

        Returns:
            dict: Dictionary containing 4 items:
                   - mean Grid2D object for IMT mean values.
                   - mean_metadata Dictionary containing any metadata
                     describing mean layer.
                   - std Grid2D object for IMT standard deviation values.
                   - std_metadata Dictionary containing any metadata describing
                     standard deviation layer.
        """

        if self.getDataType() != 'grid':
            raise TypeError('Requesting grid data from file containing points')

        group_name = '%s_%s' % (imt_name, component)
        if GROUPS['imt'] not in self._hdfobj:
            raise LookupError('No IMTs stored in HDF file %s'
                              % (self.getFileName()))
        if group_name not in self._hdfobj[GROUPS['imt']]:
            raise LookupError('No group called %s in HDF file %s'
                              % (imt_name, self.getFileName()))
        imt_group = self._hdfobj[GROUPS['imt']][group_name]

        # get the mean data and metadata
        mean_dset = imt_group['mean']
        mean_data = mean_dset[()]

        array_metadata, mean_metadata = _split_dset_attrs(mean_dset)
        mean_geodict = GeoDict(array_metadata)
        mean_grid = Grid2D(mean_data, mean_geodict)

        # get the std data and metadata
        std_dset = imt_group['std']
        std_data = std_dset[()]

        array_metadata, std_metadata = _split_dset_attrs(std_dset)
        std_geodict = GeoDict(array_metadata)
        std_grid = Grid2D(std_data, std_geodict)

        # create an output dictionary
        imt_dict = {
            'mean': mean_grid,
            'mean_metadata': mean_metadata,
            'std': std_grid,
            'std_metadata': std_metadata
        }
        return imt_dict
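When the container holds grid data, the returned dictionary can be unpacked as in the sketch below. The container instance and the 'pga'/'maximum' names follow the test_output_container() example later in this listing and are assumptions here:

imt_dict = container.getIMTGrids('pga', 'maximum')
mean_grid = imt_dict['mean']         # Grid2D of mean values
std_grid = imt_dict['std']           # Grid2D of standard deviations
print(mean_grid.getGeoDict())        # spatial metadata for the mean layer
print(imt_dict['mean_metadata'])     # free-form metadata dictionary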
Example #6
def test_interpolate():
    geodict = GeoDict({
        'xmin': 0.5,
        'xmax': 6.5,
        'ymin': 1.5,
        'ymax': 6.5,
        'dx': 1.0,
        'dy': 1.0,
        'ny': 6,
        'nx': 7
    })
    data = np.arange(14, 56).reshape(6, 7)

    for method in ['nearest', 'linear', 'cubic']:
        print('Testing interpolate with method "%s"...' % method)
        grid = Grid2D(data, geodict)
        sampledict = GeoDict({
            'xmin': 3.0,
            'xmax': 4.0,
            'ymin': 3.0,
            'ymax': 4.0,
            'dx': 1.0,
            'dy': 1.0,
            'ny': 2,
            'nx': 2
        })
        grid = grid.interpolateToGrid(sampledict, method=method)
        tgrid = grid.interpolate2(sampledict, method=method)
        if method == 'nearest':
            output = np.array([[30.0, 31.0], [37.0, 38.0]])
        elif method == 'linear':
            output = np.array([[34., 35.], [41., 42.]])
        elif method == 'cubic':
            output = np.array([[34., 35.], [41., 42.]])
        else:
            pass
        np.testing.assert_almost_equal(grid.getData(), output)
        print('Passed interpolate with method "%s".' % method)
        np.testing.assert_almost_equal(tgrid.getData(), output)
        print('Passed interpolate2 with method "%s".' % method)

    # speed test of interpolateToGrid and interpolate2
    geodict = GeoDict.createDictFromBox(0, 10, 0, 10, 0.01, 0.01)
    data = np.random.rand(geodict.ny, geodict.nx)
    grid = Grid2D(data, geodict)
    sampledict = GeoDict.createDictFromBox(2, 8, 2, 8, 0.098, 0.098)
    t1 = time.time()
    grid2 = grid.interpolateToGrid(sampledict, method='linear')
    t2 = time.time()
    grid3 = grid.interpolate2(sampledict, method='linear')
    t3 = time.time()
    #np.testing.assert_almost_equal(grid2._data.sum(),grid3._data.sum())
    print('scipy method: %.3f seconds' % (t2 - t1))
    print('gdal  method: %.3f seconds' % (t3 - t2))
Example #7
def test():
    print('Testing MultiGrid interpolate...')
    data = np.arange(14,56).reshape(6,7)
    geodict = GeoDict({'xmin':0.5,'xmax':6.5,'ymin':1.5,'ymax':6.5,'dx':1.0,'dy':1.0,'ny':6,'nx':7})
    layers = OrderedDict()
    layers['layer1'] = Grid2D(data,geodict)
    mgrid = MultiGrid(layers)
    sampledict = GeoDict({'xmin':3.0,'xmax':4.0,
                          'ymin':3.0,'ymax':4.0,
                          'dx':1.0,'dy':1.0,
                          'ny':2,'nx':2})
    for method in ['nearest','linear','cubic']:
        mgrid2 = mgrid.interpolateToGrid(sampledict,method=method)
        if method == 'nearest':
            output = np.array([[30.0,31.0],[37.0,38.0]])
        elif method == 'linear':
            output = np.array([[34.,35.],[41.,42.]])
        elif method == 'cubic':
            output = np.array([[34.,35.],[41.,42.]])
        else:
            pass
        np.testing.assert_almost_equal(mgrid2.getLayer('layer1').getData(),output)
    print('Passed MultiGrid interpolate test.')

    print('Testing bounds retrieval...')
    b1 = np.array(mgrid.getBounds())
    b2 = np.array((geodict.xmin,geodict.xmax,geodict.ymin,geodict.ymax))
    np.testing.assert_almost_equal(b1,b2)
    print('Passed bounds retrieval...')
    
    print('Testing MultiGrid subdivide test...')
    data = np.arange(0,9).reshape((3,3))
    geodict = GeoDict({'xmin':0.0,'xmax':10.0,
                       'ymin':0.0,'ymax':10.0,
                       'dx':5.0,'dy':5.0,
                       'ny':3,'nx':3})
    layers = OrderedDict()
    layers['layer1'] = Grid2D(data,geodict)
    hostgrid = MultiGrid(layers)
    finedict = GeoDict({'xmin':-2.5,'xmax':11.5,
                        'ymin':-1.5,'ymax':10.5,
                        'dx':2.0,'dy':2.0,
                        'nx':8,'ny':7})
    N = np.nan
    finegrid = hostgrid.subdivide(finedict,cellFill='min')
    output = np.array([[ N,   0.,   0.,   1.,   1.,   1.,   2.,   2.],
                       [ N,   0.,   0.,   1.,   1.,   1.,   2.,   2.],
                       [ N,   3.,   3.,   4.,   4.,   4.,   5.,   5.],
                       [ N,   3.,   3.,   4.,   4.,   4.,   5.,   5.],
                       [ N,   3.,   3.,   4.,   4.,   4.,   5.,   5.],
                       [ N,   6.,   6.,   7.,   7.,   7.,   8.,   8.],
                       [ N,   6.,   6.,   7.,   7.,   7.,   8.,   8.]])
    np.testing.assert_almost_equal(finegrid.getLayer('layer1').getData(),output)
    print('Passed MultiGrid subdivide test.')
Example #8
    def getIMT(self, imt_name, component):
        """
        Retrieve a Grid2D object and any associated metadata from the container.

        Args:
            imt_name (str):
                The name of the Grid2D object stored in the container.
            component (str):
                The IMT component stored in the container.

        Returns:
            dict: Dictionary containing 4 items:
                   - mean Grid2D object for IMT mean values.
                   - mean_metadata Dictionary containing any metadata
                     describing mean layer.
                   - std Grid2D object for IMT standard deviation values.
                   - std_metadata Dictionary containing any metadata describing
                     standard deviation layer.
        """
        logger = logging.getLogger()
        logger.info('Inside OutputContainer')
        group_name = '__imt_%s_%s__' % (imt_name, component)
        if group_name not in self._hdfobj:
            raise LookupError('No group called %s in HDF file %s'
                              % (imt_name, self.getFileName()))
        imt_group = self._hdfobj[group_name]

        # get the mean data and metadata
        mean_name = '__mean_%s_%s__' % (imt_name, component)
        mean_dset = imt_group[mean_name]
        mean_data = mean_dset[()]

        array_metadata, mean_metadata = _split_dset_attrs(mean_dset)
        mean_geodict = GeoDict(array_metadata)
        mean_grid = Grid2D(mean_data, mean_geodict)

        # get the std data and metadata
        std_name = '__std_%s_%s__' % (imt_name, component)
        std_dset = imt_group[std_name]
        std_data = std_dset[()]

        array_metadata, std_metadata = _split_dset_attrs(std_dset)
        std_geodict = GeoDict(array_metadata)
        std_grid = Grid2D(std_data, std_geodict)

        # create an output dictionary
        imt_dict = {
            'mean': mean_grid,
            'mean_metadata': mean_metadata,
            'std': std_grid,
            'std_metadata': std_metadata
        }
        return imt_dict
Example #9
def test_cut():
    geodict = GeoDict({'xmin': 0.5, 'xmax': 4.5, 'ymin': 0.5,
                       'ymax': 4.5, 'dx': 1.0, 'dy': 1.0, 'ny': 5, 'nx': 5})
    data = np.arange(0, 25).reshape(5, 5)

    print('Testing data extraction...')
    grid = Grid2D(data, geodict)
    xmin, xmax, ymin, ymax = (2.5, 3.5, 2.5, 3.5)
    newgrid = grid.cut(xmin, xmax, ymin, ymax)
    output = np.array([[7, 8], [12, 13]])
    np.testing.assert_almost_equal(newgrid.getData(), output)
    print('Passed data extraction...')

    print('Testing data trimming with resampling...')
    # make a more complicated test using getboundswithin
    data = np.arange(0, 84).reshape(7, 12)
    geodict = GeoDict({'xmin': -180, 'xmax': 150,
                       'ymin': -90, 'ymax': 90,
                       'dx': 30, 'dy': 30,
                       'nx': 12, 'ny': 7})
    grid = Grid2D(data, geodict)
    sampledict = GeoDict.createDictFromBox(-75,
                                           45, -45, 75, geodict.dx, geodict.dy)
    cutdict = geodict.getBoundsWithin(sampledict)
    newgrid = grid.cut(cutdict.xmin, cutdict.xmax, cutdict.ymin, cutdict.ymax)
    output = np.array([[16, 17, 18, 19],
                       [28, 29, 30, 31],
                       [40, 41, 42, 43],
                       [52, 53, 54, 55]])
    np.testing.assert_almost_equal(newgrid.getData(), output)
    print('Passed data trimming with resampling...')

    print('Test cut with self-alignment...')
    geodict = GeoDict({'xmin': 0.5, 'xmax': 4.5,
                       'ymin': 0.5, 'ymax': 6.5,
                       'dx': 1.0, 'dy': 1.0,
                       'nx': 5, 'ny': 7})
    data = np.arange(0, 35).astype(np.float32).reshape(7, 5)
    grid = Grid2D(data, geodict)
    cutxmin = 1.7
    cutxmax = 3.7
    cutymin = 1.7
    cutymax = 5.7
    cutgrid = grid.cut(cutxmin, cutxmax, cutymin, cutymax, align=True)
    output = np.array([[7, 8],
                       [12, 13],
                       [17, 18],
                       [22, 23]])
    np.testing.assert_almost_equal(cutgrid.getData(), output)
    print('Passed cut with self-alignment.')
Example #10
def test_setData():
    data = np.arange(0, 16).astype(np.float32).reshape(4, 4)
    geodict = GeoDict({
        'xmin': 0.5,
        'xmax': 3.5,
        'ymin': 0.5,
        'ymax': 3.5,
        'dx': 1.0,
        'dy': 1.0,
        'ny': 4,
        'nx': 4
    })
    grid1 = Grid2D(data, geodict)
    x = np.ones((4, 4))
    try:
        grid1.setData(x)  #this should pass
        print('setData test passed.')
    except DataSetException as dse:
        print('setData test failed.')
    try:
        x = np.ones((5, 5))
        grid1.setData(x)
        print('setData test did not fail when it should have.')
    except DataSetException as dse:
        print('setData test failed as expected.')

    try:
        x = 'fred'
        grid1.setData(x)
        print('setData test did not fail when it should have.')
    except DataSetException as dse:
        print('setData test failed as expected.')
Example #11
def test_write():
    data = np.arange(0, 25).reshape(5, 5).astype(np.float32)
    gdict = {
        'xmin': 5.0,
        'xmax': 9.0,
        'ymin': 4.0,
        'ymax': 8.0,
        'dx': 1.0,
        'dy': 1.0,
        'nx': 5,
        'ny': 5
    }
    gd = GeoDict(gdict)
    grid = Grid2D(data, gd)

    for format_type in ['netcdf', 'esri', 'hdf']:
        tdir = tempfile.mkdtemp()
        fname = os.path.join(tdir, 'tempfile.grd')
        try:
            write(grid, fname, format_type)
            src = rasterio.open(fname, 'r')
            tdata = src.read(1)
            np.testing.assert_almost_equal(tdata, data)
        except Exception as e:
            raise e
        finally:
            shutil.rmtree(tdir)
Example #12
def getNoDataGrid(predictors,xmin,xmax,ymin,ymax):
    txmin = xmin
    txmax = xmax
    tymin = ymin
    tymax = ymax
    mindx = 9999999999
    mindy = 9999999999
    #figure out bounds enclosing all files
    for predname,predfile in predictors.items():
        if not os.path.isfile(predfile):
            continue
        ftype = getFileType(predfile)
        if ftype == 'shapefile':
            f = fiona.open(predfile,'r')
            bxmin,bymin,bxmax,bymax = f.bounds
            f.close()
            if bxmin < txmin:
                txmin = bxmin
            if bxmax > txmax:
                txmax = bxmax
            if bymin < tymin:
                tymin = bymin
            if bymax > tymax:
                tymax = bymax
        elif ftype == 'grid':
            gridtype = getGridType(predfile)
            if gridtype is None:
                raise Exception('File "%s" does not appear to be either a GMT grid or an ESRI grid.' % predfile)
            fdict = getFileGeoDict(predfile,gridtype)
            if fdict.dx < mindx:
                mindx = fdict.dx
            if fdict.dy < mindy:
                mindy = fdict.dy
            if fdict.xmin < txmin:
                txmin = fdict.xmin
            if fdict.xmax > txmax:
                txmax = fdict.xmax
            if fdict.ymin < tymin:
                tymin = fdict.ymin
            if fdict.ymax > tymax:
                tymax = fdict.ymax
    sdict = GeoDict.createDictFromBox(txmin,txmax,tymin,tymax,mindx,mindy)
    nanarray = np.zeros((sdict.ny,sdict.nx),dtype=np.int8)
    for predname,predfile in predictors.items():
        if not os.path.isfile(predfile):
            continue
        ftype = getFileType(predfile)
        if ftype == 'shapefile':
            shapes = list(fiona.open(predfile,'r'))
            grid = Grid2D.rasterizeFromGeometry(shapes,sdict)
        else:
            gridtype = getGridType(predfile)
            if gridtype == 'gmt':
                grid = GMTGrid.load(predfile,samplegeodict=sdict,resample=True,method='nearest',doPadding=True)
            else:
                grid = GDALGrid.load(predfile,samplegeodict=sdict,resample=True,method='nearest',doPadding=True)
        nangrid = np.isnan(grid.getData())
        nanarray = nanarray | nangrid
    nangrid = Grid2D(data=nanarray,geodict=sdict)
    return nangrid
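A hypothetical call is sketched below; the predictor paths are placeholders, and the helpers used above (getFileType, getFileGeoDict, GMTGrid, GDALGrid) are assumed to be importable alongside getNoDataGrid:

predictors = {'slope': '/data/layers/slope_max.grd',    # placeholder paths
              'geology': '/data/layers/geology.shp'}
nodata = getNoDataGrid(predictors, xmin=-120.0, xmax=-114.0,
                       ymin=32.0, ymax=37.0)
print(nodata.getGeoDict())  # union of input and file bounds at the finest grid resolution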
Example #13
def get_data_range_test():
    # a standard global grid, going from -180 to 180
    normal_dict = GeoDict({'xmin': -180, 'xmax': 120,
                           'ymin': -90, 'ymax': 90,
                           'dx': 60, 'dy': 45,
                           'nx': 6, 'ny': 5})

    # test a simple example which does NOT cross the 180 meridian
    sample1 = (-125, 65, -20, 20)
    dict1 = Grid2D.getDataRange(normal_dict, sample1)
    cdict1 = {'iulx1': 0, 'iuly1': 1,
              'ilrx1': 6, 'ilry1': 4}
    assert dict1 == cdict1

    # test a less-simple example which DOES cross the 180 meridian
    sample2 = (-235, -10, -20, 20)
    dict2 = Grid2D.getDataRange(normal_dict, sample2)
    cdict2 = {'iulx1': 5, 'iuly1': 1,
              'ilrx1': 6, 'ilry1': 4,
              'iulx2': 0, 'iuly2': 1,
              'ilrx2': 4, 'ilry2': 4}
    assert dict2 == cdict2

    # test a less-simple example which DOES cross the 180 meridian, and xmin > xmax
    sample3 = (125, -10, -20, 20)
    dict3 = Grid2D.getDataRange(normal_dict, sample3)
    cdict3 = {'iulx1': 5, 'iuly1': 1,
              'ilrx1': 6, 'ilry1': 4,
              'iulx2': 0, 'iuly2': 1,
              'ilrx2': 4, 'ilry2': 4}
    assert dict3 == cdict3

    # test an example where the sample bounds are from 0 to 360
    sample4 = (160, 200, -20, 20)
    dict4 = Grid2D.getDataRange(normal_dict, sample4)
    cdict4 = {'iulx1': 5, 'iuly1': 1,
              'ilrx1': 6, 'ilry1': 4,
              'iulx2': 0, 'iuly2': 1,
              'ilrx2': 2, 'ilry2': 4}
    assert dict4 == cdict4

    # test an example where the sample bounds are from 0 to 360
    sample5 = (220, 260, -20, 20)
    dict5 = Grid2D.getDataRange(normal_dict, sample5)
    cdict5 = {'iulx1': 0, 'iuly1': 1,
              'ilrx1': 3, 'ilry1': 4}
    assert dict5 == cdict5
Example #14
def get_data_range_test():
    #a standard global grid, going from -180 to 180
    normal_dict = GeoDict({'xmin':-180,'xmax':120,
                           'ymin':-90,'ymax':90,
                           'dx':60,'dy':45,
                           'nx':6,'ny':5})

    #test a simple example which does NOT cross the 180 meridian
    sample1 = (-125,65,-20,20)
    dict1 = Grid2D.getDataRange(normal_dict,sample1)
    cdict1 = {'iulx1':0,'iuly1':1,
              'ilrx1':6,'ilry1':4}
    assert dict1 == cdict1

    #test a less-simple example which DOES cross the 180 meridian
    sample2 = (-235,-10,-20,20)
    dict2 = Grid2D.getDataRange(normal_dict,sample2)
    cdict2 = {'iulx1':5,'iuly1':1,
              'ilrx1':6,'ilry1':4,
              'iulx2':0,'iuly2':1,
              'ilrx2':4,'ilry2':4}
    assert dict2 == cdict2
    
    #test a less-simple example which DOES cross the 180 meridian, and xmin > xmax
    sample3 = (125,-10,-20,20)
    dict3 = Grid2D.getDataRange(normal_dict,sample3)
    cdict3 = {'iulx1':5,'iuly1':1,
              'ilrx1':6,'ilry1':4,
              'iulx2':0,'iuly2':1,
              'ilrx2':4,'ilry2':4}
    assert dict3 == cdict3

    #test an example where the sample bounds are from 0 to 360
    sample4 = (160,200,-20,20)
    dict4 = Grid2D.getDataRange(normal_dict,sample4)
    cdict4 = {'iulx1':5,'iuly1':1,
              'ilrx1':6,'ilry1':4,
              'iulx2':0,'iuly2':1,
              'ilrx2':2,'ilry2':4}
    assert dict4 == cdict4
 
    #test an example where the sample bounds are from 0 to 360
    sample5 = (220,260,-20,20)
    dict5 = Grid2D.getDataRange(normal_dict,sample5)
    cdict5 = {'iulx1':0,'iuly1':1,
              'ilrx1':3,'ilry1':4}
    assert dict5 == cdict5
Example #15
def test_copy():
    data = np.arange(0,16).astype(np.float32).reshape(4,4)
    geodict = GeoDict({'xmin':0.5,'xmax':3.5,'ymin':0.5,'ymax':3.5,'dx':1.0,'dy':1.0,'ny':4,'nx':4})
    grid1 = Grid2D(data,geodict)
    grid2 = grid1.copyFromGrid(grid1)
    grid1._data[0,0] = np.nan
    print(grid2._data)
    print(grid2._geodict)
Example #16
    def createFromCenter(cls,
                         cx,
                         cy,
                         xspan,
                         yspan,
                         dx,
                         dy,
                         defaultVs30=686.0,
                         vs30File=None,
                         vs30measured_grid=None,
                         backarc=False,
                         padding=False,
                         resample=False):
        """
        Create a Sites object by defining a center point, resolution, extent, 
        and Vs30 values.

        :param cx:
            X coordinate of desired center point.
        :param cy:
            Y coordinate of desired center point.
        :param xspan:
            Width of desired grid.
        :param yspan:
            Height of desired grid.
        :param dx:
            Resolution of desired grid in X direction.
        :param dy:
            Resolution of desired grid in Y direction.
        :param defaultVs30:
            Default Vs30 value to use if vs30File not specified.
        :param vs30File:
            Name of GMT or GDAL format grid file containing Vs30 values.
        :param vs30measured_grid:
            Boolean grid indicating whether Vs30 values were measured or derived 
            (i.e., from slope)
        :param backarc:
            Boolean indicating whether event is on the backarc as defined
            `here <http://earthquake.usgs.gov/learn/glossary/?term=backarc>`__.
        :param padding:
            Boolean indicating whether or not to pad resulting Vs30 grid out to
            edges of input bounds. If False, grid will be clipped to the extent
            of the input file.
        :param resample:
            Boolean indicating whether or not the grid should be resampled.
        """
        geodict = GeoDict.createDictFromCenter(cx, cy, dx, dy, xspan, yspan)
        if vs30File is not None:
            vs30grid = cls._create(geodict, defaultVs30, vs30File, padding,
                                   resample)
        else:
            griddata = np.ones(
                (geodict.ny, geodict.nx), dtype=np.float64) * defaultVs30
            vs30grid = Grid2D(griddata, geodict)
        return cls(vs30grid,
                   vs30measured_grid=vs30measured_grid,
                   backarc=backarc,
                   defaultVs30=defaultVs30)
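A hypothetical call with the default Vs30 (no vs30File), assuming the classmethod is exposed on a Sites class as the docstring describes; the coordinates and spans are illustrative:

sites = Sites.createFromCenter(cx=-118.0, cy=34.0,
                               xspan=2.0, yspan=2.0,
                               dx=0.0083, dy=0.0083,
                               defaultVs30=760.0)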
Example #17
def test_basics():
    geodict = GeoDict({
        'xmin': 0.5,
        'xmax': 3.5,
        'ymin': 0.5,
        'ymax': 3.5,
        'dx': 1.0,
        'dy': 1.0,
        'ny': 4,
        'nx': 4
    })
    data = np.arange(0, 16).reshape(4, 4).astype(np.float32)
    grid = Grid2D(data, geodict)
    print(
        'Testing basic Grid2D functionality (retrieving data, lat/lon to pixel coordinates, etc.)...'
    )
    np.testing.assert_almost_equal(grid.getData(), data)

    assert grid.getGeoDict() == geodict

    assert grid.getBounds() == (geodict.xmin, geodict.xmax, geodict.ymin,
                                geodict.ymax)

    lat, lon = grid.getLatLon(0, 0)

    assert lat == 3.5 and lon == 0.5

    row, col = grid.getRowCol(lat, lon)

    assert row == 0 and col == 0

    value = grid.getValue(lat, lon)

    assert value == 0

    frow, fcol = grid.getRowCol(1.0, 3.0, returnFloat=True)

    assert frow == 2.5 and fcol == 2.5

    irow, icol = grid.getRowCol(1.0, 3.0, returnFloat=False)

    assert irow == 2 and icol == 2

    #test getting values in and outside of the grid bounds
    lat = np.array([0.0, 0.5, 2.5, 4.0])
    lon = np.array([0.0, 0.5, 2.5, 4.0])
    default = np.nan
    output = np.array([np.nan, 12, 6, np.nan])
    value = grid.getValue(lat, lon, default=default)

    np.testing.assert_almost_equal(value, output)

    print(
        'Passed basic Grid2D functionality (retrieving data, lat/lon to pixel coordinates, etc.)...'
    )
Example #18
    def getLossByShapes(self,
                        mmidata,
                        popdata,
                        isodata,
                        shapes,
                        geodict,
                        eventyear=None,
                        gdpobj=None):
        """Divide the losses calculated per grid cell into polygons that intersect with the grid.

        :param mmidata:
          Array of MMI values, dimensions (M,N).
        :param popdata:
          Array of population values, dimensions (M,N).
        :param isodata:
          Array of numeric country code values, dimensions (M,N).
        :param shapes:
          Sequence of GeoJSON-like polygons as returned from fiona.open().
        :param eventyear:
          4-digit event year; must not be None if loss type is economic.
        :param gdpobj:
          GDP object, containing per capita GDP data from all countries.  
          Must not be None if calculating economic losses.
        :returns:
          Tuple of:
            1) modified sequence of polygons, including a new field "fatalities" or "dollars_lost".
            2) Total number of losses in all polygons.
        """
        lossgrid = self.getLossGrid(mmidata, popdata, isodata)
        polyshapes = []
        totloss = 0
        if self._loss_type == 'fatality':
            fieldname = 'fatalities'
        else:
            fieldname = 'dollars_lost'
        for polyrec in shapes:
            polygon = shapely.geometry.shape(polyrec['geometry'])
            #overlay the polygon on top of a grid, turn polygon pixels to 1, non-polygon pixels to 0.
            tgrid = Grid2D.rasterizeFromGeometry([polygon],
                                                 geodict,
                                                 fillValue=0,
                                                 burnValue=1.0,
                                                 attribute='value',
                                                 mustContainCenter=True)
            #get the indices of the polygon cells
            shapeidx = tgrid.getData() == 1.0
            #get the sum of those cells in the loss grid
            losses = np.nansum(lossgrid[shapeidx])
            polyrec['properties'][fieldname] = int(losses)
            polyshapes.append(polyrec)
            totloss += int(losses)

        return (polyshapes, totloss)
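A hypothetical invocation; model stands in for an instance of the loss class that defines this method, the shapefile path is a placeholder, and the MMI/population/country-code arrays plus the matching geodict are assumed to be prepared elsewhere:

import fiona

with fiona.open('admin_regions.shp') as src:   # placeholder shapefile of polygons
    shapes = list(src)
polyshapes, total_loss = model.getLossByShapes(mmidata, popdata, isodata,
                                               shapes, geodict)
print('Total estimated losses: %i' % total_loss)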
Example #19
def _get_average_grid(gc, contents, myimt):
    """
    Given an SA(X) IMT, attempt to find the grids that bracket its
    period and return an interpolated grid that is a weighted average
    (weighted by the (log) differences in period). If the period
    is less than the lowest, or greater than the highest, available
    period, then the closest endpoint grid is returned.

    Args:
        gc (GridHDFContainer): The container holding the amplification
            grids, labeled by IMT string.
        contents (list): A list of the IMTs available in gc.
        myimt (str): The target IMT; must be of type "SA(X)".

    Returns:
        tuple: A grid and its associated metadata.
    """

    #
    # Make a list of the SA IMTs, add the target IMT to the list
    # and then sort by period.
    #
    imt_list = [thisimt for thisimt in contents if thisimt.startswith('SA(')]
    if len(imt_list) == 0:
        logging.warning('Generic Amp Factors: No SA grids in file')
        return None, None
    imt_list.append(myimt)
    imt_list_sorted = sorted(imt_list, key=get_period_from_imt)
    nimt = len(imt_list_sorted)
    ix = imt_list_sorted.index(myimt)
    if ix == 0:
        logging.warning("Generic Amp Factors:IMT %s less than min available "
                        "imt, using %s" % (myimt, imt_list_sorted[1]))
        return gc.getGrid(imt_list_sorted[1])
    elif ix == (nimt - 1):
        logging.warning("Generic Amp Factors:IMT %s greater than max "
                        "available imt, using %s" %
                        (myimt, imt_list_sorted[-2]))
        return gc.getGrid(imt_list_sorted[-2])
    else:
        # Interpolate using (log) period: p1 is the shorter period,
        # p2 is the longer period, and p0 is the target period.
        g1, md1 = gc.getGrid(imt_list_sorted[ix - 1])
        g2, md1 = gc.getGrid(imt_list_sorted[ix + 1])
        p1 = np.log(get_period_from_imt(imt_list_sorted[ix - 1]))
        p2 = np.log(get_period_from_imt(imt_list_sorted[ix + 1]))
        p0 = np.log(get_period_from_imt(myimt))
        w1 = (p2 - p0) / (p2 - p1)
        w2 = 1.0 - w1
        gmean = g1.getData() * w1 + g2.getData() * w2
        return Grid2D(gmean, g1.getGeoDict()), md1
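For reference, the log-period weighting above works out as follows for a hypothetical SA(0.75) target bracketed by SA(0.3) and SA(1.0) grids:

import numpy as np

p1, p2, p0 = np.log(0.3), np.log(1.0), np.log(0.75)
w1 = (p2 - p0) / (p2 - p1)   # weight on the shorter-period grid, ~0.24
w2 = 1.0 - w1                # weight on the longer-period grid, ~0.76
# the returned grid data would then be g1.getData() * w1 + g2.getData() * w2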
Example #20
def make_generic_amps():
    imts = ['PGA', 'SA(0.3)', 'SA(1.0)', 'SA(3.0)']
    install_path, _ = get_config_paths()
    geodict = {
        'dx': 0.016666666666666666,
        'dy': 0.016666666666666666,
        'nx': 301,
        'ny': 151,
        'xmax': -116.0,
        'xmin': -121.0,
        'ymax': 35.5,
        'ymin': 33.0
    }
    gd = GeoDict(geodict)

    # make east-west file (1s on the left, 0s on the right)
    data = np.ones((gd.ny, gd.nx))
    data[:, 151:] = 0
    outfolder = os.path.join(install_path, 'data', 'GenericAmpFactors')
    east_west_file = os.path.join(outfolder, 'Test_basin_east_west.hdf')
    east_west = GridHDFContainer.create(east_west_file)
    for imt in imts:
        grid = Grid2D(data, gd)
        east_west.setGrid(imt, grid)
    east_west.close()

    # make north-south file (1s on the top, 0s on the bottom)
    data = np.ones((gd.ny, gd.nx))
    data[76:151, :] = 0
    outfolder = os.path.join(install_path, 'data', 'GenericAmpFactors')
    north_south_file = os.path.join(outfolder, 'Test_basin_north_south.hdf')
    north_south = GridHDFContainer.create(north_south_file)
    for imt in imts:
        grid = Grid2D(data, gd)
        north_south.setGrid(imt, grid)
    north_south.close()

    return (east_west_file, north_south_file)
Example #21
def create_overlay_image(container, filename):
    """Create a semi-transparent PNG image of intensity.

    Args:
        container (ShakeMapOutputContainer): Results of model.conf.
        filename (str): Path to desired output PNG file.
    Returns:
        GeoDict: GeoDict object for the intensity grid.
    """
    # extract the intensity data from the container
    comp = container.getComponents('MMI')
    if len(comp) == 0:
        return None
    comp = comp[0]
    imtdict = container.getIMTGrids('MMI', comp)
    mmigrid = imtdict['mean']
    gd = GeoDict(imtdict['mean_metadata'])
    imtdata = mmigrid.copy()
    rows, cols = imtdata.shape

    # get the intensity colormap
    palette = ColorPalette.fromPreset('mmi')

    # map intensity values into
    # RGBA array
    rgba = palette.getDataColor(imtdata, color_format='array')

    # make pixels transparent (alpha = 0) wherever MMI <= 1.5
    rgba[imtdata <= 1.5] = 0

    if 'CALLED_FROM_PYTEST' not in os.environ:
        # mask off the areas covered by ocean
        oceans = shpreader.natural_earth(category='physical',
                                         name='ocean',
                                         resolution='10m')
        bbox = (gd.xmin, gd.ymin, gd.xmax, gd.ymax)
        with fiona.open(oceans) as c:
            tshapes = list(c.items(bbox=bbox))
            shapes = []
            for tshp in tshapes:
                shapes.append(shape(tshp[1]['geometry']))
            if len(shapes):
                oceangrid = Grid2D.rasterizeFromGeometry(shapes, gd,
                                                         fillValue=0.0)
                rgba[oceangrid.getData() == 1] = 0

    # save rgba image as png
    img = Image.fromarray(rgba)
    img.save(filename)
    return gd
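A hypothetical call is sketched below; the file names are placeholders, and ShakeMapOutputContainer.load() is assumed to behave like the GridHDFContainer.load() call in the earlier container example:

container = ShakeMapOutputContainer.load('shake_result.hdf')  # placeholder path
gd = create_overlay_image(container, 'intensity_overlay.png')
if gd is not None:
    print(gd.xmin, gd.xmax, gd.ymin, gd.ymax)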
Example #22
def big_test():
    xmin = -180
    xmax = -170
    ymin = 30
    ymax = 40
    dx = 0.0083
    dy = 0.0083
    gd = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    data = np.random.rand(gd.ny, gd.nx)
    grid = Grid2D(data, gd)
    fname = os.path.join(os.path.expanduser('~'), 'tempfile.grd')
    write(grid, fname, 'hdf')
    print(fname)
    src = rasterio.open(fname, 'r')
Example #23
    def fromBounds(cls,
                   xmin,
                   xmax,
                   ymin,
                   ymax,
                   dx,
                   dy,
                   defaultVs30=686.0,
                   vs30File=None,
                   vs30measured_grid=None,
                   backarc=None,
                   padding=False,
                   resample=False):
        """
        Create a Sites object by defining a center point, resolution, extent,
        and Vs30 values.

        Args:
            xmin: X coordinate of left edge of bounds.
            xmax: X coordinate of right edge of bounds.
            ymin: Y coordinate of bottom edge of bounds.
            ymax: Y coordinate of top edge of bounds.
            dx: Resolution of desired grid in X direction.
            dy: Resolution of desired grid in Y direction.
            defaultVs30: Default Vs30 value to use if vs30File not specified.
            vs30File: Name of GMT or GDAL format grid file containing Vs30
                values.
            vs30measured_grid: Boolean grid indicating whether Vs30 values were
                measured or derived (i.e., from slope).
            backarc: Boolean array indicating whether site is in the subduction
                `backarc <http://earthquake.usgs.gov/learn/glossary/?term=backarc>`__.
            padding: Boolean indicating whether or not to pad resulting Vs30
                grid out to edges of input bounds. If False, grid will be
                clipped to the extent of the input file.
            resample: Boolean indicating whether or not the grid should be
                resampled.
        """  # noqa
        geodict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
        if vs30File is not None:
            vs30grid = cls._create(geodict, defaultVs30, vs30File, padding,
                                   resample)
        else:
            griddata = np.ones(
                (geodict.ny, geodict.nx), dtype=np.float64) * defaultVs30
            vs30grid = Grid2D(griddata, geodict)
        return cls(vs30grid,
                   vs30measured_grid=vs30measured_grid,
                   backarc=backarc,
                   defaultVs30=defaultVs30)
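A hypothetical call with explicit bounds and a Vs30 grid file; the path is a placeholder, and omitting vs30File falls back to defaultVs30:

sites = Sites.fromBounds(xmin=-119.0, xmax=-117.0,
                         ymin=33.0, ymax=35.0,
                         dx=0.0083, dy=0.0083,
                         vs30File='/data/vs30/global_vs30.grd',  # placeholder
                         padding=True, resample=False)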
Example #24
def create_overlay_image(container, oceanfile, filename):
    """Create a semi-transparent PNG image of intensity.

    Args:
        container (ShakeMapOutputContainer): Results of model.conf.
        oceanfile (str): Path to shapefile containing ocean polygons.
        filename (str): Path to desired output PNG file.
    Returns:
        GeoDict: GeoDict object for the intensity grid.
    """
    # extract the intensity data from the container
    comp = container.getComponents('MMI')[0]
    imtdict = container.getIMTGrids('MMI', comp)
    mmigrid = imtdict['mean']
    gd = mmigrid.getGeoDict()
    imtdata = mmigrid.getData().copy()
    rows, cols = imtdata.shape

    # get the intensity colormap
    palette = ColorPalette.fromPreset('mmi')

    # map intensity values into
    # RGBA array
    rgba = palette.getDataColor(imtdata, color_format='array')

    # make pixels transparent (alpha = 0) wherever MMI <= 1.5
    rgba[imtdata <= 1.5] = 0

    # mask off the areas covered by ocean
    bbox = (gd.xmin, gd.ymin, gd.xmax, gd.ymax)
    with fiona.open(oceanfile) as c:
        tshapes = list(c.items(bbox=bbox))
        shapes = []
        for tshp in tshapes:
            shapes.append(shape(tshp[1]['geometry']))
        if len(shapes):
            oceangrid = Grid2D.rasterizeFromGeometry(shapes, gd, fillValue=0.0)
            rgba[oceangrid.getData() == 1] = 0

    # save rgba image as png
    img = Image.fromarray(rgba)
    img.save(filename)
    return gd
Example #25
    def calcExposure(self, shakefile):
        """Calculate population exposure to shaking.

        Calculate population exposure to shaking, per country, multiplied by event-year per-capita GDP and 
        alpha correction factor.  Also multiply the internal population grid by GDP and 
        alpha.

        :param shakefile:
          Path to ShakeMap grid.xml file.
        :returns:
          Dictionary containing country code (ISO2) keys, and values of
          10 element arrays representing population exposure to MMI 1-10.
          Dictionary will contain an additional key 'Total', with value of exposure across all countries.
        """
        #create a copy of population grid to hold population * gdp * alpha
        expdict = super(EconExposure, self).calcExposure(shakefile)
        self._econpopgrid = Grid2D.copyFromGrid(self._popgrid)
        econdict = {}
        isodata = self._isogrid.getData()
        eventyear = self.getShakeGrid().getEventDict()['event_timestamp'].year
        total = np.zeros((10, ))
        for ccode, exparray in expdict.items():
            if ccode.find('Total') > -1:
                continue
            if ccode == 'UK':  #unknown
                continue
            lossmodel = self._emploss.getModel(ccode)
            gdp, outccode = self._gdp.getGDP(ccode, eventyear)
            isocode = self._country.getCountry(ccode)['ISON']
            alpha = lossmodel.alpha
            econarray = exparray * gdp * alpha
            cidx = (isodata == isocode)
            #multiply the population grid by GDP and alpha, so that when the loss model
            #queries the grid later, those calculations don't have to be re-done.
            self._econpopgrid._data[
                cidx] = self._econpopgrid._data[cidx] * gdp * alpha
            econdict[ccode] = econarray
            total += econarray

        econdict['TotalEconomicExposure'] = total
        return econdict
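A hypothetical use of the returned dictionary; econexp stands in for an EconExposure instance, and the grid.xml path is a placeholder:

expdict = econexp.calcExposure('/data/events/us1000abcd/grid.xml')
for ccode, exposure in expdict.items():
    # each value is a 10-element array of economic exposure at MMI 1-10
    print(ccode, exposure.sum())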
Example #26
    def getGrid(self, name):
        """
        Retrieve a Grid2D object and any associated metadata from the container.

        Args:
            name (str):
                The name of the Grid2D object stored in the container.

        Returns:
            (tuple) Grid2D object, and a dictionary of metadata.
        """
        array_name = '__grid_%s__' % name
        if array_name not in self._hdfobj:
            raise LookupError('Array %s not in %s' %
                              (name, self.getFileName()))
        dset = self._hdfobj[array_name]
        data = dset[()]
        array_metadata, meta_metadata = _split_dset_attrs(dset)
        geodict = GeoDict(array_metadata)
        grid = Grid2D(data, geodict)
        return grid, meta_metadata
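A minimal round trip mirroring test_grid_hdf_container() earlier in this listing; the file and grid names are illustrative, and GridHDFContainer, GeoDict and Grid2D are assumed to be imported as there:

import numpy as np

container = GridHDFContainer.create('example_grids.hdf')
gd = GeoDict.createDictFromBox(-118.5, -114.5, 32.1, 36.7, 0.01, 0.02)
grid = Grid2D(np.random.rand(gd.ny, gd.nx), gd)
container.setGrid('vs30', grid, metadata={'units': 'm/s'})
outgrid, metadata = container.getGrid('vs30')  # raises LookupError if the name is absent
container.close()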
Example #27
def test_interpolate():
    geodict = GeoDict({'xmin':0.5,'xmax':6.5,'ymin':1.5,'ymax':6.5,'dx':1.0,'dy':1.0,'ny':6,'nx':7})
    data = np.arange(14,56).reshape(6,7)
    
    for method in ['nearest','linear','cubic']:
        print('Testing interpolate with method "%s"...' % method)
        grid = Grid2D(data,geodict)
        sampledict = GeoDict({'xmin':3.0,'xmax':4.0,
                              'ymin':3.0,'ymax':4.0,
                              'dx':1.0,'dy':1.0,
                              'ny':2,'nx':2})
        grid = grid.interpolateToGrid(sampledict,method=method)
        if method == 'nearest':
            output = np.array([[30.0,31.0],[37.0,38.0]])
        elif method == 'linear':
            output = np.array([[34.,35.],[41.,42.]])
        elif method == 'cubic':
            output = np.array([[34.,35.],[41.,42.]])
        else:
            pass
        np.testing.assert_almost_equal(grid.getData(),output)
        print('Passed interpolate with method "%s".' % method)
Example #28
def test_getvalue():
    array = np.arange(1, 26).reshape(5, 5)
    gdict = GeoDict({'xmin': 1.0,
                     'xmax': 5.0,
                     'ymin': 1.0,
                     'ymax': 5.0,
                     'dx': 1.0,
                     'dy': 1.0,
                     'nx': 5,
                     'ny': 5})
    grid = Grid2D(array, gdict)
    assert grid.getValue(3.0, 3.0) == 13
    lat = np.array([3.0, 4.0])
    lon = np.array([3.0, 3.0])
    test = grid.getValue(lat, lon)
    np.testing.assert_almost_equal(test, np.array([13, 8]))
    lat = np.array([[3.0, 4.0],
                    [4.0, 5.0]])
    lon = np.array([[3.0, 3.0],
                    [4.0, 4.0]])
    test = grid.getValue(lat, lon)
    np.testing.assert_almost_equal(test, np.array([[13,  8], [9,  4]]))
Example #29
    def updateSequences(self, stime):
        etime = stime + timedelta(days=1)
        events = search(starttime=stime,
                        endtime=etime,
                        minlatitude=-90,
                        maxlatitude=90,
                        minlongitude=-180,
                        maxlongitude=180,
                        minmagnitude=0.0,
                        maxmagnitude=9.9)
        todayframe = get_summary_data_frame(events)
        todaydata = get_day_counts(GDICT, todayframe)
        todaygrid = Grid2D(data=todaydata, geodict=GDICT)
        for row in range(0, GDICT.ny):
            for col in range(0, GDICT.nx):
                if row == 19 and col == 29:
                    foo = 1
                clat, clon = GDICT.getLatLon(row, col)
                tvalue = todaygrid._data[row, col]
                mvalue = self._meangrid._data[row, col]
                svalue = self._stdgrid._data[row, col]
                # thresh = tvalue > mvalue + svalue * 3
                thresh = tvalue > MINEQ
                xmin = clon - GDICT.dx / 2
                xmax = clon + GDICT.dx / 2
                ymin = clat - GDICT.dy / 2
                ymax = clat + GDICT.dy / 2
                if thresh:
                    c1 = todayframe['latitude'] > ymin
                    c2 = todayframe['latitude'] <= ymax
                    c3 = todayframe['longitude'] > xmin
                    c4 = todayframe['longitude'] <= xmax
                    cluster = todayframe[c1 & c2 & c3 & c4].copy()
                    class_frame, pproj = self.get_clusters(cluster, clon, clat)
                    self.insertSequences(class_frame, pproj)
        # call a method that filters out clusters that don't match the definition
        # of an earthquake sequence.
        self.cleanSequences()
Example #30
    def getSitesContext(self, lldict=None, rock_vs30=None):
        """
        Create a SitesContext object by sampling the current Sites object.

        Args:
            lldict: Either

                - None, in which case the SitesContext for the complete Sites
                  grid is returned, or
                - A location dictionary (elements are 'lats' and 'lons' and
                  each is a numpy array). Each element must have the same
                  shape. In this case the SitesContext for these locations is
                  returned.

            rock_vs30: Either

                - None, in which case the SitesContext will reflect the Vs30
                  grid in the Sites instance, or
                - A float for the rock Vs30 value, in which case the
                  SitesContext will be constructed for this constant Vs30
                  value.

        Returns:
            SitesContext object.

        Raises:
            ShakeLibException: When lat/lon input sequences do not share
                dimensionality.

        """  # noqa

        sctx = SitesContext()

        if lldict is not None:
            lats = lldict['lats']
            lons = lldict['lons']
            latshape = lats.shape
            lonshape = lons.shape
            if latshape != lonshape:
                msg = 'Input lat/lon arrays must have the same dimensions'
                raise ShakeLibException(msg)

            if rock_vs30 is not None:
                tmp = self._Vs30.getValue(
                    lats, lons, default=self._defaultVs30)
                sctx.vs30 = np.ones_like(tmp) * rock_vs30
            else:
                sctx.vs30 = self._Vs30.getValue(
                    lats, lons, default=self._defaultVs30)
            sctx.lats = lats
            sctx.lons = lons
        else:
            sctx.lats = self._lats.copy()
            sctx.lons = self._lons.copy()
            if rock_vs30 is not None:
                sctx.vs30 = np.full_like(self._Vs30.getData(), rock_vs30)
            else:
                sctx.vs30 = self._Vs30.getData().copy()

        sctx = Sites._addDepthParameters(sctx)

        # For ShakeMap purposes, vs30 measured is always False
        sctx.vs30measured = np.zeros_like(sctx.vs30, dtype=bool)

        # Backarc should be a numpy array
        if lldict is not None:
            backarcgrid = Grid2D(self._backarc, self._Vs30.getGeoDict())
            sctx.backarc = backarcgrid.getValue(lats, lons, default=False)
        else:
            sctx.backarc = self._backarc.copy()

        return sctx
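A hypothetical sampling call; sites stands in for a Sites instance such as one built with fromBounds() elsewhere in this listing, and the coordinates are illustrative:

import numpy as np

lldict = {'lats': np.array([34.0, 34.1]),
          'lons': np.array([-118.0, -118.1])}
sctx = sites.getSitesContext(lldict=lldict)         # Vs30 sampled from the grid
sctx_rock = sites.getSitesContext(lldict=lldict,
                                  rock_vs30=760.0)  # constant rock Vs30
print(sctx.vs30, sctx_rock.vs30)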
Example #31
def computeCoverage(gdict,
                    inventory,
                    numdiv=30.,
                    method='nearest',
                    proj='moll'):
    """Fast method to produce grid of area actually affected by landsliding in each cell defined by geodict

    :param gdict: geodict, likely taken from model to compare inventory against
    :param inventory: full file path to shapefile of inventory, must be in geographic coordinates, WGS84
    :type inventory: string
    :param numdiv: Approximate amount to subdivide each cell of geodict by to compute areas (higher numbers are slower but more accurate)
    :param method: Method for resampling when projecting back to geographic coordinates; 'nearest' is recommended but not perfect. Cubic is not recommended.
    :param proj: PROJ.4 projection name used for the equal-area projection in which areas are computed (default 'moll')

    :returns: Grid2D object reporting the proportional area of landsliding (approximate areal coverage of the input inventory) inside each cell defined by geodict
    """

    lat0 = np.mean((gdict.ymin, gdict.ymax))
    lon0 = np.mean((gdict.xmin, gdict.xmax))
    gdsubdiv = {
        'xmin': gdict.xmin,
        'xmax': gdict.xmax,
        'ymin': gdict.ymin,
        'ymax': gdict.ymax,
        'dx': gdict.dx / numdiv,
        'dy': gdict.dy / numdiv,
        'ny': gdict.ny * numdiv,
        'nx': gdict.nx * numdiv
    }
    subgd = GeoDict(gdsubdiv, adjust='res')

    f = fiona.open(inventory)

    invshp = list(f.items())
    f.close()
    shapes = [shape(inv[1]['geometry']) for inv in invshp]

    # Rasterize with oversampled area
    rast = Grid2D.rasterizeFromGeometry(shapes,
                                        subgd,
                                        fillValue=0.,
                                        burnValue=1.0,
                                        mustContainCenter=True)

    # Transform to equal area projection
    projs = '+proj=%s +datum=WGS84 +lat_0=%0.5f +lon_0=%0.5F +units=meters +x_0=0 +y_0=0' % (
        proj, lat0, lon0)
    equal_area = rast.project(projection=projs)
    egdict = equal_area.getGeoDict()

    gdds = {
        'xmin': egdict.xmin,
        'xmax': egdict.xmax,
        'ymin': egdict.ymin,
        'ymax': egdict.ymax,
        'dx': egdict.dx * numdiv,
        'dy': egdict.dy * numdiv,
        'ny': egdict.ny / numdiv,
        'nx': egdict.nx / numdiv
    }
    dsgd = GeoDict(gdds, adjust='res')

    # NEED METHOD THAT WILL USE BLOCK MEAN OR SUM
    eabig = equal_area.interpolateToGrid(dsgd, method='block_mean')

    # Project back
    eabigproj = eabig.project(projection=gdict.projection)

    # Resample to original grid
    inventorygrid = eabigproj.interpolateToGrid(gdict, method='linear')

    return inventorygrid
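A hypothetical call comparing a landslide inventory shapefile against a model grid; the shapefile path is a placeholder, and gdict is a GeoDict like those constructed in the other examples:

covgrid = computeCoverage(gdict,
                          '/data/inventories/event_inventory.shp',  # placeholder
                          numdiv=30., method='nearest')
print(covgrid.getData().max())  # proportional areal coverage per cell, between 0 and 1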
Example #32
def test_output_container():
    geodict = GeoDict.createDictFromBox(-118.5, -114.5, 32.1, 36.7, 0.01, 0.02)
    nrows, ncols = geodict.ny, geodict.nx

    # create MMI mean data for maximum component
    mean_mmi_maximum_data = np.random.rand(nrows, ncols)
    mean_mmi_maximum_metadata = {
        'name': 'Gandalf',
        'color': 'white',
        'powers': 'magic'
    }
    mean_mmi_maximum_grid = Grid2D(mean_mmi_maximum_data, geodict)

    # create MMI std data for maximum component
    std_mmi_maximum_data = mean_mmi_maximum_data / 10
    std_mmi_maximum_metadata = {
        'name': 'Legolas',
        'color': 'green',
        'powers': 'good hair'
    }
    std_mmi_maximum_grid = Grid2D(std_mmi_maximum_data, geodict)

    # create MMI mean data for rotd50 component
    mean_mmi_rotd50_data = np.random.rand(nrows, ncols)
    mean_mmi_rotd50_metadata = {
        'name': 'Gimli',
        'color': 'brown',
        'powers': 'axing'
    }
    mean_mmi_rotd50_grid = Grid2D(mean_mmi_rotd50_data, geodict)

    # create MMI std data for rotd50 component
    std_mmi_rotd50_data = mean_mmi_rotd50_data / 10
    std_mmi_rotd50_metadata = {
        'name': 'Aragorn',
        'color': 'white',
        'powers': 'scruffiness'
    }
    std_mmi_rotd50_grid = Grid2D(std_mmi_rotd50_data, geodict)

    # create PGA mean data for maximum component
    mean_pga_maximum_data = np.random.rand(nrows, ncols)
    mean_pga_maximum_metadata = {
        'name': 'Pippin',
        'color': 'purple',
        'powers': 'rashness'
    }
    mean_pga_maximum_grid = Grid2D(mean_pga_maximum_data, geodict)

    # create PGA std data for maximum component
    std_pga_maximum_data = mean_pga_maximum_data / 10
    std_pga_maximum_metadata = {
        'name': 'Merry',
        'color': 'grey',
        'powers': 'hunger'
    }
    std_pga_maximum_grid = Grid2D(std_pga_maximum_data, geodict)

    f, datafile = tempfile.mkstemp()
    os.close(f)
    try:
        container = ShakeMapOutputContainer.create(datafile)
        container.setIMTGrids('mmi',
                              mean_mmi_maximum_grid,
                              mean_mmi_maximum_metadata,
                              std_mmi_maximum_grid,
                              std_mmi_maximum_metadata,
                              component='maximum')
        container.setIMTGrids('mmi',
                              mean_mmi_rotd50_grid,
                              mean_mmi_rotd50_metadata,
                              std_mmi_rotd50_grid,
                              std_mmi_rotd50_metadata,
                              component='rotd50')
        container.setIMTGrids('pga',
                              mean_pga_maximum_grid,
                              mean_pga_maximum_metadata,
                              std_pga_maximum_grid,
                              std_pga_maximum_metadata,
                              component='maximum')

        # get the maximum MMI imt data
        mmi_max_dict = container.getIMTGrids('mmi', component='maximum')
        np.testing.assert_array_equal(mmi_max_dict['mean'].getData(),
                                      mean_mmi_maximum_data)
        np.testing.assert_array_equal(mmi_max_dict['std'].getData(),
                                      std_mmi_maximum_data)
        assert mmi_max_dict['mean_metadata'] == mean_mmi_maximum_metadata
        assert mmi_max_dict['std_metadata'] == std_mmi_maximum_metadata

        # get the rotd50 MMI imt data
        mmi_rot_dict = container.getIMTGrids('mmi', component='rotd50')
        np.testing.assert_array_equal(mmi_rot_dict['mean'].getData(),
                                      mean_mmi_rotd50_data)
        np.testing.assert_array_equal(mmi_rot_dict['std'].getData(),
                                      std_mmi_rotd50_data)
        assert mmi_rot_dict['mean_metadata'] == mean_mmi_rotd50_metadata
        assert mmi_rot_dict['std_metadata'] == std_mmi_rotd50_metadata

        # Check repr method
        assert repr(container) == '''Data type: grid
    use "getIMTGrids" method to access interpolated IMTs
Rupture: None
Config: None
Stations: None
Metadata: None
Available IMTs (components):
    mmi (maximum, rotd50)
    pga (maximum)
'''

        # get list of all imts
        imts = container.getIMTs()

        # get list of maximum imts
        max_imts = container.getIMTs(component='maximum')
        assert sorted(max_imts) == ['mmi', 'pga']

        # get list of components for mmi
        mmi_comps = container.getComponents('mmi')
        assert sorted(mmi_comps) == ['maximum', 'rotd50']

        # Test dropIMT
        imts = container.getIMTs('maximum')
        assert imts == ['mmi', 'pga']
        container.dropIMT('mmi')
        imts = container.getIMTs('maximum')
        assert imts == ['pga']
        container.close()

    except Exception as e:
        raise e
    finally:
        os.remove(datafile)
Ejemplo n.º 33
0
def draw_contour(shakegrid,
                 popgrid,
                 oceanfile,
                 oceangridfile,
                 cityfile,
                 basename,
                 borderfile=None,
                 is_scenario=False):
    """Create a contour map showing MMI contours over greyscale population.

    :param shakegrid:
      ShakeGrid object.
    :param popgrid:
      Grid2D object containing population data.
    :param oceanfile:
      String path to file containing ocean vector data in a format compatible
      with fiona.
    :param oceangridfile:
      String path to file containing ocean grid data .
    :param cityfile:
      String path to file containing GeoNames cities data.
    :param basename:
      String path containing desired output PDF base name, i.e.,
      /home/pager/exposure.  ".pdf" and ".png" files will
      be made.
    :param make_png:
      Boolean indicating whether a PNG version of the file should also be
      created in the same output folder as the PDF.
    :returns:
      Tuple containing:
        - Name of PNG file created, or None if PNG output not specified.
        - Cities object containing the cities that were rendered on the
          contour map.
    """
    gd = shakegrid.getGeoDict()

    # Retrieve the epicenter - this will get used on the map
    center_lat = shakegrid.getEventDict()['lat']
    center_lon = shakegrid.getEventDict()['lon']

    # load the ocean grid file (has 1s in ocean, 0s over land)
    # having this file saves us almost 30 seconds!
    oceangrid = read(oceangridfile,
                     samplegeodict=gd,
                     resample=True,
                     doPadding=True)

    # load the cities data, limit to cities within shakemap bounds
    allcities = Cities.fromDefault()
    cities = allcities.limitByBounds((gd.xmin, gd.xmax, gd.ymin, gd.ymax))

    # define the map
    # first cope with stupid 180 meridian
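    # if xmin > xmax the region straddles the 180 degree meridian, so the
    # longitudes are unwrapped by adding 360 to xmax before computing width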
    height = (gd.ymax - gd.ymin) * DEG2KM
    if gd.xmin < gd.xmax:
        width = (gd.xmax - gd.xmin) * np.cos(np.radians(center_lat)) * DEG2KM
        xmin, xmax, ymin, ymax = (gd.xmin, gd.xmax, gd.ymin, gd.ymax)
    else:
        xmin, xmax, ymin, ymax = (gd.xmin, gd.xmax, gd.ymin, gd.ymax)
        xmax += 360
        width = ((gd.xmax + 360) - gd.xmin) * \
            np.cos(np.radians(center_lat)) * DEG2KM

    aspect = width / height

    # if the aspect is not 1, then trim bounds in x or y direction
    # as appropriate
    if width > height:
        dw = (width - height) / 2.0  # this is width in km
        xmin = xmin + dw / (np.cos(np.radians(center_lat)) * DEG2KM)
        xmax = xmax - dw / (np.cos(np.radians(center_lat)) * DEG2KM)
        width = (xmax - xmin) * np.cos(np.radians(center_lat)) * DEG2KM
    if height > width:
        dh = (height - width) / 2.0  # this is height in km
        ymin = ymin + dh / DEG2KM
        ymax = ymax - dh / DEG2KM
        height = (ymax - ymin) * DEG2KM

    aspect = width / height
    figheight = FIGWIDTH / aspect
    bbox = (xmin, ymin, xmax, ymax)
    bounds = (xmin, xmax, ymin, ymax)
    figsize = (FIGWIDTH, figheight)

    # Create the MercatorMap object, which holds a separate but identical
    # axes object used to determine collisions between city labels.
    mmap = MercatorMap(bounds, figsize, cities, padding=0.5)
    fig = mmap.figure
    ax = mmap.axes
    # this needs to be done here so that city label collision
    # detection will work
    fig.canvas.draw()

    geoproj = mmap.geoproj
    proj = mmap.proj

    # project our population grid to the map projection
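    # (proj.proj4_init is the PROJ4 string for the map's Mercator projection;
    # projecting the grid with it keeps the raster aligned with the map axes)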
    projstr = proj.proj4_init
    popgrid_proj = popgrid.project(projstr)
    popdata = popgrid_proj.getData()
    newgd = popgrid_proj.getGeoDict()

    # Use our GMT-inspired palette class to create population and MMI colormaps
    popmap = ColorPalette.fromPreset('pop')
    mmimap = ColorPalette.fromPreset('mmi')

    # set the image extent to that of the data
    img_extent = (newgd.xmin, newgd.xmax, newgd.ymin, newgd.ymax)
    plt.imshow(popdata,
               origin='upper',
               extent=img_extent,
               cmap=popmap.cmap,
               vmin=popmap.vmin,
               vmax=popmap.vmax,
               zorder=POP_ZORDER,
               interpolation='nearest')

    # draw 10m res coastlines
    ax.coastlines(resolution="10m", zorder=COAST_ZORDER)

    states_provinces = cfeature.NaturalEarthFeature(
        category='cultural',
        name='admin_1_states_provinces_lines',
        scale='50m',
        facecolor='none')

    ax.add_feature(states_provinces, edgecolor='black', zorder=COAST_ZORDER)

    # draw country borders using natural earth data set
    if borderfile is not None:
        borders = ShapelyFeature(
            Reader(borderfile).geometries(), ccrs.PlateCarree())
        ax.add_feature(borders,
                       zorder=COAST_ZORDER,
                       edgecolor='black',
                       linewidth=2,
                       facecolor='none')

    # clip the ocean data to the shakemap
    bbox = (gd.xmin, gd.ymin, gd.xmax, gd.ymax)
    oceanshapes = _clip_bounds(bbox, oceanfile)

    ax.add_feature(ShapelyFeature(oceanshapes, crs=geoproj),
                   facecolor=WATERCOLOR,
                   zorder=OCEAN_ZORDER)

    # So here we're going to smooth the MMI data, project it to
    # our mercator map, and then contour the projected grid.

    # smooth the MMI data for contouring, then project
    mmi = shakegrid.getLayer('mmi').getData()
    smoothed_mmi = gaussian_filter(mmi, FILTER_SMOOTH)
    newgd = shakegrid.getGeoDict().copy()
    smooth_grid = Grid2D(data=smoothed_mmi, geodict=newgd)
    smooth_grid_merc = smooth_grid.project(projstr)
    newgd2 = smooth_grid_merc.getGeoDict()

    # project the ocean grid
    oceangrid_merc = oceangrid.project(projstr)

    # create masked arrays using the ocean grid
    data_xmin, data_xmax = newgd2.xmin, newgd2.xmax
    data_ymin, data_ymax = newgd2.ymin, newgd2.ymax
    smooth_data = smooth_grid_merc.getData()
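    # masked_where() hides pixels where the condition is True, so
    # "landmask" is the MMI data with land pixels hidden (ocean values remain)
    # and "oceanmask" is the MMI data with ocean pixels hidden (land remains)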
    landmask = np.ma.masked_where(oceangrid_merc.getData() == 0.0, smooth_data)
    oceanmask = np.ma.masked_where(oceangrid_merc.getData() == 1.0, smooth_data)

    # contour the data
    contourx = np.linspace(data_xmin, data_xmax, newgd2.nx)
    contoury = np.linspace(data_ymin, data_ymax, newgd2.ny)
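    # grid row 0 corresponds to ymax while contoury ascends from ymin to
    # ymax, so the data arrays are flipped vertically (flipud) to match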
    ax.contour(
        contourx,
        contoury,
        np.flipud(oceanmask),
        linewidths=3.0,
        linestyles='solid',
        zorder=1000,
        cmap=mmimap.cmap,
        vmin=mmimap.vmin,
        vmax=mmimap.vmax,
        levels=np.arange(0.5, 10.5, 1.0),
    )

    ax.contour(
        contourx,
        contoury,
        np.flipud(landmask),
        linewidths=2.0,
        linestyles='dashed',
        zorder=OCEANC_ZORDER,
        cmap=mmimap.cmap,
        vmin=mmimap.vmin,
        vmax=mmimap.vmax,
        levels=np.arange(0.5, 10.5, 1.0),
    )

    # the idea here is to plot invisible MMI contours at integer levels
    # and then label them. The clabel method won't allow the text to
    # appear, which in this case is fine, because it gives us an easy
    # way to draw the MMI labels as Roman numerals.
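    # (MMI_LABELS is assumed to map the numeric label text produced by
    # fmt='%.0f', e.g. '5', to the corresponding Roman numeral, e.g. 'V')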
    cs_land = plt.contour(
        contourx,
        contoury,
        np.flipud(oceanmask),
        linewidths=0.0,
        levels=np.arange(0, 11),
        alpha=0.0,
        zorder=CLABEL_ZORDER,
    )

    clabel_text = ax.clabel(cs_land,
                            cs_land.cvalues,
                            colors='k',
                            fmt='%.0f',
                            fontsize=40)
    for clabel in clabel_text:
        x, y = clabel.get_position()
        label_str = clabel.get_text()
        roman_label = MMI_LABELS[label_str]
        th = plt.text(x,
                      y,
                      roman_label,
                      zorder=CLABEL_ZORDER,
                      ha='center',
                      va='center',
                      color='black',
                      weight='normal',
                      size=16)
        th.set_path_effects([
            path_effects.Stroke(linewidth=2.0, foreground='white'),
            path_effects.Normal()
        ])

    cs_ocean = plt.contour(
        contourx,
        contoury,
        np.flipud(landmask),
        linewidths=0.0,
        levels=np.arange(0, 11),
        zorder=CLABEL_ZORDER,
    )

    clabel_text = ax.clabel(cs_ocean,
                            cs_ocean.cvalues,
                            colors='k',
                            fmt='%.0f',
                            fontsize=40)
    for clabel in clabel_text:
        x, y = clabel.get_position()
        label_str = clabel.get_text()
        roman_label = MMI_LABELS[label_str]
        th = plt.text(x,
                      y,
                      roman_label,
                      ha='center',
                      va='center',
                      color='black',
                      weight='normal',
                      size=16)
        th.set_path_effects([
            path_effects.Stroke(linewidth=2.0, foreground='white'),
            path_effects.Normal()
        ])

    # draw meridians and parallels using Cartopy's functions for that
    gl = ax.gridlines(draw_labels=True,
                      linewidth=2,
                      color=(0.9, 0.9, 0.9),
                      alpha=0.5,
                      linestyle='-',
                      zorder=GRID_ZORDER)
    gl.xlabels_top = False
    gl.xlabels_bottom = False
    gl.ylabels_left = False
    gl.ylabels_right = False
    gl.xlines = True

    # let's floor/ceil the edges to nearest half a degree
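    # e.g., xmin = 34.3 gives gxmin = floor(68.6) / 2 = 34.0 and
    # xmax = 35.3 gives gxmax = ceil(70.6) / 2 = 35.5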
    gxmin = np.floor(xmin * 2) / 2
    gxmax = np.ceil(xmax * 2) / 2
    gymin = np.floor(ymin * 2) / 2
    gymax = np.ceil(ymax * 2) / 2

    xlocs = np.linspace(gxmin, gxmax + 0.5, num=5)
    ylocs = np.linspace(gymin, gymax + 0.5, num=5)

    gl.xlocator = mticker.FixedLocator(xlocs)
    gl.ylocator = mticker.FixedLocator(ylocs)
    gl.xformatter = LONGITUDE_FORMATTER
    gl.yformatter = LATITUDE_FORMATTER
    gl.xlabel_style = {'size': 15, 'color': 'black'}
    gl.ylabel_style = {'size': 15, 'color': 'black'}

    # TODO - figure out x/y axes data coordinates
    # corresponding to 10% from left and 10% from top
    # use geoproj and proj
    dleft = 0.01
    dtop = 0.97
    proj_str = proj.proj4_init
    merc_to_dd = pyproj.Proj(proj_str)
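    # merc_to_dd maps projected Mercator x/y back to lon/lat when called
    # with inverse=True; it is used below to place the inline tick labels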

    # use built-in transforms to get from axes units to data units
    display_to_data = ax.transData.inverted()
    axes_to_display = ax.transAxes

    # these are x,y coordinates in projected space
    yleft, t1 = display_to_data.transform(
        axes_to_display.transform((dleft, 0.5)))
    t2, xtop = display_to_data.transform(axes_to_display.transform(
        (0.5, dtop)))

    # these are coordinates in lon,lat space
    yleft_dd, t1_dd = merc_to_dd(yleft, t1, inverse=True)
    t2_dd, xtop_dd = merc_to_dd(t2, xtop, inverse=True)

    # drawing our own tick labels INSIDE the plot, as
    # Cartopy doesn't seem to support this.
    yrange = ymax - ymin
    xrange = xmax - xmin
    ddlabelsize = 12
    for xloc in gl.xlocator.locs:
        outside = xloc < xmin or xloc > xmax
        # don't draw labels when we're too close to either edge
        near_edge = (xloc - xmin) < (xrange * 0.1) or (xmax - xloc) < (xrange *
                                                                       0.1)
        if outside or near_edge:
            continue
        # undo any dateline unwrapping, then pick the E/W hemisphere suffix
        xloc_dd = xloc if xloc <= 180 else xloc - 360
        hemi = 'W' if xloc_dd < 0 else 'E'
        xtext = r'$%.1f^\circ$%s' % (abs(xloc_dd), hemi)
        ax.text(xloc,
                xtop_dd,
                xtext,
                fontsize=ddlabelsize,
                zorder=GRID_ZORDER,
                ha='center',
                fontname=DEFAULT_FONT,
                transform=ccrs.Geodetic())

    for yloc in gl.ylocator.locs:
        outside = yloc < gd.ymin or yloc > gd.ymax
        # don't draw labels when we're too close to either edge
        near_edge = (yloc - gd.ymin) < (yrange * 0.1) or (gd.ymax - yloc) < (
            yrange * 0.1)
        if outside or near_edge:
            continue
        if yloc < 0:
            ytext = r'$%.1f^\circ$S' % (abs(yloc))
        else:
            ytext = r'$%.1f^\circ$N' % (abs(yloc))
        ax.text(yleft_dd,
                yloc,
                ytext,
                fontsize=ddlabelsize,
                zorder=GRID_ZORDER,
                va='center',
                fontname=DEFAULT_FONT,
                transform=ccrs.Geodetic())

    # draw cities
    mapcities = mmap.drawCities(shadow=True, zorder=CITIES_ZORDER)

    # draw the figure border thickly
    # TODO - figure out how to draw map border
    # bwidth = 3
    # ax.spines['top'].set_visible(True)
    # ax.spines['left'].set_visible(True)
    # ax.spines['bottom'].set_visible(True)
    # ax.spines['right'].set_visible(True)
    # ax.spines['top'].set_linewidth(bwidth)
    # ax.spines['right'].set_linewidth(bwidth)
    # ax.spines['bottom'].set_linewidth(bwidth)
    # ax.spines['left'].set_linewidth(bwidth)

    # Get the corner of the map with the lowest population
    corner_rect, filled_corner = _get_open_corner(popgrid, ax)
    clat2 = round_to_nearest(center_lat, 1.0)
    clon2 = round_to_nearest(center_lon, 1.0)

    # draw a little globe in the corner showing in small-scale
    # where the earthquake is located.
    proj = ccrs.Orthographic(central_latitude=clat2, central_longitude=clon2)
    ax2 = fig.add_axes(corner_rect, projection=proj)
    ax2.add_feature(cfeature.OCEAN,
                    zorder=0,
                    facecolor=WATERCOLOR,
                    edgecolor=WATERCOLOR)
    ax2.add_feature(cfeature.LAND, zorder=0, edgecolor='black')
    ax2.plot([clon2], [clat2],
             'w*',
             linewidth=1,
             markersize=16,
             markeredgecolor='k',
             markerfacecolor='r')
    ax2.gridlines()
    ax2.set_global()
    ax2.outline_patch.set_edgecolor('black')
    ax2.outline_patch.set_linewidth(2)

    # Draw the map scale in the unoccupied lower corner.
    corner = 'lr'
    if filled_corner == 'lr':
        corner = 'll'
    draw_scale(ax, corner, pady=0.05, padx=0.05)

    # Draw the epicenter as a black star
    plt.sca(ax)
    plt.plot(center_lon,
             center_lat,
             'k*',
             markersize=16,
             zorder=EPICENTER_ZORDER,
             transform=geoproj)

    if is_scenario:
        plt.text(center_lon,
                 center_lat,
                 'SCENARIO',
                 fontsize=64,
                 zorder=WATERMARK_ZORDER,
                 transform=geoproj,
                 alpha=0.2,
                 color='red',
                 horizontalalignment='center')

    # create pdf and png output file names
    pdf_file = basename + '.pdf'
    png_file = basename + '.png'

    # save to pdf
    plt.savefig(pdf_file)
    plt.savefig(png_file)

    return (pdf_file, png_file, mapcities)
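
A minimal usage sketch for draw_contour() is shown below. The file paths are placeholders, the import locations for ShakeGrid and the read() helper used above are assumptions, and the rasters are assumed to cover the ShakeMap extent; treat it as an illustration rather than the function's documented interface.

# hypothetical usage sketch; every path below is a placeholder
from mapio.shake import ShakeGrid    # assumed import location
from mapio.reader import read        # assumed import for the read() helper used above

shakegrid = ShakeGrid.load('grid.xml')       # ShakeMap grid.xml for the event
gd = shakegrid.getGeoDict()

# resample a population raster (placeholder filename) onto the ShakeMap grid
popgrid = read('landscan_pop.flt',
               samplegeodict=gd,
               resample=True,
               doPadding=True)

pdf_file, png_file, mapcities = draw_contour(
    shakegrid,
    popgrid,
    oceanfile='ne_10m_ocean.shp',       # fiona-readable ocean polygons
    oceangridfile='ocean_grid.flt',     # raster with 1s in ocean, 0s over land
    cityfile='cities1000.txt',          # GeoNames cities file
    basename='/tmp/event_exposure',     # '.pdf' and '.png' are appended to this
    is_scenario=False)
print(pdf_file, png_file)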