Code example #1
    def test_term_frequency_untiled_one_shape(self):
        dataset_catalog_file = get_path("../catalog/cdl_2014_untiled.json")
        rd = read_catalog(dataset_catalog_file)
        df = self.make_dataframe(rd.query(self.vl_one_shape))
        # Compare against expected output computed via single-tile computation
        # read from a file.
        df_distance = (self.df_expected_one_shape - df.loc[self.one_shape_id])\
            .apply(np.abs)

        assert df_distance[1] < 0.05
        assert df_distance[5] < 0.05
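Several of these examples call a make_dataframe helper from the test class without showing it. As a rough sketch only, assuming each query result exposes values, weights, and a shape identifier (the attribute name r.id below is a guess), such a helper might build one row per shape and one column per raster class:

import numpy as np
import pandas as pd

def make_dataframe(query_results):
    # Hypothetical sketch: one row per shape, one column per raster class,
    # holding the weighted frequency of that class within the shape.
    rows = {}
    for r in query_results:
        values = np.asarray(r.values)
        weights = np.asarray(r.weights, dtype=float)
        total = weights.sum()
        freqs = {}
        for cls in np.unique(values):
            freqs[cls] = weights[values == cls].sum() / total if total else 0.0
        rows[r.id] = freqs  # r.id is assumed; the real key attribute may differ
    return pd.DataFrame(rows).T.fillna(0.0)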
Code example #2
    def test_co_ne_border(self):
        p = get_path("vector/clu/clu_public_a_co095.shp")
        vl, _ = read_layer(p)
        p = get_path("vector/co_ne_border.geojson")
        co_ne_border, df = read_geojson(p)
        vl = vl.within(co_ne_border.bbox())

        rd = read_catalog(get_path("../catalog/co_soil.json"))
        for r in rd.query(vl):
            # The good catalog should iterate without raising.
            # compute_stats(r.values, r.weights)
            pass

        rd = read_catalog(get_path("../catalog/co_soil_bad.json"))
        failed = False
        try:
            for r in rd.query(vl):
                # The bad catalog is expected to raise IndexError.
                # compute_stats(r.values, r.weights)
                pass
        except IndexError:
            failed = True

        assert failed
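The commented-out compute_stats(r.values, r.weights) calls above suggest per-record weighted statistics. A minimal sketch of such a helper, assuming plain numpy arrays; the body shown here is hypothetical, not pyspatial's implementation:

import numpy as np

def compute_stats(values, weights):
    # Hypothetical sketch: weighted mean and standard deviation
    # of one query result's pixel values.
    values = np.asarray(values, dtype=float)
    weights = np.asarray(weights, dtype=float)
    mean = (values * weights).sum() / weights.sum()
    var = (weights * (values - mean) ** 2).sum() / weights.sum()
    return mean, np.sqrt(var)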
Code example #3
    def test_shapes_outside_raster_should_be_filtered(self):
        # 2 shapes in IL that should be in range,
        # plus 2 shapes in CA that should be out of range.
        p = get_path("vector/clu/four_shapes_2il_2ca.geojson")
        vl_outside, vl_df = read_geojson(p)
        assert (len(vl_outside.keys()) == 4)

        dataset_catalog_file = get_path("../catalog/cdl_2014.json")
        rd = read_catalog(dataset_catalog_file)
        df = self.make_dataframe(rd.query(vl_outside))
        assert (len(df.index) == 4)
        sums = df.sum(axis=1).map(int)
        assert df[sums < 1e-6].shape[0] == 2
        assert df[(sums - 1).map(np.abs) < 1e-6].shape[0] == 2
Code example #4
    def test_term_frequency_untiled_all_shapes(self):
        dataset_catalog_file = get_path("../catalog/cdl_2014_untiled.json")
        rd = read_catalog(dataset_catalog_file)
        df = self.make_dataframe(rd.query(self.vl))

        assert (len(df.index) == 23403)

        # Compare against expected output computed via single-tile computation
        # read from a file.
        df_distance = (self.df_expected - df).applymap(np.abs)
        corn_error = (df_distance[1] * self.areas).sum()
        soy_error = (df_distance[5] * self.areas).sum()
        assert (corn_error < 0.02), corn_error
        assert (soy_error < 0.02), soy_error
Code example #5
    def setup_class(cls):
        os.chdir(base)
        # Let's add some CLUs for Washington County, IL
        cls.vl, _ = read_layer(get_path("vector/clu/clu_public_a_il189.shp"),
                               index="uniqueid")

        # Figure out which raster files we'll need.
        cls.dataset = read_catalog(get_path("../catalog/cdl_2014.json"))

        # Create a RasterBand for the raster; pixel data is stored in
        # and read from this object.
        cls.rb = RasterBand(get_path("raster/95000_45000.tif"))

        # Convert CLUs to pixels.
        cls.px_shps = cls.dataset.to_pixels(cls.vl)
Code example #6
        def processplace(placeid, urbanextenttable, uidata):
            BASERASTERPATH = os.environ.get("HIRASTERBASE",
                                            '/data/rasterstorage')

            RASTERSETS = {
                'grump2000': 'grump/population2000.json',
                'grump2005': 'grump/population2005.json',
                'grump2010': 'grump/population2010.json',
                'grump2015': 'grump/population2015.json',
                'grump2020': 'grump/population2020.json',
                'landscanpopulation': 'landscan/landscan.json',
                'impervious': 'nlcd/impervious/nlcd_impervious_2011.json',
                'nlcd': 'nlcd/landcover/nlcd_landcover_2011.json'
            }
            #               'nlcd/impervious/nlcd_impervious_2001.json',
            #              'nlcd/impervious/nlcd_impervious_2006.json',

            results = get_geom_from_postgis(urbanextenttable, placeid)
            urbanextentgeom = from_series(pd.Series([results[0]]))
            urbanextentgeomjson = results[1]
            urbanextentarea = results[2]

            bufferextentgeom = None
            bufferextentgeomjson = None
            if uidata['extentbuffer']:
                results = get_geom_from_postgis_buffer(
                    urbanextenttable, placeid,
                    float(uidata['extentbuffer'][0]))
                bufferextentgeom = from_series(pd.Series([results[0]]))
                bufferextentgeomjson = results[1]
                bufferextentarea = results[2]
            elif uidata['fixedbuffer']:
                results = get_geom_from_postgis_buffer(
                    urbanextenttable, placeid, int(uidata['fixedbuffer'][0]),
                    True)
                bufferextentgeom = from_series(pd.Series([results[0]]))
                bufferextentgeomjson = results[1]
                bufferextentarea = results[2]

            rasterresults = {}

            print(bufferextentgeom)
            for rasterkey in RASTERSETS.keys():
                if uidata.get(rasterkey, None):
                    try:
                        tempr = read_catalog(
                            op.join(BASERASTERPATH, RASTERSETS[rasterkey]))
                        rasterresults[rasterkey] = {}
                        result = next(tempr.query(urbanextentgeom))
                        rasterresults[rasterkey]['urbanextent'] = {
                            'sum':
                            float(result.values.sum()),
                            'mean':
                            float(result.values.mean()),
                            'weightedmean':
                            float((result.values * result.weights).sum() /
                                  result.weights.sum()),
                            'std':
                            float(result.values.std()),
                            'weightedsum':
                            float((result.values * result.weights).sum()),
                            'min':
                            float(result.values.min()),
                            'max':
                            float(result.values.max())
                            # 'valuecounts': dict(result.value_counts())
                        }
                        if bufferextentgeomjson:
                            result = next(tempr.query(bufferextentgeom))
                            rasterresults[rasterkey]['bufferextent'] = {
                                'sum':
                                float(result.values.sum()),
                                'mean':
                                float(result.values.mean()),
                                'weightedmean':
                                float((result.values * result.weights).sum() /
                                      result.weights.sum()),
                                'std':
                                float(result.values.std()),
                                'weightedsum':
                                float((result.values * result.weights).sum()),
                                'min':
                                float(result.values.min()),
                                'max':
                                float(result.values.max())
                                # 'valuecounts': dict(result.value_counts())
                            }
                    except Exception as e:
                        traceback.print_exc()
                        print(e)
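The 'urbanextent' and 'bufferextent' branches above build the same statistics dictionary twice. A small helper along these lines (hypothetical, not part of the original code) would compute it once per query result:

import numpy as np

def summarize_result(result):
    # Refactoring sketch: the same statistics the inline dicts above
    # compute, for a single pyspatial query result.
    values = np.asarray(result.values, dtype=float)
    weights = np.asarray(result.weights, dtype=float)
    weighted_sum = float((values * weights).sum())
    return {
        'sum': float(values.sum()),
        'mean': float(values.mean()),
        'weightedmean': weighted_sum / float(weights.sum()),
        'std': float(values.std()),
        'weightedsum': weighted_sum,
        'min': float(values.min()),
        'max': float(values.max()),
    }

Each inline dictionary could then be replaced with, for example, rasterresults[rasterkey]['urbanextent'] = summarize_result(result).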
Code example #7
    def test_unconventional_tilepath(self):
        dataset_catalog_file = get_path("../catalog/cdl_2014_tilepath.json")
        rd = read_catalog(dataset_catalog_file, workdir='data/raster')
        df = self.make_dataframe(rd.query(self.vl_one_shape))
        assert (len(df.index) == 1)
Code example #8
                'ruralgeomwgs84': wkb.loads(str(firstitem[2])),
            }
        except Exception as e:
            print(e)
            print(placeid)
            print(s['usgsplacename'])
            print(s['gnisid'])

    print "loading the dem.tif file"
    demfile = "/Volumes/UrbisBackup/rasterstorage/dem/dem.tif"
    demsrc = rasterio.open(demfile)
    profile = demsrc.profile

    print "loading the dem.json file"
    dempyspatialpath = "/Volumes/UrbisBackup/rasterstorage/dem/dem.json"
    dempyspatial = read_catalog(dempyspatialpath)

    print "doing", s[u'usgsplacename']
    if s['censusurb'].get('processbuffer', False):
        return (
            placeid,
            s,
        )
    try:

        buffergeom = mapping(s['censusurb']['ruralgeom'])

        try:
            os.mkdir('scratch')
        except OSError:
            # The scratch directory may already exist.
            pass