def run(self):
        """Build a forest/non-forest mask from the global landcover raster
            and derive a forest-edge distance raster from it."""
        landcover_nodata = raster_utils.get_nodata_from_uri(
            GLOBAL_LANDCOVER_URI)

        # landcover codes 1-5 are the forest classes
        forest_codes = [1, 2, 3, 4, 5]

        mask_nodata = 2
        mask_uri = os.path.join(OUTPUT_DIR, "forest_mask.tif")

        def classify_forest(lulc):
            """Return an int8 array: 0 on forest pixels, 1 on non-forest
                pixels, and mask_nodata where lulc is nodata."""
            result = numpy.ones(lulc.shape, dtype=numpy.int8)
            for code in forest_codes:
                result[lulc == code] = 0
            result[lulc == landcover_nodata] = mask_nodata
            return result

        cell_size = raster_utils.get_cell_size_from_uri(GLOBAL_LANDCOVER_URI)
        raster_utils.vectorize_datasets(
            [GLOBAL_LANDCOVER_URI], classify_forest, mask_uri, gdal.GDT_Byte,
            mask_nodata, cell_size, 'intersection', dataset_to_align_index=0,
            dataset_to_bound_index=None, aoi_uri=None,
            assert_datasets_projected=True, process_pool=None,
            vectorize_op=False, datasets_are_pre_aligned=True)

        raster_utils.distance_transform_edt(
            mask_uri, FOREST_EDGE_DISTANCE_URI)
 def run(self):
     """Write the union landcover raster clipped to its intersection with
         the union biomass raster, as GLOBAL_LANDCOVER_URI."""
     landcover_nodata = raster_utils.get_nodata_from_uri(UNION_LANDCOVER_URI)
     pixel_size = raster_utils.get_cell_size_from_uri(UNION_LANDCOVER_URI)

     def take_landcover(lulc_block, _biomass_block):
         # biomass raster only constrains the output extent
         return lulc_block

     raster_utils.vectorize_datasets(
         [UNION_LANDCOVER_URI, UNION_BIOMASS_URI], take_landcover,
         GLOBAL_LANDCOVER_URI, gdal.GDT_Int16, landcover_nodata, pixel_size,
         "intersection", dataset_to_align_index=0, vectorize_op=False)
def _align_raster_with_biomass(input_uri, output_uri):
    """Align input_uri with the GLOBAL_BIOMASS_URI raster.

        The output takes its cell size and bounding extent from
        GLOBAL_BIOMASS_URI while preserving the input's pixel values and
        nodata value (falling back to -9999 when the input declares none).
    """
    input_nodata = raster_utils.get_nodata_from_uri(input_uri)
    if input_nodata is None:
        # the source raster has no nodata value set; pick a sentinel
        input_nodata = -9999
    biomass_cell_size = raster_utils.get_cell_size_from_uri(GLOBAL_BIOMASS_URI)

    def passthrough(value_block, _biomass_block):
        # the biomass raster participates only to define the output grid
        return value_block

    raster_utils.vectorize_datasets(
        [input_uri, GLOBAL_BIOMASS_URI], passthrough, output_uri,
        gdal.GDT_Float32, input_nodata, biomass_cell_size, "dataset",
        dataset_to_bound_index=1, vectorize_op=False)
    def run(self):
        """Mosaic self.dataset_uri_list into self.dataset_out_uri, letting
            rasters later in the list fill pixels that earlier rasters
            left at the nodata value."""
        nodata = raster_utils.get_nodata_from_uri(self.dataset_uri_list[0])
        cell_size = raster_utils.get_cell_size_from_uri(
            self.dataset_uri_list[0])

        def union_op(*array_stack):
            """Per pixel, keep the last non-nodata value in the stack,
                defaulting to the first raster's value."""
            merged = array_stack[0]
            for layer in array_stack[1:]:
                merged = numpy.where(layer != nodata, layer, merged)
            return merged

        raster_utils.vectorize_datasets(
            list(self.dataset_uri_list), union_op, self.dataset_out_uri,
            gdal.GDT_Int16, nodata, cell_size, "union",
            dataset_to_align_index=0, vectorize_op=False)
# コード例 #5 (code example #5 — separator left over from the code-listing
# page this snippet collection was scraped from; the "0" was its vote count)
# 0
	def run(self):
		"""Mosaic the three continental biomass rasters into
			self.output_uri, filling nodata holes from rasters later in
			the list."""
		biomass_raster_list = [
			"C:/Users/rpsharp/Dropbox_stanford/Dropbox/forest_edge_carbon/af_biov2ct1.tif",
			"C:/Users/rpsharp/Dropbox_stanford/Dropbox/forest_edge_carbon/am_biov2ct1.tif",
			"C:/Users/rpsharp/Dropbox_stanford/Dropbox/forest_edge_carbon/as_biov2ct1.tif",
		]

		nodata = raster_utils.get_nodata_from_uri(biomass_raster_list[0])
		cell_size = raster_utils.get_cell_size_from_uri(biomass_raster_list[0])

		def union_op(*stacked_arrays):
			"""Per pixel, keep the last value in the stack that is not
				nodata; fall back to the first raster's value."""
			merged = stacked_arrays[0]
			for layer in stacked_arrays[1:]:
				merged = numpy.where(layer != nodata, layer, merged)
			return merged

		# make sure the destination folder exists before writing
		raster_utils.create_directories([os.path.dirname(self.output_uri)])

		raster_utils.vectorize_datasets(
			biomass_raster_list, union_op, self.output_uri, gdal.GDT_Int16,
			nodata, cell_size, 'union', dataset_to_align_index=0,
			vectorize_op=False)
def average_layers():
    """Append per-grid-cell raster averages to the 100km results table.

        For each raster in average_raster_list: clip it to the footprint
        of giant_layer.tif, then for every row of the base table read the
        raster window covering that 100km grid cell and append the mean of
        its non-nodata pixels as a new column.  The widened table is
        written to all_grid_results_100km_human_elevation.csv.

        NOTE(review): depends on module-level GLOBAL_UPPER_LEFT_ROW and
        GLOBAL_UPPER_LEFT_COL, and none of the opened files are explicitly
        closed -- relies on process exit / GC.
    """

    base_table_uri = "C:/Users/rich/Desktop/all_grid_results_100km_clean_v2.csv"
    base_table_file = open(base_table_uri, 'rU')
    table_header = base_table_file.readline()

    #need to mask the average layers to the biomass regions

    giant_layer_uri = "C:/Users/rich/Desktop/average_layers_projected/giant_layer.tif"

    af_uri = "C:/Users/rich/Desktop/af_biov2ct1.tif"
    am_uri = "C:/Users/rich/Desktop/am_biov2ct1.tif"
    as_uri = "C:/Users/rich/Desktop/as_biov2ct1.tif"
    cell_size = raster_utils.get_cell_size_from_uri(am_uri)
    # one-time build of the combined biomass footprint; left commented out,
    # presumably because giant_layer.tif already exists on disk -- confirm
    #raster_utils.vectorize_datasets(
    #    [af_uri, am_uri, as_uri], lambda x,y,z: x+y+z, giant_layer_uri, gdal.GDT_Float32,
    #    -1, cell_size, 'union', vectorize_op=False)

    table_uri = base_table_uri
    # second handle on the same table; this one is iterated row by row below
    table_file = open(table_uri, 'rU')

    table_header = table_file.readline().rstrip()


    # NOTE(review): lookup_table is only used by the commented-out loop
    # variant below; kept for parity with the original
    lookup_table = raster_utils.get_lookup_from_csv(table_uri, 'ID100km')

    out_table_uri =  "C:/Users/rich/Desktop/all_grid_results_100km_human_elevation.csv"
    out_table_file = codecs.open(out_table_uri, 'w', 'utf-8')

    # (raster uri, output column header) pairs; anthrome headers are
    # pre-quoted because they contain commas
    average_raster_list = [
        ("C:/Users/rich/Desktop/average_layers_projected/lighted_area_luminosity.tif", 'Lighted area density'),
        ("C:/Users/rich/Desktop/average_layers_projected/fi_average.tif", 'Fire densities'),
        ("C:/Users/rich/Desktop/average_layers_projected/glbctd1t0503m.tif", 'FAO_Cattle'),
        ("C:/Users/rich/Desktop/average_layers_projected/glbgtd1t0503m.tif", 'FAO_Goat'),
        ("C:/Users/rich/Desktop/average_layers_projected/glbpgd1t0503m.tif", 'FAO_Pig'),
        ("C:/Users/rich/Desktop/average_layers_projected/glbshd1t0503m.tif", 'FAO_Sheep'),
        ("C:/Users/rich/Desktop/average_layers_projected/glds00ag.tif", 'Human population density AG'),
        ("C:/Users/rich/Desktop/average_layers_projected/glds00g.tif", 'Human population density G'),
        ('C:/Users/rich/Desktop/average_layers_projected/anthrome_11.tif', '"11: Urban, Dense settlement"'),
        ('C:/Users/rich/Desktop/average_layers_projected/anthrome_12.tif', '"12: Dense settlements, Dense settlements"'),
        ('C:/Users/rich/Desktop/average_layers_projected/anthrome_22.tif', '"22: Irrigated villages, Villages"'),
        ('C:/Users/rich/Desktop/average_layers_projected/anthrome_23.tif', '"23: Cropped & pastoral villages, Villages"'),
        ('C:/Users/rich/Desktop/average_layers_projected/anthrome_24.tif', '"24: Pastoral villages, Villages"'),
        ('C:/Users/rich/Desktop/average_layers_projected/anthrome_25.tif', '"25: Rainfed villages, Villages"'),
        ('C:/Users/rich/Desktop/average_layers_projected/anthrome_26.tif', '"26: Rainfed mosaic villages, Villages"'),
        ('C:/Users/rich/Desktop/average_layers_projected/anthrome_31.tif', '"31: Residential irrigated cropland, Croplands"'),
        ('C:/Users/rich/Desktop/average_layers_projected/anthrome_32.tif', '"32: Residential rainfed mosaic, Croplands"'),
        ('C:/Users/rich/Desktop/average_layers_projected/anthrome_33.tif', '"33: Populated irrigated cropland,   Croplands"'),
        ('C:/Users/rich/Desktop/average_layers_projected/anthrome_34.tif', '"34: Populated rainfed cropland, Croplands"'),
        ('C:/Users/rich/Desktop/average_layers_projected/anthrome_35.tif', '"35: Remote croplands, Croplands"'),
        ('C:/Users/rich/Desktop/average_layers_projected/anthrome_41.tif', '"41: Residential rangelands, Rangelands"'),
        ('C:/Users/rich/Desktop/average_layers_projected/anthrome_42.tif', '"42: Populated rangelands, Rangelands"'),
        ('C:/Users/rich/Desktop/average_layers_projected/anthrome_43.tif', '"43: Remote rangelands, Rangelands"'),
        ('C:/Users/rich/Desktop/average_layers_projected/anthrome_51.tif', '"51: Populated forests, Forested"'),
        ('C:/Users/rich/Desktop/average_layers_projected/anthrome_52.tif', '"52: Remote forests, Forested"'),
        ('C:/Users/rich/Desktop/average_layers_projected/anthrome_61.tif', '"61: Wild forests, Wildlands"'),
        ('C:/Users/rich/Desktop/average_layers_projected/anthrome_62.tif', '"62: Sparse trees, Wildlands"'),
        ('C:/Users/rich/Desktop/average_layers_projected/anthrome_63.tif', '"63: Barren, Wildlands"'),
        ("C:/Users/rich/Desktop/average_layers_projected/5km_global_pantropic_dem.tif", '"Average Elevation"'),
        ]

    clipped_raster_list = []


    # clip every average layer to the biomass footprint so the window
    # reads below line up with the grid ids
    for average_raster_uri, header in average_raster_list:
        print 'clipping ' + average_raster_uri
        clipped_raster_uri = os.path.join(os.path.dirname(average_raster_uri), 'temp', os.path.basename(average_raster_uri))
        cell_size = raster_utils.get_cell_size_from_uri(average_raster_uri)
        raster_utils.vectorize_datasets(
            [average_raster_uri, giant_layer_uri], lambda x,y: x, clipped_raster_uri, gdal.GDT_Float32,
            -1, cell_size, 'intersection', vectorize_op=False)
        clipped_raster_list.append((clipped_raster_uri, header))

    # keep GDAL handles open for the whole table pass
    dataset_list = [gdal.Open(uri) for uri, label in clipped_raster_list]
    band_list = [ds.GetRasterBand(1) for ds in dataset_list]
    nodata_list = [band.GetNoDataValue() for band in band_list]

    extended_table_headers = ','.join([header for _, header in average_raster_list])


    def write_to_file(value):
        # output is utf-8; fall back to latin-1 when the row bytes don't
        # decode (headers/values come from mixed-encoding sources)
        try:
            out_table_file.write(value)
        except UnicodeDecodeError as e:
            out_table_file.write(value.decode('latin-1'))

    write_to_file(table_header + ',' + extended_table_headers + '\n')
    #print table_header + ',' + extended_table_headers

    for line in table_file:
        split_line = line.rstrip().split(',')
        # column 2 holds the grid id, normally 'row-col'
        grid_id = split_line[2]
    #for grid_id in lookup_table:
        try:
            split_grid_id = grid_id.split('-')
            grid_row_index, grid_col_index = map(int, split_grid_id)
        except ValueError as e:
            # some grid ids came through a spreadsheet as e.g. 'Jan-5';
            # map the month name back to its row number
            month_to_number = {
                'Jan': 1,
                'Feb': 2,
                'Mar': 3,
                'Apr': 4,
                'May': 5,
                'Jun': 6,
                'Jul': 7,
                'Aug': 8,
                'Sep': 9,
                'Oct': 10,
                'Nov': 11,
                'Dec': 12,
            }
            grid_row_index, grid_col_index = month_to_number[split_grid_id[0]], int(split_grid_id[1])

        print 'processing grid id ' + grid_id

        # projected -> lat/lng transform for the grid cell's origin
        ds = dataset_list[0]
        base_srs = osr.SpatialReference(ds.GetProjection())
        lat_lng_srs = base_srs.CloneGeogCS()
        coord_transform = osr.CoordinateTransformation(
            base_srs, lat_lng_srs)
        gt = ds.GetGeoTransform()
        grid_resolution = 100 #100km

        row_coord = grid_row_index * grid_resolution * 1000 + GLOBAL_UPPER_LEFT_ROW
        col_coord = grid_col_index * grid_resolution * 1000 + GLOBAL_UPPER_LEFT_COL

        lng_coord, lat_coord, _ = coord_transform.TransformPoint(
            col_coord, row_coord)
        # rewrite the row: original columns 0-1, normalized grid id,
        # columns 3-10, recomputed lat/lng, then columns 13 onward
        write_to_file(','.join(split_line[0:2]) + ',%d-%d,' % (grid_row_index, grid_col_index) + ','.join(split_line[3:11]) +',%f,%f,' % (lat_coord, lng_coord)+','.join(split_line[13:]))

        for (_, header), band, ds, nodata in zip(clipped_raster_list, band_list, dataset_list, nodata_list):

            gt = ds.GetGeoTransform()
            n_rows = ds.RasterYSize
            n_cols = ds.RasterXSize

            # pixel window covering this 100km grid cell
            xoff = int(grid_col_index * (grid_resolution * 1000.0) / (gt[1]))
            yoff = int(grid_row_index * (grid_resolution * 1000.0) / (-gt[5]))
            win_xsize = int((grid_resolution * 1000.0) / (gt[1]))
            # NOTE(review): win_ysize divides by gt[1] (pixel width), not
            # -gt[5] (pixel height) -- only correct for square pixels;
            # confirm intent
            win_ysize = int((grid_resolution * 1000.0) / (gt[1]))

            # clamp the window at the raster's right/bottom edges
            if xoff + win_xsize > n_cols:
                win_xsize = n_cols - xoff
            if yoff + win_ysize > n_rows:
                win_ysize = n_rows - yoff

            block = band.ReadAsArray(
                xoff=xoff, yoff=yoff, win_xsize=win_xsize, win_ysize=win_ysize)
            # NOTE(review): if the window is entirely nodata this average
            # is over an empty selection (numpy emits a warning and the
            # result is nan) -- confirm acceptable
            block_average = numpy.average(block[block != nodata])
            write_to_file(',%f' % block_average)
        write_to_file('\n')
# コード例 #7 (code example #7 — separator left over from the code-listing
# page this snippet collection was scraped from; the "0" was its vote count)
# 0
import os
import numpy
import gdal

from invest_natcap import raster_utils

anthrome_uri = "C:/Users/rich/Desktop/average_layers_projected/gl_anthrome.tif"
anthrome_ds = gdal.Open(anthrome_uri)
anthrome_band = anthrome_ds.GetRasterBand(1)

anthrome_array = anthrome_band.ReadAsArray()
unique_anthrome_ids = numpy.unique(anthrome_array)

for anthrome_id in unique_anthrome_ids:
    output_uri = os.path.join(os.path.dirname(anthrome_uri), 'anthrome_%d.tif' % anthrome_id)
    out_pixel_size = raster_utils.get_cell_size_from_uri(anthrome_uri)
    raster_utils.vectorize_datasets(
        [anthrome_uri], lambda x: x==anthrome_id, output_uri, gdal.GDT_Byte,
        127, out_pixel_size, "union", vectorize_op=False)

print numpy.unique(anthrome_array).shape
def process_ecoregion(prefix):
    """Run the per-region pipeline for one filename prefix: align the
        lulc/biomass rasters, rasterize ecoregion ids, build a forest
        mask and forest-edge distance raster, then aggregate biomass
        statistics into a CSV."""
    ecoregion_shapefile_uri = os.path.join(
        DATA_DIR, 'ecoregions', 'ecoregions_projected.shp')

    ecoregion_nodata = -1
    ecoregion_lookup = raster_utils.extract_datasource_table_by_key(
        ecoregion_shapefile_uri, 'ECO_ID_U')
    # nodata pixels resolve to an explicit UNKNOWN record
    ecoregion_lookup[ecoregion_nodata] = {
        'ECO_NAME': 'UNKNOWN',
        'ECODE_NAME': 'UNKNOWN',
        'WWF_MHTNAM': 'UNKNOWN',
        }

    lulc_raw_uri = os.path.join(DATA_DIR, '%s%s' % (prefix, LULC_BASE))
    biomass_raw_uri = os.path.join(DATA_DIR, '%s%s' % (prefix, BIOMASS_BASE))

    cell_size = raster_utils.get_cell_size_from_uri(lulc_raw_uri)

    lulc_uri = os.path.join(OUTPUT_DIR, "%s_lulc_aligned.tif" % (prefix))
    biomass_uri = os.path.join(OUTPUT_DIR, "%s_biomass_aligned.tif" % (prefix))

    # resample both rasters onto a common intersection grid
    raster_utils.align_dataset_list(
        [lulc_raw_uri, biomass_raw_uri], [lulc_uri, biomass_uri],
        ['nearest'] * 2, cell_size, 'intersection', 0,
        dataset_to_bound_index=None, aoi_uri=None,
        assert_datasets_projected=True, process_pool=None)

    # burn ecoregion ids onto the aligned lulc grid
    ecoregion_dataset_uri = os.path.join(
        OUTPUT_DIR, "%s_ecoregion_id.tif" % (prefix))
    raster_utils.new_raster_from_base_uri(
        lulc_uri, ecoregion_dataset_uri, 'GTiff', ecoregion_nodata,
        gdal.GDT_Int16)
    raster_utils.rasterize_layer_uri(
        ecoregion_dataset_uri, ecoregion_shapefile_uri,
        option_list=["ATTRIBUTE=ECO_ID_U"])

    lulc_nodata = raster_utils.get_nodata_from_uri(lulc_uri)

    # landcover codes 1-5 are the forest classes
    forest_lulc_codes = [1, 2, 3, 4, 5]
    mask_nodata = 2
    mask_uri = os.path.join(OUTPUT_DIR, "%s_mask.tif" % prefix)

    def mask_nonforest(lulc):
        """0 on forest pixels, 1 on non-forest, mask_nodata on nodata."""
        forest_mask = numpy.ones(lulc.shape, dtype=numpy.int8)
        for forest_code in forest_lulc_codes:
            forest_mask[lulc == forest_code] = 0
        forest_mask[lulc == lulc_nodata] = mask_nodata
        return forest_mask

    raster_utils.vectorize_datasets(
        [lulc_uri], mask_nonforest, mask_uri, gdal.GDT_Byte,
        mask_nodata, cell_size, 'intersection', dataset_to_align_index=0,
        dataset_to_bound_index=None, aoi_uri=None,
        assert_datasets_projected=True, process_pool=None, vectorize_op=False,
        datasets_are_pre_aligned=True)

    forest_edge_distance_uri = os.path.join(
        OUTPUT_DIR, "%s_forest_edge.tif" % prefix)
    raster_utils.distance_transform_edt(mask_uri, forest_edge_distance_uri)

    biomass_stats_uri = os.path.join(
        OUTPUT_DIR, "%s_biomass_stats.csv" % prefix)
    _aggregate_results(
        forest_edge_distance_uri, biomass_uri, ecoregion_dataset_uri,
        ecoregion_lookup, biomass_stats_uri)
def _aggregate_results(forest_edge_distance_uri, biomass_uri, ecoregion_dataset_uri, ecoregion_lookup, biomass_stats_uri):
    cell_size = raster_utils.get_cell_size_from_uri(forest_edge_distance_uri)

    forest_edge_nodata = raster_utils.get_nodata_from_uri(forest_edge_distance_uri)
    biomass_nodata = raster_utils.get_nodata_from_uri(biomass_uri)    

    outfile = open(biomass_stats_uri, 'w')

    ecoregion_dataset = gdal.Open(ecoregion_dataset_uri)
    ecoregion_band = ecoregion_dataset.GetRasterBand(1)

    biomass_ds = gdal.Open(biomass_uri, gdal.GA_ReadOnly)
    biomass_band = biomass_ds.GetRasterBand(1)

    forest_edge_distance_ds = gdal.Open(forest_edge_distance_uri)
    forest_edge_distance_band = forest_edge_distance_ds.GetRasterBand(1)

    n_rows, n_cols = raster_utils.get_row_col_from_uri(biomass_uri)

    base_srs = osr.SpatialReference(biomass_ds.GetProjection())
    lat_lng_srs = base_srs.CloneGeogCS()
    coord_transform = osr.CoordinateTransformation(
        base_srs, lat_lng_srs)
    gt = biomass_ds.GetGeoTransform()

    grid_resolution_list = [25, 50, 100, 150, 200, 300, 400, 500]
    grid_coordinates = dict((resolution, {}) for resolution in grid_resolution_list)

    block_col_size, block_row_size = biomass_band.GetBlockSize()
    n_global_block_rows = int(math.ceil(float(n_rows) / block_row_size))
    n_global_block_cols = int(math.ceil(float(n_cols) / block_col_size))
    
    last_time = time.time()
    for global_block_row in xrange(n_global_block_rows):
        current_time = time.time()
        if current_time - last_time > 5.0:
            print "aggregation %.1f%% complete" % (global_block_row / float(n_global_block_rows) * 100)
            last_time = current_time
        for global_block_col in xrange(n_global_block_cols):
            xoff = global_block_col * block_col_size
            yoff = global_block_row * block_row_size
            win_xsize = min(block_col_size, n_cols - xoff)
            win_ysize = min(block_row_size, n_rows - yoff)
            biomass_block = biomass_band.ReadAsArray(
                xoff=xoff, yoff=yoff, win_xsize=win_xsize, win_ysize=win_ysize)
            forest_edge_distance_block = forest_edge_distance_band.ReadAsArray(
                xoff=xoff, yoff=yoff, win_xsize=win_xsize, win_ysize=win_ysize)
            ecoregion_id_block = ecoregion_band.ReadAsArray(
                xoff=xoff, yoff=yoff, win_xsize=win_xsize, win_ysize=win_ysize)

            for global_row in xrange(global_block_row*block_row_size, min((global_block_row+1)*block_row_size, n_rows)):
                for global_col in xrange(global_block_col*block_col_size, min((global_block_col+1)*block_col_size, n_cols)):
                    row_coord = gt[3] + global_row * gt[5]    
                    col_coord = gt[0] + global_col * gt[1]

                    local_row = global_row - global_block_row * block_row_size
                    local_col = global_col - global_block_col * block_col_size

                    lng_coord, lat_coord, _ = coord_transform.TransformPoint(
                        col_coord, row_coord)

                    #normalize the coordinates so they don't go negative
                    global_grid_row = row_coord - GLOBAL_UPPER_LEFT_ROW
                    global_grid_col = col_coord - GLOBAL_UPPER_LEFT_COL

                    ecoregion_id = ecoregion_id_block[local_row, local_col]
                    if (forest_edge_distance_block[local_row, local_col] != forest_edge_nodata and
                            forest_edge_distance_block[local_row, local_col] > 0.0 and
                            biomass_block[local_row, local_col] != biomass_nodata):

                        outfile.write("%f;%f;%f;%f;%s;%s;%s" % (
                            forest_edge_distance_block[local_row, local_col] * cell_size,
                            biomass_block[local_row, local_col], lat_coord, lng_coord,
                            ecoregion_lookup[ecoregion_id]['ECO_NAME'],
                            ecoregion_lookup[ecoregion_id]['ECODE_NAME'],
                            ecoregion_lookup[ecoregion_id]['WWF_MHTNAM']))
                        outfile.write(";%f;%f" % (global_grid_row, global_grid_col))
                        for global_grid_resolution in grid_resolution_list:
                            #output a grid coordinate in the form 'grid_row-grid_col'
                            grid_row = int(global_grid_row/(global_grid_resolution*1000))
                            grid_col = int(global_grid_col/(global_grid_resolution*1000))
                            grid_id = str(grid_row) + '-' + str(grid_col)
                            outfile.write(";%s" % grid_id)
                            if grid_id not in grid_coordinates[global_grid_resolution]:
                                grid_row_center = grid_row * global_grid_resolution*1000 + GLOBAL_UPPER_LEFT_ROW
                                grid_col_center = grid_col * global_grid_resolution*1000 + GLOBAL_UPPER_LEFT_COL
                                grid_lng_coord, grid_lat_coord, _ = coord_transform.TransformPoint(
                                    grid_col_center, grid_row_center)
                                grid_coordinates[global_grid_resolution][grid_id] = (grid_lat_coord, grid_lng_coord)
                                print grid_lat_coord, grid_lng_coord
                        outfile.write('/n')
    outfile.close()
    for global_grid_resolution in grid_resolution_list:
        output_dir, base_filename = os.path.split(biomass_stats_uri)
        basename = os.path.basename(base_filename)
        grid_output_file = open(os.path.join(output_dir, basename + '_' + str(global_grid_resolution) + '.csv'), 'w')
        grid_output_file.write('grid id;lat_coord;lng_coord/n')
        open(biomass_stats_uri, 'w')
        for grid_id, (lat, lng) in grid_coordinates[global_grid_resolution].iteritems():
            grid_output_file.write('%s;%s;%s/n' % (grid_id, lat, lng))
        grid_output_file.close()
    def run(self):
        """Aggregate biomass statistics against forest-edge distance.

            Streams GLOBAL_BIOMASS_URI, FOREST_EDGE_DISTANCE_URI and the
            rasterized ecoregion ids block by block and writes one
            ';'-separated record per valid forest pixel to
            BIOMASS_STATS_URI: edge distance (scaled by cell size),
            biomass, lat, lng, ecoregion names, then one grid id per
            resolution in GRID_RESOLUTION_LIST.
        """
        ecoregion_lookup = raster_utils.extract_datasource_table_by_key(
            ECOREGION_SHAPEFILE_URI, 'ECO_ID_U')
        ecoregion_nodata = -1
        # pixels outside any ecoregion polygon fall back to UNKNOWN
        ecoregion_lookup[ecoregion_nodata] = {
            'ECO_NAME': 'UNKNOWN',
            'ECODE_NAME': 'UNKNOWN',
            'WWF_MHTNAM': 'UNKNOWN',
            }
        cell_size = raster_utils.get_cell_size_from_uri(
            FOREST_EDGE_DISTANCE_URI)
        forest_edge_nodata = raster_utils.get_nodata_from_uri(
            FOREST_EDGE_DISTANCE_URI)
        biomass_nodata = raster_utils.get_nodata_from_uri(GLOBAL_BIOMASS_URI)
        outfile = open(BIOMASS_STATS_URI, 'w')

        ecoregion_dataset = gdal.Open(ECOREGION_DATASET_URI)
        ecoregion_band = ecoregion_dataset.GetRasterBand(1)

        biomass_ds = gdal.Open(GLOBAL_BIOMASS_URI, gdal.GA_ReadOnly)
        biomass_band = biomass_ds.GetRasterBand(1)

        forest_edge_distance_ds = gdal.Open(FOREST_EDGE_DISTANCE_URI)
        forest_edge_distance_band = forest_edge_distance_ds.GetRasterBand(1)

        n_rows, n_cols = raster_utils.get_row_col_from_uri(GLOBAL_BIOMASS_URI)

        # transform from the projected SRS to geographic lat/lng
        base_srs = osr.SpatialReference(biomass_ds.GetProjection())
        lat_lng_srs = base_srs.CloneGeogCS()
        coord_transform = osr.CoordinateTransformation(
            base_srs, lat_lng_srs)
        geo_trans = biomass_ds.GetGeoTransform()

        # iterate in raster-native blocks for efficient reads
        block_col_size, block_row_size = biomass_band.GetBlockSize()
        n_global_block_rows = int(math.ceil(float(n_rows) / block_row_size))
        n_global_block_cols = int(math.ceil(float(n_cols) / block_col_size))

        last_time = time.time()
        for global_block_row in xrange(n_global_block_rows):
            current_time = time.time()
            if current_time - last_time > 5.0:
                # throttled progress report, at most every 5 seconds
                print (
                    "aggregation %.1f%% complete" %
                    (global_block_row / float(n_global_block_rows) * 100))
                last_time = current_time
            for global_block_col in xrange(n_global_block_cols):
                xoff = global_block_col * block_col_size
                yoff = global_block_row * block_row_size
                # clamp the read window at the raster edge
                win_xsize = min(block_col_size, n_cols - xoff)
                win_ysize = min(block_row_size, n_rows - yoff)
                biomass_block = biomass_band.ReadAsArray(
                    xoff=xoff, yoff=yoff, win_xsize=win_xsize,
                    win_ysize=win_ysize)
                forest_edge_distance_block = (
                    forest_edge_distance_band.ReadAsArray(
                        xoff=xoff, yoff=yoff, win_xsize=win_xsize,
                        win_ysize=win_ysize))
                ecoregion_id_block = ecoregion_band.ReadAsArray(
                    xoff=xoff, yoff=yoff, win_xsize=win_xsize,
                    win_ysize=win_ysize)

                for global_row in xrange(
                        global_block_row*block_row_size,
                        min((global_block_row+1)*block_row_size, n_rows)):
                    for global_col in xrange(
                            global_block_col*block_col_size,
                            min((global_block_col+1)*block_col_size, n_cols)):
                        # projected coordinates of this pixel
                        row_coord = (
                            geo_trans[3] + global_row * geo_trans[5])
                        col_coord = (
                            geo_trans[0] + global_col * geo_trans[1])

                        # block-local indices for the arrays read above
                        local_row = (
                            global_row - global_block_row * block_row_size)
                        local_col = (
                            global_col - global_block_col * block_col_size)

                        lng_coord, lat_coord, _ = (
                            coord_transform.TransformPoint(
                                col_coord, row_coord))

                        ecoregion_id = ecoregion_id_block[local_row, local_col]
                        # only emit pixels inside the forest (positive edge
                        # distance) with valid biomass
                        if (forest_edge_distance_block[local_row, local_col] !=
                                forest_edge_nodata and
                                forest_edge_distance_block
                                [local_row, local_col] > 0.0 and
                                biomass_block
                                [local_row, local_col] != biomass_nodata):
                            outfile.write("%f;%f;%f;%f;%s;%s;%s" % (
                                forest_edge_distance_block
                                [local_row, local_col] * cell_size,
                                biomass_block[local_row, local_col],
                                lat_coord, lng_coord,
                                ecoregion_lookup[ecoregion_id]['ECO_NAME'],
                                ecoregion_lookup[ecoregion_id]['ECODE_NAME'],
                                ecoregion_lookup[ecoregion_id]['WWF_MHTNAM']))
                            for global_grid_resolution in GRID_RESOLUTION_LIST:
                                #output a grid coordinate in the form
                                #'grid_row-grid_col'
                                # NOTE(review): grid indices are computed
                                # from the offset within this raster
                                # (geo_trans origin), unlike the sibling
                                # _aggregate_results which normalizes by
                                # GLOBAL_UPPER_LEFT_ROW/COL -- confirm
                                grid_row = (
                                    int((geo_trans[3] - row_coord) /
                                        (global_grid_resolution*1000)))
                                grid_col = (
                                    int((col_coord - geo_trans[0]) /
                                        (global_grid_resolution*1000)))
                                grid_id = str(grid_row) + '-' + str(grid_col)
                                outfile.write(";%s" % grid_id)
                            outfile.write('\n')
        outfile.close()