def __init__(self, path, **kwargs):
    if not os.path.exists(path):
        raise NameError('Path ' + str(path) + ' does not exist. Attempting to make an ArrayFrame out of it thus failed.')

    self.load_data_on_init = kwargs.get('load_data_on_init', False)

    self.path = path
    self.ds = gdal.Open(path, gdal.GA_Update)
    self.band = self.ds.GetRasterBand(1)

    self.num_cols = self.ds.RasterXSize
    self.n_cols = self.num_cols
    self.num_rows = self.ds.RasterYSize
    self.n_rows = self.num_rows
    self.shape = (self.num_rows, self.num_cols)
    self.size = self.num_cols * self.num_rows
    self.ndv = self.band.GetNoDataValue()

    # TODO Consider eliminating one of data_type / datatype.
    self.data_type = self.band.DataType
    self.datatype = self.data_type

    self.projection = self.ds.GetProjection()
    self.geotransform = self.ds.GetGeoTransform()
    self.cell_size = self.geotransform[1]
    self.res = self.cell_size
    self.resolution = self.cell_size
    self.x_res = self.res
    self.y_res = self.geotransform[5]
    if self.x_res != abs(self.y_res):
        L.warning('Warning, x_res not same as abs(y_res)')

    self.raster_info = hb.get_raster_info(self.path)

    self.old_bounding_box = hb.get_bounding_box(self.path, return_in_old_order=True)
    self.bounding_box = self.raster_info['bounding_box']  # projected coordinates as [minx, miny, maxx, maxy]
    self.bb = self.bounding_box

    # NOTE: self.bb is [minx, miny, maxx, maxy], so despite their names the *_lat attributes
    # below hold x coordinates and the *_lon attributes hold y coordinates. The names are
    # kept as-is for backward compatibility.
    self.left_lat = self.bb[0]
    self.bottom_lon = self.bb[1]
    self.right_lat = self.bb[2]
    self.top_lon = self.bb[3]

    self.lat_size = self.left_lat - self.right_lat
    self.lon_size = self.top_lon - self.bottom_lon

    # Note that by definition of being an ArrayFrame (rather than a block of one), this will
    # always have 0 for the first two entries and the second two will (should) be equivalent
    # to n_rows, n_cols.
    self.cr_widthheight = hb.bb_path_to_cr_size(self.path, self.bb)

    # self.info = gdal.Info(self.ds, format='json')
    self.info_as_string = gdal.Info(self.ds)

    self._data = None
    self.data_loaded = False
    # self.data = None

    self.stats = None  # Time consuming, but can be loaded via the set_stats() method.

    # Set when self.stats is set via set_stats()
    self.min = None
    self.max = None
    self.median = None
    self.mean = None

    # Save these so that they don't need to be recomputed often.
    self._valid_mask = None
    self.valid_mask_set = False
    self.num_valid = None

    self._ndv_mask = None
    self.ndv_mask_set = False
    self.num_ndv = None

    self._nonzero_mask = None
    self.nonzero_mask_set = False
    self.num_nonzero = None

    self._zero_mask = None
    self.zero_mask_set = False
    self.num_zero = None

    if self.load_data_on_init:
        self.load_data()

    if self.ndv is None:
        L.info('NDV for raster at ' + self.path + ' was not set.')

    if not self.geotransform:
        L.critical('Geotransform not set for arrayframe at ' + self.path + '. Forcing to WGS84 global.')

    if not self.projection:
        L.critical('Projection not set for arrayframe at ' + self.path)
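# A minimal usage sketch (an illustrative addition, not part of the original module). It assumes
# the __init__ above belongs to the class exposed as hb.ArrayFrame and that raster_path points to
# an existing single-band GeoTIFF; the helper name and path argument are hypothetical.
def _example_arrayframe_attributes(raster_path):
    import hazelbean as hb

    af = hb.ArrayFrame(raster_path, load_data_on_init=True)
    print(af.shape, af.cell_size, af.ndv)  # (n_rows, n_cols), cell size, nodata value
    print(af.bounding_box)                 # projected coordinates as [minx, miny, maxx, maxy]
    return af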
import os

import hazelbean as hb

global_random_floats_15m_32bit_path = os.path.join(hb.TEST_DATA_DIR, 'global_random_floats_15m_32bit.tif')
two_poly_eckert_iv_aoi_path = os.path.join(hb.TEST_DATA_DIR, 'two_poly_eckert_iv_aoi.shp')
two_poly_wgs84_aoi_path = os.path.join(hb.TEST_DATA_DIR, 'two_poly_wgs84_aoi.shp')

a = hb.as_array(global_random_floats_15m_32bit_path)

# Old clip method, kept for reference:
# hb.clip_dataset_uri(global_random_floats_15m_32bit_path, two_poly_wgs84_aoi_path,
#                     hb.temp('.tif', 'clip1', False, 'tests'))

base_raster_path_list = [global_random_floats_15m_32bit_path]
target_raster_path_list = [hb.temp('.tif', 'clip1', False, 'tests')]
resample_method_list = ['bilinear']
target_pixel_size = hb.get_raster_info(global_random_floats_15m_32bit_path)['pixel_size']
bounding_box_mode = 'intersection'
base_vector_path_list = [two_poly_wgs84_aoi_path]
raster_align_index = 0

hb.align_and_resize_raster_stack(
    base_raster_path_list, target_raster_path_list, resample_method_list,
    target_pixel_size, bounding_box_mode,
    base_vector_path_list=base_vector_path_list,
    all_touched=True,
    raster_align_index=raster_align_index,
    gtiff_creation_options=hb.DEFAULT_GTIFF_CREATION_OPTIONS)

hb.clip_raster_by_vector(global_random_floats_15m_32bit_path,
                         hb.temp('.tif', 'clip2', False, 'tests'),
                         two_poly_wgs84_aoi_path)
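# A small follow-up sketch (an addition, not part of the original snippet): inspect the clip
# produced by align_and_resize_raster_stack above. This assumes the False argument to hb.temp
# means the temporary file is still on disk at this point.
clip1_path = target_raster_path_list[0]
clip1_info = hb.get_raster_info(clip1_path)
print(clip1_info['bounding_box'], clip1_info['pixel_size'])
print(hb.as_array(clip1_path).shape)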
def get_bounding_box(input_path, return_in_basemap_order=False, return_in_old_order=False):
    """Get the bounding box of input_path, with coordinates in projected units.

    WARNING: this changed notation from the old UL, LR order to
    [xmin, ymin, xmax, ymax] and may not be backward compatible. It also differs
    from the bounding box reported by pygeoprocessing insofar as the old order
    was UL, LR (whereas PGP is LL, UR).

    Args:
        input_path (string): a URI to a GDAL dataset (or a .shp vector).
        return_in_basemap_order (bool): if True, return
            [llcrnrlat, urcrnrlat, llcrnrlon, urcrnrlon].
        return_in_old_order (bool): if True, return the legacy order
            [upper_left_x, upper_left_y, lower_right_x, lower_right_y].

    Returns:
        bounding_box (list): [xmin, ymin, xmax, ymax] in projected coordinates,
            unless one of the reordering flags above is set.
    """
    if os.path.splitext(input_path)[1] == '.shp':
        bounding_box = hb.get_vector_info_hb(input_path)['bounding_box']
    else:
        bounding_box = hb.get_raster_info(input_path)['bounding_box']

    if return_in_basemap_order:
        dataset = gdal.Open(input_path)
        geotransform = dataset.GetGeoTransform()
        n_cols = dataset.RasterXSize
        n_rows = dataset.RasterYSize

        bounding_box = [geotransform[0],
                        geotransform[3],
                        geotransform[0] + n_cols * geotransform[1],
                        geotransform[3] + n_rows * geotransform[5]]

        # Close and clean up the dataset.
        gdal.Dataset.__swig_destroy__(dataset)
        dataset = None

        bounding_box = [
            bounding_box[3],  # llcrnrlat
            bounding_box[1],  # urcrnrlat
            bounding_box[0],  # llcrnrlon
            bounding_box[2],  # urcrnrlon
        ]

        return bounding_box

    if return_in_old_order:
        dataset = gdal.Open(input_path)
        geotransform = dataset.GetGeoTransform()
        n_cols = dataset.RasterXSize
        n_rows = dataset.RasterYSize

        bounding_box = [geotransform[0],
                        geotransform[3],
                        geotransform[0] + n_cols * geotransform[1],
                        geotransform[3] + n_rows * geotransform[5]]

        # Close and clean up the dataset.
        gdal.Dataset.__swig_destroy__(dataset)
        dataset = None

        return bounding_box

    return bounding_box
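# A minimal usage sketch (an illustrative addition): the three orderings get_bounding_box can
# return for the same raster. The helper name and raster_path argument are hypothetical.
def _example_bounding_box_orders(raster_path):
    default_bb = get_bounding_box(raster_path)                                 # [xmin, ymin, xmax, ymax]
    old_bb = get_bounding_box(raster_path, return_in_old_order=True)           # [ulx, uly, lrx, lry]
    basemap_bb = get_bounding_box(raster_path, return_in_basemap_order=True)   # [llcrnrlat, urcrnrlat, llcrnrlon, urcrnrlon]
    return default_bb, old_bb, basemap_bb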
def resample_to_match(input_path,
                      match_path,
                      output_path,
                      resample_method='bilinear',
                      output_data_type=None,
                      src_ndv=None,
                      ndv=None,
                      compress=True,
                      ensure_fits=False,
                      gtiff_creation_options=hb.DEFAULT_GTIFF_CREATION_OPTIONS,
                      calc_raster_stats=False,
                      add_overviews=False,
                      pixel_size_override=None,
                      verbose=False,
                      ):
    if pixel_size_override is None:
        target_pixel_size = (hb.get_cell_size_from_uri(match_path), -hb.get_cell_size_from_uri(match_path))
    elif not isinstance(pixel_size_override, (tuple, list)):
        target_pixel_size = (pixel_size_override, -pixel_size_override)
    else:
        # pixel_size_override was already given as an (x_size, y_size) pair.
        target_pixel_size = tuple(pixel_size_override)

    target_sr_wkt = hb.get_raster_info(match_path)['projection']

    target_bb = hb.get_raster_info_hb(match_path)['bounding_box']

    if output_data_type is None:
        output_data_type = hb.get_datatype_from_uri(match_path)

    if src_ndv is None:
        src_ndv = hb.get_ndv_from_path(input_path)

    if ndv is None:
        dst_ndv = hb.get_ndv_from_path(match_path)
    else:
        if output_data_type < 5:
            dst_ndv = 255
        else:
            dst_ndv = -9999.0

    if ensure_fits:
        # This addition to the core geoprocessing code fixes the case where the alignment moved
        # the target tif up and to the left, but in a way that then truncated 1 row/col on the
        # bottom right, causing wrong-shape raster_math errors.
        # target_bounding_box = reduce(
        #     functools.partial(hb.merge_bounding_boxes, mode=bounding_box_mode),
        #     [info['bounding_box'] for info in
        #      (raster_info_list + vector_info_list)])
        #
        # if original_bounding_box[2] > target_bounding_box[2]:
        #     target_bounding_box[2] += target_pixel_size[0]
        #
        # if original_bounding_box[3] > target_bounding_box[3]:
        #     target_bounding_box[3] -= target_pixel_size[1]

        target_bb[2] += target_pixel_size[0]
        target_bb[3] += target_pixel_size[1]

    if compress is True:
        gtiff_creation_options = (
            'TILED=YES',
            'BIGTIFF=YES',
            'COMPRESS=DEFLATE',
            'BLOCKXSIZE=256',
            'BLOCKYSIZE=256',
        )
    else:
        gtiff_creation_options = (
            'TILED=YES',
            'BIGTIFF=YES',
            'BLOCKXSIZE=256',
            'BLOCKYSIZE=256',
        )

    hb.warp_raster_hb(input_path, target_pixel_size, output_path, resample_method,
                      target_bb=target_bb, base_sr_wkt=None, target_sr_wkt=target_sr_wkt,
                      gtiff_creation_options=gtiff_creation_options,
                      n_threads=None, vector_mask_options=None,
                      output_data_type=output_data_type,
                      src_ndv=src_ndv,
                      dst_ndv=dst_ndv,
                      calc_raster_stats=calc_raster_stats,
                      add_overviews=add_overviews,
                      )
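# A minimal usage sketch (an illustrative addition): warp one raster onto the grid (projection,
# resolution, and extent) of another. The three paths are hypothetical placeholders.
def _example_resample_to_match(input_path, match_path, output_path):
    resample_to_match(input_path, match_path, output_path,
                      resample_method='bilinear',  # a nearest-neighbor method would suit categorical data
                      compress=True,
                      ensure_fits=True)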
def process_coarse_change_maps():
    global p
    L.info('process_coarse_change_maps.')

    # Change maps are in this directory and must be of the format [CLASS_ID_INT]_[something, but anything else].tif
    if not os.path.isdir(p.coarse_change_maps_dir):
        p.coarse_change_maps_dir = os.path.split(p.coarse_change_maps_dir)[0]
        if not os.path.isdir(p.coarse_change_maps_dir):
            raise NameError('Unable to parse coarse_change_maps_dir.')
    tifs_in_dir = hb.list_filtered_paths_nonrecursively(p.coarse_change_maps_dir, include_extensions='.tif')

    p.change_map_paths = []
    for path in tifs_in_dir:
        try:
            rendered_int = int(hb.file_root(path).split('_')[0])
        except:
            rendered_int = None
        if isinstance(rendered_int, int):
            p.change_map_paths.append(path)
    p.change_map_raster_infos = [hb.get_raster_info(i) for i in p.change_map_paths]

    # Test that all the change maps have the same properties.
    if len(set([i['geotransform'] for i in p.change_map_raster_infos])) != 1:
        for j in [i['geotransform'] for i in p.change_map_raster_infos]:
            L.critical('geotransform: ' + str(j))
        # raise NameError('The maps in coarse change maps dir are not all the same shape, projection, etc, or they have been improperly named/formatted.')

    # p.current_change_in_crop_extent_path = os.path.join(p.cur_dir, 'change_in_crop_extent.tif')
    p.current_change_map_paths = []

    p.float_ndv = None
    p.int_ndv = 255

    L.info('change_map_paths: ' + str(p.change_map_paths))
    p.zone_transition_sums = OrderedDict()
    p.classes_projected_to_change = []
    for path in p.change_map_paths:
        changing_class_id = int(os.path.split(path)[1].split('_')[0])
        p.classes_projected_to_change.append(changing_class_id)
        if not p.float_ndv:
            p.float_ndv = hb.get_nodata_from_uri(path)
            if p.float_ndv is None:
                p.float_ndv = -9999.0
        new_path = os.path.join(p.cur_dir, os.path.split(path)[1])
        p.current_change_map_paths.append(new_path)

        if p.run_this:  # NOTE NONSTANDARD placement of run_this
            hb.clip_raster_by_vector(str(path), str(new_path), str(p.area_of_interest_path),
                                     resample_method='nearest',
                                     all_touched=True, verbose=True,
                                     ensure_fits=True,
                                     gtiff_creation_options=hb.DEFAULT_GTIFF_CREATION_OPTIONS)

            # To make the model not run in zones with zero change, we collect these sums and
            # prevent running if all of them are zero.
            current_coarse_array = hb.as_array(new_path)
            current_sum = np.sum(current_coarse_array[current_coarse_array != p.float_ndv])
            p.zone_transition_sums[changing_class_id] = current_sum

    p.run_this_zone = True
    if np.sum([float(i) for i in p.zone_transition_sums.values()]) <= 0:
        p.run_this_zone = False

    L.info('current_change_map_paths: ' + str(p.current_change_map_paths))
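# A minimal sketch (an illustrative addition) of the filename convention the function above
# expects: change maps must be named [CLASS_ID_INT]_anything.tif, and the leading integer is
# parsed as the changing class id. The example filename is hypothetical.
def _example_change_map_naming():
    example_filename = '2_projected_cropland_expansion.tif'
    changing_class_id = int(example_filename.split('_')[0])  # -> 2
    return changing_class_id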