def test_arrayframe_add(self):
    """hb.add should accept both raster paths and ArrayFrame instances."""
    # Path-based invocation.
    path_result = hb.temp('.tif', 'testing_arrayframe_add', True)
    hb.add(self.global_1deg_raster_path, self.global_1deg_raster_path, path_result)

    # ArrayFrame-based invocation of the same operation.
    af_result = hb.temp('.tif', 'testing_arrayframe_add', True)
    frame = hb.ArrayFrame(self.global_1deg_raster_path)
    hb.add(frame, frame, af_result)
def test_read_1d_npy_chunk(self):
    """read_1d_npy_chunk(path, start, length) returns the slice r[start:start+length].

    Fix: removed a dead local `path` that hard-coded a machine-specific
    Windows temp directory and was never used; tightened the assertion to
    assertEqual for a clearer failure message.
    """
    r = np.random.randint(2, 9, 200)
    temp_path = hb.temp('.npy', remove_at_exit=True)
    hb.save_array_as_npy(r, temp_path)
    output = hb.read_1d_npy_chunk(temp_path, 3, 8)
    # Sums match iff the chunk covers the same values as the in-memory slice.
    self.assertEqual(sum(r[3:3 + 8]), sum(output))
def test_extract_features_in_shapefile_by_attribute(self):
    """Extract only the features whose adm1_code attribute equals 'NLD-903'."""
    source_shp_uri = 'data/two_poly_wgs84_aoi.shp'
    filtered_shp_uri = hb.temp('.shp', remove_at_exit=True)
    attribute_name = 'adm1_code'
    attribute_value = 'NLD-903'
    hb.extract_features_in_shapefile_by_attribute(
        source_shp_uri, filtered_shp_uri, attribute_name, attribute_value)
def arrayframe_load_and_save():
    """Write a small test array to a GeoTIFF and load it back as an ArrayFrame."""
    source_array = np.arange(0, 18, 1).reshape((3, 6))
    tif_path = hb.temp('.tif', remove_at_exit=False)
    geotransform = hb.calc_cylindrical_geotransform_from_array(source_array)
    # projection = hb.get_wkt_from_epsg_code(hb.common_epsg_codes_by_name['plate_carree'])
    projection = 'plate_carree'
    hb.save_array_as_geotiff(
        source_array,
        tif_path,
        geotransform_override=geotransform,
        projection_override=projection,
    )
    hb.ArrayFrame(tif_path)
def test_arrayframe_load_and_save(self):
    """Save an array as a byte GeoTIFF with explicit NDV, then load it as an ArrayFrame."""
    source_array = np.arange(0, 18, 1).reshape((3, 6))
    tif_path = hb.temp('.tif', remove_at_exit=True)
    geotransform = hb.calc_cylindrical_geotransform_from_array(source_array)
    # projection = hb.get_wkt_from_epsg_code(hb.common_epsg_codes_by_name['plate_carree'])
    projection = 'wgs84'
    no_data_value = 255
    gdal_byte_type = 1
    hb.save_array_as_geotiff(
        source_array,
        tif_path,
        geotransform_override=geotransform,
        projection_override=projection,
        ndv=no_data_value,
        data_type=gdal_byte_type,
    )
    hb.ArrayFrame(tif_path)
def test_reclassify_int_array_by_dict_to_ints(self):
    """hb.reclassify on an ndarray input should return an ndarray.

    Fix: `np.float` was a deprecated alias for the builtin `float` and was
    removed in NumPy 1.24, so `.astype(np.float)` raises AttributeError on
    modern NumPy. Use the builtin `float` (same dtype, float64) instead.
    """
    a = np.random.randint(1, 7, (32, 32)).astype(float)
    rules = {
        2: 23,
        # 3: 23,
        # 4: 24,
        # 5: 25,
        # 6: 26,
        # 7: 27,
    }
    temp_path = hb.temp('.tif', remove_at_exit=True)
    b = hb.reclassify(a, rules, temp_path)
    self.assertIsInstance(b, np.ndarray)
def get_tile_names_and_degrees_from_aoi(shapefile_uri, tile_increment):
    """Get a list of strings representing tile names under the nsew-degree structure

    ie ['n10w90', 'n10w85', 'n15w90', 'n15w85', 'n20w90', 'n20w85']

    Args:
        shapefile_uri: path to an AOI vector; it is reprojected to WGS84
            before its bounding box is read.
        tile_increment: tile size in degrees; bounds are snapped outward to
            multiples of this increment.

    Returns:
        (tile_names, degrees): tile name strings plus the snapped degree
        bounds list. NOTE(review): the degrees list appears ordered
        [north, west, south, east] based on which bb entries feed it —
        confirm against hb.get_datasource_bounding_box's ordering.

    Fixes: removed a leftover debug print of shapefile_uri and the unused
    enumerate counters in the tile-name loops.
    """
    temp_uri = hb.temp(ext='.shp', remove_at_exit=True)
    hb.reproject_datasource_uri(shapefile_uri, hb.wgs_84_wkt, temp_uri)
    tile_names = []
    bb = hb.get_datasource_bounding_box(temp_uri)
    # Snap each bound outward so the tile grid fully contains the AOI.
    degrees = [0, 0, 0, 0]
    degrees[0] = hb.round_to_nearest_containing_increment(bb[1], tile_increment, 'up')
    degrees[1] = hb.round_to_nearest_containing_increment(bb[0], tile_increment, 'down')
    degrees[2] = hb.round_to_nearest_containing_increment(bb[3], tile_increment, 'down')
    degrees[3] = hb.round_to_nearest_containing_increment(bb[2], tile_increment, 'up')
    ns_degree_increments = list(range(degrees[2], degrees[0] + tile_increment, tile_increment))
    ew_degree_increments = list(range(degrees[1], degrees[3] + tile_increment, tile_increment))
    for ns in ns_degree_increments:
        for ew in ew_degree_increments:
            # Hemisphere letter plus zero-padded magnitude, e.g. 's05', 'w090'.
            to_append = ''
            if ns < 0:
                to_append += 's' + str(ns).replace('-', '').zfill(2)
            else:
                to_append += 'n' + str(ns).replace('-', '').zfill(2)
            if ew < 0:
                to_append += 'w' + str(ew).replace('-', '').zfill(3)
            else:
                to_append += 'e' + str(ew).replace('-', '').zfill(3)
            tile_names.append(to_append)
    return tile_names, degrees
def clip_hydrosheds_dem_from_aoi(output_uri, aoi_uri, match_uri):
    """Merge the HydroSHEDS 3s hydrologically-conditioned DEM tiles covering
    the AOI into a temp raster, then clip that raster to the AOI.

    NOTE(review): match_uri is currently unused — confirm whether a
    match/alignment step was intended here.
    """
    dem_tiles_dir = os.path.join(
        hb.BULK_DATA_DIR, 'hydrosheds/3s/hydrologically_conditioned_dem')
    merged_dem_uri = hb.temp('.tif', remove_at_exit=True)
    merge_hydrosheds_by_aoi(dem_tiles_dir, merged_dem_uri, aoi_uri)
    hb.clip_dataset_uri(
        merged_dem_uri, aoi_uri, output_uri, assert_datasets_projected=False)
def __truediv__(self, after):
    """Element-wise division of this raster by `after`, written to a temp raster.

    Fixes: the inner op returned `left + right` — addition — so the `/`
    operator silently behaved like `+`. Also set remove_at_exit=True to
    match the sibling operator (__sub__) so the temp output is cleaned up.
    """
    def op(left, right):
        return left / right
    output_path = hb.temp(filename_start='div', remove_at_exit=True)
    return hb.raster_calculator_af_flex([self.path, after.path], op, output_path)
def __sub__(self, after):
    """Element-wise difference of this raster and `after`, written to a temp raster."""
    def op(minuend, subtrahend):
        return minuend - subtrahend

    target_path = hb.temp(filename_start='sub', remove_at_exit=True)
    input_paths = [self.path, after.path]
    return hb.raster_calculator_af_flex(input_paths, op, target_path)
def test_create_vector_from_raster_extents(self):
    """An extents vector file should exist after creation from the 5m raster."""
    extents_shp_path = hb.temp('.shp', remove_at_exit=True)
    hb.create_vector_from_raster_extents(
        self.global_5m_raster_path, extents_shp_path)
    self.assertTrue(os.path.exists(extents_shp_path))
def test_resample_arrayframe(self):
    """hb.resample should run without error on the 5m global raster."""
    resampled_path = hb.temp('.tif', 'temp_test_resample_array', True)
    hb.resample(self.global_5m_raster_path, resampled_path, 12)
import hazelbean as hb

# Test fixture paths from the hazelbean test-data directory.
global_random_floats_15m_32bit_path = os.path.join(hb.TEST_DATA_DIR, 'global_random_floats_15m_32bit.tif')
two_poly_eckert_iv_aoi_path = os.path.join(hb.TEST_DATA_DIR, 'two_poly_eckert_iv_aoi.shp')
two_poly_wgs84_aoi_path = os.path.join(hb.TEST_DATA_DIR, 'two_poly_wgs84_aoi.shp')

# Load the source raster into memory (result currently unused beyond this load).
a = hb.as_array(global_random_floats_15m_32bit_path)

# Old clip method for reference
# hb.clip_dataset_uri(global_random_floats_15m_32bit_path, two_poly_wgs84_aoi_path, hb.temp('.tif', 'clip1', False, 'tests'))

# Clip method 1: align-and-resize the raster stack against the WGS84 AOI polygon.
base_raster_path_list = [global_random_floats_15m_32bit_path]
target_raster_path_list = [hb.temp('.tif', 'clip1', False, 'tests')]
resample_method_list = ['bilinear']
# Keep the source pixel size so only the extent changes, not the resolution.
target_pixel_size = hb.get_raster_info(global_random_floats_15m_32bit_path)['pixel_size']
bounding_box_mode = 'intersection'
base_vector_path_list = [two_poly_wgs84_aoi_path]
# Align output grid to the first (only) raster in the stack.
raster_align_index = 0
hb.align_and_resize_raster_stack(
    base_raster_path_list, target_raster_path_list, resample_method_list,
    target_pixel_size, bounding_box_mode, base_vector_path_list=base_vector_path_list,
    all_touched=True,
    raster_align_index=raster_align_index, gtiff_creation_options=hb.DEFAULT_GTIFF_CREATION_OPTIONS)

# Clip method 2: direct clip of the raster by the AOI vector.
hb.clip_raster_by_vector(global_random_floats_15m_32bit_path, hb.temp('.tif', 'clip2', False, 'tests'), two_poly_wgs84_aoi_path)
def create_physical_suitability():
    """Build a physical-suitability raster from DEM-derived terrain stats and SOC.

    Operates entirely through the module-level project object `p` (paths and
    run flags) — reads p.workspace_dir, p.area_of_interest_path, match paths,
    and p.physical_suitability_dir; writes aligned rasters into
    p.physical_suitability_dir and sets several p.* path attributes as side
    effects. NOTE(review): mutates p.match_float_path near the end — confirm
    downstream steps expect that reassignment.
    """
    global p
    L.info('Creating physical suitability layer from base data.')
    # physical suitability calculations, though for speed it's included as a base datum.
    dem_unaligned_path = hb.temp('.tif', folder=p.workspace_dir, remove_at_exit=True)  # hb.temp('.tif', remove_at_exit=True)
    stats_to_calculate = ['TRI']
    # Clip the HydroSHEDS DEM to the AOI, then derive terrain stats (TRI) from it.
    hb.clip_hydrosheds_dem_from_aoi(dem_unaligned_path, p.area_of_interest_path, p.match_float_path)
    hb.calculate_topographic_stats_from_dem(dem_unaligned_path, p.physical_suitability_dir, stats_to_calculate=stats_to_calculate, output_suffix='unaligned')
    dem_path = os.path.join(p.physical_suitability_dir, 'dem.tif')
    hb.align_dataset_to_match(dem_unaligned_path, p.match_float_path, dem_path, aoi_uri=p.area_of_interest_path)
    # Align each derived stat raster to the float match grid; the unaligned
    # intermediates are scheduled for deletion at exit.
    for stat in stats_to_calculate:
        stat_unaligned_path = os.path.join(p.physical_suitability_dir, stat + '_unaligned.tif')
        hb.delete_path_at_exit(stat_unaligned_path)
        stat_path = os.path.join(p.physical_suitability_dir, stat + '.tif')
        hb.align_dataset_to_match(stat_unaligned_path, p.match_float_path, stat_path, resample_method='bilinear', align_to_match=True, aoi_uri=p.area_of_interest_path)
    # Align soil organic carbon and TRI base data to the int match grid.
    soc_path = os.path.join(p.physical_suitability_dir, 'soc.tif')
    hb.align_dataset_to_match(p.base_data_soc_path, p.match_int_path, soc_path, aoi_uri=p.area_of_interest_path, output_data_type=7)
    tri_path = os.path.join(p.physical_suitability_dir, 'tri.tif')
    hb.align_dataset_to_match(p.base_data_tri_path, p.match_int_path, tri_path, aoi_uri=p.area_of_interest_path, output_data_type=7)
    # TODOO Create cythonized array_sum_product()
    p.physical_suitability_path = os.path.join(p.physical_suitability_dir, 'physical_suitability.tif')
    soc_array = hb.as_array(soc_path)
    tri_array = hb.as_array(tri_path)
    # Suitability heuristic: log-ratio of SOC to TRI. NOTE(review): this value
    # is recomputed from disk inside the run_this branch below, overwriting
    # this in-memory result — confirm the first computation is still needed.
    physical_suitability_array = np.log(soc_array) - np.log(tri_array)
    # p.global_physical_suitability_path = os.path.join(p.model_base_data_dir, 'physical_suitability_compressed.tif')
    p.clipped_physical_suitability_path = os.path.join(
        p.cur_dir, 'physical_suitability.tif')
    if p.run_this and p.run_this_zone:
        # hb.clip_raster_by_vector(p.global_physical_suitability_path, p.physical_suitability_path, p.coarse_res_aoi_path, all_touched=True)
        # Clip to the AOI while keeping alignment with the coarser change map grid.
        hb.clip_while_aligning_to_coarser(
            p.physical_suitability_path, p.clipped_physical_suitability_path, p.area_of_interest_path,
            p.current_change_map_paths[0], resample_method='nearest',
            all_touched=True, verbose=True,
            ensure_fits=True, gtiff_creation_options=hb.DEFAULT_GTIFF_CREATION_OPTIONS)
        p.current_physical_suitability_path = p.clipped_physical_suitability_path  # NOTE awkward naming
        # hb.clip_dataset_uri(p.global_physical_suitability_path, p.coarse_res_aoi_path, p.physical_suitability_path, False, all_touched=False)
        physical_suitability_array = hb.as_array(
            p.current_physical_suitability_path)
        p.match_float_path = p.current_physical_suitability_path
        # Suppress warnings from the log/divide ops above; then zero out
        # sentinel values outside a plausible suitability range.
        np.seterr(divide='ignore', invalid='ignore')
        physical_suitability_array = np.where(
            physical_suitability_array > -1000, physical_suitability_array, 0)
        physical_suitability_array = np.where(
            physical_suitability_array < 100000000, physical_suitability_array, 0)
        hb.save_array_as_geotiff(physical_suitability_array, p.current_physical_suitability_path, p.match_float_path, compress=True)
def test_describe(self):
    """hb.describe should run quietly on a saved .npy array."""
    random_grid = np.random.rand(5, 5)
    npy_path = hb.temp('.npy', remove_at_exit=True)
    hb.save_array_as_npy(random_grid, npy_path)
    hb.describe(npy_path, surpress_print=True, surpress_logger=True)