def __init__(self, prj_path, path_in_raw, preproccessed_input_path, results):

    self.raw_input_data_path = "%s/%s/" % (prj_path, path_in_raw)
    self.results_data_path = "%s/%s/Processed Data/" % (prj_path, results)
    self.preproccessed_input_path = "%s/%s/" % (prj_path, preproccessed_input_path)

    self.GFA_area = "%s/RESULTS_GFA_TOT_BUILD.tif" % self.results_data_path
    self.rel_CDD = "%s/HDD_CDD_data_new/CDD_EnergyIntensityIndicator.tif" % self.preproccessed_input_path
    self.NUTSID_map = "%s/NUTS3_id_number.tif" % self.preproccessed_input_path
    self.csv_NUTS_data = "%s/NUTS3_data.csv" % self.raw_input_data_path

    """
    # Standard Vector layer (Nuts 3 shape file)
    self.strd_vector_path_NUTS = self.raw_input_data_path + "/vector_input_data/" + "NUTS3.shp"
    # Standard raster Layer
    self.strd_raster_path_full = self.raw_input_data_path + os.sep + "Population.tif"
    """

    assert os.path.exists(self.GFA_area)
    assert os.path.exists(self.rel_CDD)
    assert os.path.exists(self.NUTSID_map)
    assert os.path.exists(self.csv_NUTS_data)

    self.datatype_int = 'int32'
    self.datatype = 'f4'
    self.noDataValue = 0

    (NUTS_DATA, EXPORT_COLUMNS, SCALING_FACTOR,
     CUTOFF_Value) = RCD.READ_CSV_DATA(self.csv_NUTS_data)
    A = np.zeros(NUTS_DATA['ENERGY_COOL_per_m2'].shape[0], dtype="f4")
    A[0:] = NUTS_DATA['ENERGY_COOL_per_m2']

    ARR_NUTS_ID_NUMBER, geotransform_obj = SF.rrl(
        self.NUTSID_map, data_type=self.datatype_int)
    ENERGY_COOL_per_m2 = A[ARR_NUTS_ID_NUMBER] * 0.5 / 1000.0

    rel_CDD, geotransform_obj = SF.rrl(self.rel_CDD, data_type=self.datatype)
    ENERGY_COOL_per_m2 *= np.minimum(3, rel_CDD)

    gfa_tot, geotransform_obj = SF.rrl(self.GFA_area, data_type=self.datatype)
    ENERGY_COOL = gfa_tot * ENERGY_COOL_per_m2

    SaveLayerDict = {}
    SaveLayerDict["cool"] = [
        ("%s/%s.tif" % (self.results_data_path, "RESULTS_ENERGY_COOLING_TOT")),
        geotransform_obj, self.datatype, ENERGY_COOL, self.noDataValue
    ]
    SaveLayerDict = expLyr(SaveLayerDict)
    print("DONE COOLING")
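
# Illustrative, standalone sketch (not part of the module): how the cooling
# raster above is assembled. A per-NUTS3 intensity vector is broadcast onto
# the pixel grid by indexing with the NUTS3 ID raster, capped by a climate
# factor and multiplied with the gross floor area. All array values below are
# hypothetical example data, not project inputs.
import numpy as np

nuts_id_raster = np.array([[1, 1, 2], [2, 3, 3]], dtype="int32")       # NUTS3 ID per pixel
intensity_per_nuts = np.array([0.0, 20.0, 35.0, 50.0], dtype="f4")     # index = NUTS3 ID number
cdd_factor = np.array([[0.8, 1.2, 3.9], [2.0, 1.0, 0.5]], dtype="f4")  # relative CDD indicator
gfa = np.array([[500.0, 0.0, 1200.0], [300.0, 800.0, 50.0]], dtype="f4")  # floor area per pixel

cool_per_m2 = intensity_per_nuts[nuts_id_raster] * 0.5 / 1000.0  # same scaling as above
cool_per_m2 *= np.minimum(3, cdd_factor)                         # cap the climate scaling at 3
energy_cool = gfa * cool_per_m2                                  # cooling demand per pixel
print(energy_cool)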
def main_process(self, NUTS3_feat_id_LIST):

    start_time = time.time()
    SaveLayerDict = {}

    # Load Raster Reference Layer
    (REFERENCE_RasterResolution, HighRes_gt_obj, self.LOAD_DATA_PREVIOUS,
     Ref_layer_is_uncut) = \
        self.load_reference_raster_lyr(self.NUTS3_vector_path,
                                       self.strd_raster_path_full,
                                       self.temp_path, NUTS3_feat_id_LIST)

    #######################################
    #
    # Create raster Map (Array) which contains NUTS ID Number
    #
    #######################################
    st = time.time()
    print("\nCreate INDEX MAPS for NUTS3 and LAU2")
    OutPutRasterPathNuts = self.NUTS_cut_id_number
    OutPutRasterPathLau2 = self.LAU2_cut_id_number
    dataType16 = 'uint16'
    dataType32 = self.datatype_int
    create_new = True
    if (self.LOAD_DATA_PREVIOUS == True
            and os.path.exists(OutPutRasterPathNuts) == True
            and os.path.exists(OutPutRasterPathLau2) == True):
        create_new = False
        try:
            #ARR_LAU2_ID_NUMBER, geotransform_obj = SF.rrl(OutPutRasterPathLau2, data_type=dataType32)
            ARR_NUTS_ID_NUMBER, geotransform_obj = SF.rrl(
                OutPutRasterPathNuts, data_type=dataType16)
        except:
            create_new = True

    ResIncreaseFactor = REFERENCE_RasterResolution / TARGET_RESOLUTION

    if create_new == True:
        if (os.path.exists(self.NUTS_id_number) == True
                and os.path.exists(self.LAU2_id_number) == True
                and os.path.getmtime(self.LAU2_vector_path) <
                os.path.getmtime(self.NUTS_id_number)
                and os.path.getmtime(self.LAU2_vector_path) <
                os.path.getmtime(self.LAU2_id_number)):
            # The fully extended map already exists and is recent.
            # Therefore just load it and clip it to the desired extent.
            ARR_NUTS_ID_NUMBER, geotransform_obj = CRL.clip_raster_layer(
                self.NUTS_id_number, self.REFERENCE_geotransform_obj,
                self.REFERENCE_RasterSize)
            ARR_LAU2_ID_NUMBER, geotransform_obj = CRL.clip_raster_layer(
                self.LAU2_id_number, self.REFERENCE_geotransform_obj,
                self.REFERENCE_RasterSize)
        else:
            ARR_size_high_res = []
            ARR_size_high_res.append(
                int(self.REFERENCE_RasterSize[0] * ResIncreaseFactor))
            ARR_size_high_res.append(
                int(self.REFERENCE_RasterSize[1] * ResIncreaseFactor))

            # Create raster file with the NUTS ID for each pixel
            ARR_NUTS_ID_NUMBER, geotransform_obj = createIndexMap(
                self.NUTS3_vector_path, self.NUTS3_cut_vector_path,
                self.REFERENCE_extent, HighRes_gt_obj, ARR_size_high_res,
                self.noDataValue, dataType16,
                key_field="NUTS_ID", value_field="IDNUMBER",
                out_field_name="IDNUMBER")

            # Create raster file with the LAU2 ID for each pixel
            ARR_LAU2_ID_NUMBER, geotransform_obj = createIndexMap(
                self.LAU2_vector_path, self.LAU2_cut_vector_path,
                self.REFERENCE_extent, HighRes_gt_obj, ARR_size_high_res,
                self.noDataValue, dataType32,
                key_field="COMM_ID", value_field="LAU_UDATAI",
                out_field_name="LAU_UDATAI")

        SaveLayerDict["NUTS_ID_NUMBER"] = [
            OutPutRasterPathNuts, geotransform_obj, dataType16,
            ARR_NUTS_ID_NUMBER, self.noDataValue
        ]
        SaveLayerDict["LAU2_ID_NUMBER"] = [
            OutPutRasterPathLau2, geotransform_obj, dataType32,
            ARR_LAU2_ID_NUMBER, self.noDataValue
        ]
        SaveLayerDict = expLyr(SaveLayerDict)

        if Ref_layer_is_uncut == True or 1 == 1:
            shutil.copy2(OutPutRasterPathNuts, self.NUTS_id_number)
            shutil.copy2(OutPutRasterPathLau2, self.LAU2_id_number)
            shutil.copy2(OutPutRasterPathNuts, self.NUTS_id_number_prepro)
            shutil.copy2(OutPutRasterPathLau2, self.LAU2_id_number_prepro)

    elapsed_time = time.time() - st
    print("Process Create INDEX MAPS took: %4.1f seconds" % elapsed_time)

    print("create Country ID Map")
    elapsed_time = time.time() - start_time
    start_time2 = time.time()

    NutsData = RCD.READ_CSV_DATA(self.csvNutsData, delimiter=",",
                                 skip_header=6)[0]
    if NutsData["COUNTRY_ID"][5] == 0:
        CNAME = np.unique(NutsData["COUNTRY_CODE"])
        cid = 0
        for ele in COUNTRIES:
            for ele2 in CNAME:
                if ele[1] == ele2:
                    idx = NutsData["COUNTRY_CODE"] == ele2
                    NutsData["COUNTRY_NRCODE"][idx] = ele[0]
                    cid += 1
                    NutsData["COUNTRY_ID"][idx] = cid
        #np.savetxt(self.csvNutsData + "_new_cnr", NutsData['COUNTRY_NRCODE'], delimiter=",")
        #np.savetxt(self.csvNutsData + "_new_id", NutsData['COUNTRY_ID'], delimiter=",")

    ARR_COUNTRY_ID_NUMBER = np.zeros_like(ARR_NUTS_ID_NUMBER)
    for jj, CID in enumerate(np.unique(NutsData["COUNTRY_ID"])):
        print(jj)
        #if jj > 5:
        #    break
        NUTS3IDlist = NutsData["DI"][NutsData["COUNTRY_ID"] == CID]
        NUTS3_list = np.zeros(NUTS3IDlist.shape[0], dtype="uint16")
        for kk, ele in enumerate(NUTS3IDlist):
            id_ = ele[3:]
            if id_.isnumeric():
                id_ = int(id_)
                NUTS3_list[kk] = id_
            else:
                print("Cannot convert to integer %s" % id_)
        if NUTS3_list[-1] - NUTS3_list[0] + 1 == NUTS3IDlist.shape[0]:
            # The NUTS3 ID numbers of this country form one consecutive block
            idx = np.logical_and(ARR_NUTS_ID_NUMBER >= NUTS3_list[0],
                                 ARR_NUTS_ID_NUMBER <= NUTS3_list[-1])
            ARR_COUNTRY_ID_NUMBER[idx] = CID
        else:
            print("CHECK COUNTRY: %i" % CID)

    SaveLayerDict["COUNTRY_ID_NUMBER"] = [
        self.Country_id_number, geotransform_obj, "int8",
        ARR_COUNTRY_ID_NUMBER, self.noDataValue
    ]
    SaveLayerDict = expLyr(SaveLayerDict)

    elapsed_time2 = time.time() - start_time2
    print("\n\n\n######\n\nThe whole process took: %4.1f + %4.1f seconds\n\n\n"
          % (elapsed_time, elapsed_time2))

    # XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
    # XXXXXXXXXXXXXXXXXXXXXXXXXXXX Close XXXXXXXXXXXXXXXXXXXXXXXXXXXXX
    print("Done!")
    return
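
# Illustrative sketch (an assumption about the intent of the loop above): the
# country ID raster is only filled where a country's NUTS3 ID numbers form one
# consecutive block, so a simple range test on the ID raster suffices. The ID
# strings and raster values below are made up.
import numpy as np

nuts3_ids = ["AT_001", "AT_002", "AT_003"]       # hypothetical "DI" entries of one country
numbers = np.array([int(s[3:]) for s in nuts3_ids if s[3:].isnumeric()],
                   dtype="uint16")

id_raster = np.array([[1, 2, 7], [3, 2, 9]], dtype="uint16")
country_raster = np.zeros_like(id_raster, dtype="int8")

if numbers[-1] - numbers[0] + 1 == numbers.shape[0]:          # consecutive block?
    inside = (id_raster >= numbers[0]) & (id_raster <= numbers[-1])
    country_raster[inside] = 1                                 # hypothetical country ID
print(country_raster)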
def __init__(self, prj_path, path_in_raw, preproccessed_input_path, results):

    self.raw_input_data_path = "%s/%s/" % (prj_path, path_in_raw)
    self.results_data_path = "%s/%s/Processed Data/" % (prj_path, results)
    self.preproccessed_input_path = "%s/%s/" % (prj_path, preproccessed_input_path)

    fn_NUTSID_map = "%s/NUTS3_id_number.tif" % self.preproccessed_input_path
    fn_LAUID_map = "%s/LAU2_id_number.tif" % self.preproccessed_input_path
    fn_footprint_esm_share = "%s/_____ESM100m_final.tif" % self.raw_input_data_path
    fn_footprint_osm = "%s/_____0__FINAL_OSMbuildings_FOOTPRINT.tif" % self.preproccessed_input_path
    #self.rel_CDD = "%s/HDD_CDD_data_new/CDD_EnergyIntensityIndicator.tif" % self.preproccessed_input_path
    fn_HDD = "%s/HDD_CDD_data_new/HDD_FINAL.tif" % self.preproccessed_input_path
    fn_solar_radiation = "%s/solar_radiation/climate_solar_radiation/data/output_solar_radiation.tif" % self.raw_input_data_path
    fn_corine_landuse = "%s/g100_clc12_V18_5.tif" % self.raw_input_data_path

    assert os.path.exists(fn_NUTSID_map)
    assert os.path.exists(fn_LAUID_map)
    assert os.path.exists(fn_footprint_esm_share)
    assert os.path.exists(fn_footprint_osm)
    assert os.path.exists(fn_solar_radiation)
    assert os.path.exists(fn_corine_landuse)

    self.datatype_int = 'int32'
    self.datatype = 'f4'
    self.noDataValue = 0

    NUTSID_map, geotransform_obj = SF.rrl(fn_NUTSID_map,
                                          data_type=self.datatype)
    REFERENCE_RasterSize = NUTSID_map.shape

    solar_radiation, geotransform_obj_solar = SF.rrl(
        fn_solar_radiation, data_type=self.datatype)
    solar_radiation, geotransform_obj_clip = CRL.clip_raster_layer(
        [solar_radiation, geotransform_obj_solar], geotransform_obj,
        REFERENCE_RasterSize)
    hdd, geotransform_obj_clip = CRL.clip_raster_layer(
        fn_HDD, geotransform_obj, REFERENCE_RasterSize)

    # Copy of the solar radiation map with NaN values replaced by zero
    solar_radiation_ = np.zeros_like(solar_radiation)
    is_not_nan_ = np.isnan(solar_radiation) == False
    solar_radiation_[is_not_nan_] = solar_radiation[is_not_nan_]

    footprint_esm_share, geotransform_obj_clip = CRL.clip_raster_layer(
        fn_footprint_esm_share, geotransform_obj, REFERENCE_RasterSize)
    footprint_osm_share, geotransform_obj_clip = CRL.clip_raster_layer(
        fn_footprint_osm, geotransform_obj, REFERENCE_RasterSize)
    footprint = np.maximum(footprint_esm_share * 10000. / 100.,
                           footprint_osm_share)
    del footprint_esm_share
    del footprint_osm_share

    corine_land_use, geotransform_obj_clip = CRL.clip_raster_layer(
        fn_corine_landuse, geotransform_obj, REFERENCE_RasterSize)

    delta_eff = np.maximum(-0.05, np.minimum(0.03, (3000 - hdd) / 2000 * 0.05))
    efficiency = 0.38
    max_sol_area_per_roof_area = 0.25
    max_sol_area_per_plot_area = 0.25
    corine_ids_to_use_list = [12, 18, 19, 20, 21, 22, 29, 32]

    idx = np.zeros(corine_land_use.shape)
    #idx[:,:] = 0
    for i in corine_ids_to_use_list:
        idx = np.logical_or(idx, corine_land_use == i)

    energy_open_field = solar_radiation * max_sol_area_per_plot_area * (
        efficiency + delta_eff) * np.maximum(
            0, 10000.0 - footprint * 5.0) / 1000.0  # MWh / px(ha)
    energy_open_field[idx == False] = 0

    energy_roof_top = solar_radiation * max_sol_area_per_roof_area * (
        efficiency + delta_eff) / 1000.0
    energy_roof_top *= footprint

    SaveLayerDict = {}
    SaveLayerDict["footprint"] = [
        ("%s/%s.tif" % (self.results_data_path, "RESULTS_BUILDING_FOOTPRINT")),
        geotransform_obj, self.datatype, footprint, self.noDataValue
    ]
    SaveLayerDict["energy_open_field"] = [
        ("%s/%s.tif" % (self.results_data_path,
                        "RESULTS_ENERGY_SOLARTHERM_OPENFIELD")),
        geotransform_obj, self.datatype, energy_open_field, self.noDataValue
    ]
    SaveLayerDict["energy_roof_top"] = [
        ("%s/%s.tif" % (self.results_data_path,
                        "RESULTS_ENERGY_SOLARTHERM_ROOFTOP")),
        geotransform_obj, self.datatype, energy_roof_top, self.noDataValue
    ]
    SaveLayerDict = expLyr(SaveLayerDict)

    TABLE_RESULTS_NUTS_openfield = CDM.CreateResultsTableperIndicator(
        energy_open_field, NUTSID_map)
    TABLE_RESULTS_NUTS_rooftop = CDM.CreateResultsTableperIndicator(
        energy_roof_top, NUTSID_map)
    TABLE_RESULTS_NUTS_BUILDING_FOOTPRINT = CDM.CreateResultsTableperIndicator(
        footprint, NUTSID_map)

    LAUID_map, geotransform_obj = SF.rrl(fn_LAUID_map, data_type=self.datatype)
    TABLE_RESULTS_LAU_openfield = CDM.CreateResultsTableperIndicator(
        energy_open_field, LAUID_map)
    TABLE_RESULTS_LAU_rooftop = CDM.CreateResultsTableperIndicator(
        energy_roof_top, LAUID_map)
    TABLE_RESULTS_LAU_BUILDING_FOOTPRINT = CDM.CreateResultsTableperIndicator(
        footprint, LAUID_map)

    header = ["_ID_", "VALUE"]
    header = ",".join(header)
    np.savetxt("%s/%s.csv" % (self.results_data_path,
                              "RESULTS_ENERGY_SOLARTHERM_OPENFIELD__TABLE_RES_LAU2"),
               np.round(TABLE_RESULTS_LAU_openfield, 3),
               delimiter=",", header=header, comments="")
    np.savetxt("%s/%s.csv" % (self.results_data_path,
                              "RESULTS_ENERGY_SOLARTHERM_ROOFTOP__TABLE_RES_LAU2"),
               np.round(TABLE_RESULTS_LAU_rooftop, 3),
               delimiter=",", header=header, comments="")
    np.savetxt("%s/%s.csv" % (self.results_data_path,
                              "RESULTS_ENERGY_SOLARTHERM_OPENFIELD__TABLE_RES_NUTS"),
               np.round(TABLE_RESULTS_NUTS_openfield, 3),
               delimiter=",", header=header, comments="")
    np.savetxt("%s/%s.csv" % (self.results_data_path,
                              "RESULTS_ENERGY_SOLARTHERM_ROOFTOP__TABLE_RES_NUTS"),
               np.round(TABLE_RESULTS_NUTS_rooftop, 3),
               delimiter=",", header=header, comments="")
    np.savetxt("%s/%s.csv" % (self.results_data_path,
                              "RESULTS_BUILDING_FOOTPRINT__TABLE_RES_LAU2"),
               np.round(TABLE_RESULTS_LAU_BUILDING_FOOTPRINT, 3),
               delimiter=",", header=header, comments="")
    np.savetxt("%s/%s.csv" % (self.results_data_path,
                              "RESULTS_BUILDING_FOOTPRINT__TABLE_RES_NUTS"),
               np.round(TABLE_RESULTS_NUTS_BUILDING_FOOTPRINT, 3),
               delimiter=",", header=header, comments="")
    print("DONE SOLAR_RASTER")
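
# Illustrative, standalone sketch of the potential formulas used above (all
# values and array shapes are made up). Rooftop potential scales the solar
# yield with the building footprint; open-field potential uses the remaining
# plot area (1 ha minus a multiple of the footprint) and is masked to
# selected CORINE classes.
import numpy as np

solar_radiation = np.full((2, 2), 1200.0, dtype="f4")                 # kWh/m2a, hypothetical
hdd = np.array([[2500.0, 3500.0], [3000.0, 4200.0]], dtype="f4")
footprint = np.array([[800.0, 0.0], [150.0, 2500.0]], dtype="f4")     # m2 per ha pixel
landuse_ok = np.array([[True, True], [False, True]])                  # allowed CORINE classes

efficiency = 0.38
delta_eff = np.maximum(-0.05, np.minimum(0.03, (3000 - hdd) / 2000 * 0.05))

roof_top = solar_radiation * 0.25 * (efficiency + delta_eff) / 1000.0 * footprint   # MWh/px
open_field = solar_radiation * 0.25 * (efficiency + delta_eff) \
    * np.maximum(0, 10000.0 - footprint * 5.0) / 1000.0                              # MWh/px
open_field[~landuse_ok] = 0
print(roof_top, open_field)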
def main_process(self, NUTS3_feat_id_LIST):

    start_time = time.time()

    if del_temp_path:
        if os.path.exists(self.temp_path):
            shutil.rmtree(self.temp_path)
    if not os.path.exists(self.temp_path):
        os.makedirs(self.temp_path)

    SaveLayerDict = {}

    # Load Raster Reference Layer
    (REFERENCE_RasterResolution, HighRes_gt_obj, self.LOAD_DATA_PREVIOUS) = \
        self.load_reference_raster_lyr(self.NUTS3_vector_path,
                                       self.strd_raster_path_full,
                                       self.temp_path, NUTS3_feat_id_LIST)

    #######################################
    #
    # Create raster Map (Array) which contains NUTS ID Number
    #
    #######################################
    st = time.time()
    print("\nLOAD INDEX MAPS for NUTS3 and LAU2")
    OutPutRasterPathNuts = self.NUTS_cut_id_number
    OutPutRasterPathLau2 = self.LAU2_cut_id_number
    dataType16 = 'uint16'
    dataType32 = self.datatype_int
    create_new = True
    if (self.LOAD_DATA_PREVIOUS == True
            and os.path.exists(OutPutRasterPathNuts) == True
            and os.path.exists(OutPutRasterPathLau2) == True):
        create_new = False
        try:
            ARR_LAU2_ID_NUMBER, geotransform_obj = SF.rrl(
                OutPutRasterPathLau2, data_type=dataType32)
            ARR_NUTS_ID_NUMBER, geotransform_obj = SF.rrl(
                OutPutRasterPathNuts, data_type=dataType16)
        except:
            create_new = True

    ResIncreaseFactor = REFERENCE_RasterResolution / TARGET_RESOLUTION

    if create_new == True:
        if (os.path.exists(self.NUTS_id_number) == True
                and os.path.exists(self.LAU2_id_number) == True
                and os.path.getmtime(self.LAU2_vector_path) <
                os.path.getmtime(self.NUTS_id_number)
                and os.path.getmtime(self.LAU2_vector_path) <
                os.path.getmtime(self.LAU2_id_number)):
            # The fully extended map already exists and is recent.
            # Therefore just load it and clip it to the desired extent.
            ARR_NUTS_ID_NUMBER, geotransform_obj = CRL.clip_raster_layer(
                self.NUTS_id_number, self.REFERENCE_geotransform_obj,
                self.REFERENCE_RasterSize)
            ARR_LAU2_ID_NUMBER, geotransform_obj = CRL.clip_raster_layer(
                self.LAU2_id_number, self.REFERENCE_geotransform_obj,
                self.REFERENCE_RasterSize)
        else:
            print("CREATE NUTS_ID_NUMBER and LAU2_ID_NUMBER MAP first")

        SaveLayerDict["NUTS_ID_NUMBER"] = [
            OutPutRasterPathNuts, geotransform_obj, dataType16,
            ARR_NUTS_ID_NUMBER, self.noDataValue
        ]
        SaveLayerDict["LAU2_ID_NUMBER"] = [
            OutPutRasterPathLau2, geotransform_obj, dataType32,
            ARR_LAU2_ID_NUMBER, self.noDataValue
        ]
        if EXPORT_AT_THE_END == False:
            SaveLayerDict = expLyr(SaveLayerDict)

    elapsed_time = time.time() - st
    print("Process Create INDEX MAPS took: %4.1f seconds" % elapsed_time)

    ADD_ID_MAP = {}
    ADD_ID_MAP["LAU2_ID_NUMBER"] = ARR_LAU2_ID_NUMBER
    ADD_ID_MAP["NUTS_ID_NUMBER"] = ARR_NUTS_ID_NUMBER

    """
    At this stage, only the following ha-Maps are required:
        ARR_LAU2_ID_NUMBER:     int32
        ARR_NUTS_ID_NUMBER:     uint16
    """

    #######################################
    #
    # Cut population raster
    #
    #######################################
    print("\nProcess 0 Population")
    FullRasterPath = self.pop_raster_path_full
    OutPutRasterPath = self.pop_raster_path
    data_type = self.datatype_int
    create_new = True
    if (process_cut_pop_layer == False and self.LOAD_DATA_PREVIOUS == True
            and os.path.exists(OutPutRasterPath) == True):
        create_new = False
        try:
            arr_pop_cut, geotransform_obj_1km = SF.rrl(OutPutRasterPath,
                                                       data_type=data_type)
        except Exception as e:
            print("Cannot import %s" % self.pop_raster_path)
            print(e)
            create_new = True

    if 1 == 1 or create_new == True:
        arr_pop_cut, geotransform_obj_1km = CRL.clip_raster_layer(
            FullRasterPath, self.REFERENCE_geotransform_obj,
            self.REFERENCE_RasterSize)
        # Load JRC Population Raster (JRC POP 250m Raster transformed to 1km)
        arr_JRC_pop_cut, geotransform_obj_1km = CRL.clip_raster_layer(
            self.JRC_POP250_1km_raster_path, self.REFERENCE_geotransform_obj,
            self.REFERENCE_RasterSize)
        # For pixels where the arr_pop_cut matrix is zero
        # -> add 30% of the value of the JRC population map
        EMPTY_MATRIX = np.zeros_like(arr_JRC_pop_cut)
        idxM = arr_pop_cut == 0
        # Add only 30% of the value: adding population where the JRC layer and
        # the main layer are spatially shifted (which is the case) would
        # otherwise create a distortion towards sparsely populated areas
        EMPTY_MATRIX[idxM] = 0.3 * arr_JRC_pop_cut[idxM]
        EMPTY_MATRIX[EMPTY_MATRIX < 3] = 0
        EMPTY_MATRIX[EMPTY_MATRIX > 20000] = 20000
        """(arr_pop100_no_av, geotransform_obj) = hr.HighResArray(arr_pop_cut
                    , ResIncreaseFactor, geotransform_obj_1km)
        arr_pop100_no_av = arr_pop100_no_av / ResIncreaseFactor ** 2
        TABLE_LAU_O = CDM.CreateResultsTableperIndicator(arr_pop100_no_av, ARR_LAU2_ID_NUMBER)
        TABLE_NUTS_O = CDM.CreateResultsTableperIndicator(arr_pop100_no_av, ARR_NUTS_ID_NUMBER)
        """
        arr_pop_cut += EMPTY_MATRIX.astype(self.datatype_int)

        SaveLayerDict["population_raster"] = [
            OutPutRasterPath, geotransform_obj_1km, data_type, arr_pop_cut,
            self.noDataValue
        ]
        if EXPORT_AT_THE_END == False:
            SaveLayerDict = expLyr(SaveLayerDict)

    elapsed_time = time.time() - st
    print("Process 4 took: %4.1f seconds" % elapsed_time)

    #######################################
    #
    # End Cut population raster
    #
    #######################################
    print("Preparation (Clip initial raster file) took: %4.1f "
          % (time.time() - start_time))

    #######################################
    #
    # Corine Landcover raster
    #
    #######################################
    print("\nProcess 1a Corine Landcover data")
    st = time.time()
    FullRasterPath = self.Corine_path_full
    OutPutRasterPath = self.Corine_share_path
    OutPutRasterPath_POP_weight = self.Corine_share_path_POP_weight
    data_type = self.datatype
    create_new = True
    if (process1a == False and self.LOAD_DATA_PREVIOUS == True
            and os.path.exists(OutPutRasterPath) == True):
        create_new = False
        try:
            arr_corine_share, geotransform_obj = SF.rrl(
                OutPutRasterPath, data_type=data_type)
            POPweight_corineLC, geotransform_obj = SF.rrl(
                OutPutRasterPath_POP_weight, data_type=data_type)
        except:
            create_new = True

    if create_new == True:
        # Cut Corine Land Cover to the same size as the REFERENCE layer
        # Save as raster layer
        arr_corine_cut, geotransform_obj = CRL.clip_raster_layer(
            FullRasterPath, self.REFERENCE_geotransform_obj,
            self.REFERENCE_RasterSize)
        # Transform CLC Code to Building shares
        arr_corine_share = (
            CORINE_LANDCOVER_TRANSFORM_MATRIX[arr_corine_cut])
        POPweight_corineLC = (CORINE_LANDCOVER_POPWeight[arr_corine_cut])

        if DEBUG == True:
            SaveLayerDict["corine_raster"] = [
                self.Corine_path, geotransform_obj, self.datatype_int,
                arr_corine_cut, self.noDataValue
            ]
            SaveLayerDict = expLyr(SaveLayerDict)

        del arr_corine_cut

        SaveLayerDict["corine_share_raster"] = [
            OutPutRasterPath, geotransform_obj, data_type, arr_corine_share,
            self.noDataValue
        ]
        SaveLayerDict["corine_pop_weight_raster"] = [
            OutPutRasterPath_POP_weight, geotransform_obj, data_type,
            POPweight_corineLC, self.noDataValue
        ]

    elapsed_time = time.time() - st
    print("Process 1a: Cut and Calc Corine Shares took: %4.1f seconds"
          % elapsed_time)

    #######################################
    #
    # End Corine Landcover raster
    #
    #######################################
    SaveLayerDict = expLyr(SaveLayerDict)

    """
    At this stage, only the following ha-Maps are required:
        ARR_LAU2_ID_NUMBER:     int32
        ARR_NUTS_ID_NUMBER:     uint16
        POPweight_corineLC:     float32
        arr_corine_share:       float32
    """

    #######################################
    #
    # Transform POPULATION Layer to 100 meter raster
    #
    #######################################
    print("\nProcess 2: Transform Population layer to %i Meter Raster"
          % TARGET_RESOLUTION)
    st = time.time()
    create_new = True
    OutPutRasterPath = self.pop_raster_100m_path
    data_type = self.datatype
    if (process2 == False and self.LOAD_DATA_PREVIOUS == True
            and os.path.exists(OutPutRasterPath) == True):
        create_new = False
        try:
            ARR_POPULATION_100, geotransform_obj = SF.rrl(
                OutPutRasterPath, data_type=data_type)
            arr_pop100_1km, geotransform_obj = SF.rrl(
                "%s_100_1km.tif" % OutPutRasterPath[:-4], data_type=data_type)
        except:
            create_new = True

    if create_new == True:
        # Spread the 1 km population values onto the 100 m grid
        # Save as raster layer
        (arr_pop100_no_av, geotransform_obj) = hr.HighResArray(
            arr_pop_cut, ResIncreaseFactor, geotransform_obj_1km)
        arr_pop100_1km = arr_pop100_no_av.copy() + 0.0001
        arr_pop100_no_av = arr_pop100_no_av.astype(
            data_type) / ResIncreaseFactor**2

        SaveLayerDict["data_pop100_1km"] = ("%s_100_1km.tif"
                                            % OutPutRasterPath[:-4],
                                            geotransform_obj, data_type,
                                            arr_pop100_1km, self.noDataValue)
        print("HighRes took: %4.2f sec " % (time.time() - st))
        st1 = time.time()
        print(np.sum(arr_pop100_no_av))

        if DEBUG == True:
            print(np.sum(arr_pop100_no_av))
            SaveLayerDict["data_pop100_no_average"] = (
                "%s_before" % OutPutRasterPath, geotransform_obj, data_type,
                arr_pop100_no_av, self.noDataValue)
            SaveLayerDict = expLyr(SaveLayerDict)

        ARR_POPULATION_100 = SOHR.CalcAverageBased(arr_pop100_no_av, 10, 6, 1,
                                                   POPweight_corineLC)
        del POPweight_corineLC
        del arr_pop100_no_av  # no impact(?): memory is still needed for ARR_POPULATION_100!
        print("CalcAverageBased took: %4.2f sec " % (time.time() - st1))

        SaveLayerDict["data_pop100"] = (OutPutRasterPath, geotransform_obj,
                                        data_type, ARR_POPULATION_100,
                                        self.noDataValue)
    else:
        del POPweight_corineLC

    self.geotransform_obj_high_res = geotransform_obj
    self.arr_size_high_res = ARR_POPULATION_100.shape

    elapsed_time = time.time() - st
    print("Process 2 took: %4.1f seconds" % elapsed_time)

    #######################################
    #
    # End Transform POPULATION Layer to 100 meter raster
    #
    #######################################
    if EXPORT_AT_THE_END == False:
        SaveLayerDict = expLyr(SaveLayerDict)

    """
    At this stage, the following ha-Maps are required:
        ARR_NUTS_ID_NUMBER:     uint16
        ARR_LAU2_ID_NUMBER:     int32
        arr_corine_share:       float32
        ARR_POPULATION_100:     float32
        arr_pop100_1km:         float32
    """

    #######################################
    #
    # Cut Soil Sealing raster
    #
    #######################################
    print("\nProcess 1 SoilSealing")
    st = time.time()
    create_new = True
    FullRasterPath = self.SoilSeal_path_full
    OutPutRasterPath = self.SoilSeal_path
    data_type = "float32"
    if (process1 == False and self.LOAD_DATA_PREVIOUS == True
            and os.path.exists(OutPutRasterPath) == True):
        create_new = False
        try:
            arr_soilseal_cut, geotransform_obj = SF.rrl(
                OutPutRasterPath, data_type=data_type)
        except:
            create_new = True

    if create_new == True:
        # Cut Soil Sealing to the same size as the REFERENCE layer for smaller
        # data processing, and limit values to the range 0..100%
        # Save as raster layer
        arr_soilseal_cut, geotransform_obj = CRL.clip_raster_layer(
            FullRasterPath, self.REFERENCE_geotransform_obj,
            self.REFERENCE_RasterSize)
        arr_soilseal_cut = np.maximum(0, arr_soilseal_cut)
        arr_soilseal_cut = np.minimum(100, arr_soilseal_cut)

        SaveLayerDict["soilseal_raster"] = [
            OutPutRasterPath, geotransform_obj, data_type, arr_soilseal_cut,
            self.noDataValue
        ]

    elapsed_time = time.time() - st
    print("Process 1: Cut SoilSealing took: %4.1f seconds" % elapsed_time)

    #######################################
    #
    # End Cut Soil Sealing raster
    #
    #######################################
    if EXPORT_AT_THE_END == False:
        SaveLayerDict = expLyr(SaveLayerDict)

    """
    At this stage, the following ha-Maps are required:
        ARR_NUTS_ID_NUMBER:     uint16
        ARR_LAU2_ID_NUMBER:     int32
        arr_corine_share:       float32
        ARR_POPULATION_100:     float32
        arr_pop100_1km:         float32
        arr_soilseal_cut:       float32
    """

    #######################################
    #
    # Calculate Distribution function within each population grid cell
    #
    #######################################
    # Calculate the relative shares of the SoilSealing x CorineLandCover
    # distribution [100m2] within the original population raster [1km2]
    print("\nProcess 3 Create Density within Population Grid")
    st = time.time()
    OutPutRasterPath = self.density_function_per_PopGrid
    data_type = self.datatype
    create_new = True
    if (process3 == False and self.LOAD_DATA_PREVIOUS == True
            and os.path.exists(OutPutRasterPath) == True):
        create_new = False
        try:
            RelativeDensityDistributionWithinPopRaster, geotransform_obj = SF.rrl(
                OutPutRasterPath, data_type=data_type)
        except:
            create_new = True

    if create_new == True:
        density_indicator = arr_corine_share * arr_soilseal_cut / 100.0
        #ResIncreaseFactor = PopulationRasterResolution / TARGET_RESOLUTION
        RelativeDensityDistributionWithinPopRaster = _process3(
            density_indicator, ResIncreaseFactor)

        if DEBUG == True:
            SaveLayerDict["density_indicator"] = [
                self.CorineSoilSeal_share_path, geotransform_obj, data_type,
                density_indicator, self.noDataValue
            ]
            SaveLayerDict["relative_density_indicator"] = [
                OutPutRasterPath, geotransform_obj, data_type,
                RelativeDensityDistributionWithinPopRaster, self.noDataValue
            ]
            SaveLayerDict = expLyr(SaveLayerDict)

        del density_indicator

    del arr_corine_share

    DENSITY_INDICATOR_MAP_POP = (
        RelativeDensityDistributionWithinPopRaster * ARR_POPULATION_100)
    DENSITY_INDICATOR_MAP_POP_1km = SOHR.CalcLowResSum(
        DENSITY_INDICATOR_MAP_POP, ResIncreaseFactor)
    Ratio = (arr_pop_cut + 0.1) / (DENSITY_INDICATOR_MAP_POP_1km + 0.1)
    Ratio = np.maximum(0.5, np.minimum(10000, Ratio))
    Ratio_100m = SOHR.CalcHighRes(Ratio, ResIncreaseFactor)
    DENSITY_INDICATOR_MAP_POP *= Ratio_100m

    SaveLayerDict["DENSITY_INDICATOR_MAP_POP"] = [
        "%s/%s.tif" % (self.temp_path, "__DENSITY_INDICATOR_MAP_POP"),
        geotransform_obj, data_type, DENSITY_INDICATOR_MAP_POP,
        self.noDataValue
    ]
    SaveLayerDict = expLyr(SaveLayerDict)

    del RelativeDensityDistributionWithinPopRaster, Ratio_100m

    #######################################
    #
    # End: Calculate Distribution function within each population grid cell
    #
    #######################################
    """
    At this stage, the following ha-Maps are required:
        ARR_NUTS_ID_NUMBER:     uint16
        ARR_LAU2_ID_NUMBER:     int32
        arr_corine_share:       float32 --> Deleted
        ARR_POPULATION_100:     float32
        arr_pop100_1km:         float32
        arr_soilseal_cut:       float32
        DENSITY_INDICATOR_MAP_POP: float32
    """
    print("\n\n\n######\n\nProcess took: %4.1f seconds so far\n\n\n"
          % (time.time() - start_time))

    #######################################
    #
    # LOAD CSV DATA on POPULATION per LAU region
    # and correct data for LAU and RASTER
    # Modifies:
    #       DENSITY_INDICATOR_MAP_POP
    #
    #######################################
    if "noLAUcorr" not in self.temp_path:
        (LAU_DATA, EXPORT_COLUMNS, SCALING_FACTOR,
         CUTOFF_Value) = RCD.READ_CSV_DATA(self.csv_input_data_LAU2_path)
        A = np.zeros(LAU_DATA['POP_2012'].shape[0], dtype="f4")
        A[0:] = LAU_DATA['POP_2012']
        print(np.max(DENSITY_INDICATOR_MAP_POP))

        TABLE_RESULTS = CDM.CreateResultsTableperIndicator(
            DENSITY_INDICATOR_MAP_POP, ARR_LAU2_ID_NUMBER)
        num_run = 1
        TAB_res = np.zeros((TABLE_RESULTS.shape[0], 2 + num_run))
        TAB_res[:, :2] = TABLE_RESULTS

        max_pop = self.calc_max_pop(arr_pop100_1km)

        for ii in range(num_run):
            print("########\nRun %i" % ii)
            # limit population per hectare
            DENSITY_INDICATOR_MAP_POP = np.minimum(
                DENSITY_INDICATOR_MAP_POP, max_pop)
            #print(np.sum(DENSITY_INDICATOR_MAP_POP))
            DENSITY_INDICATOR_MAP_POP = CDM.ScaleResultsWithTableperIndicator(
                DENSITY_INDICATOR_MAP_POP, ARR_LAU2_ID_NUMBER, A,
                elasticitiy=0.66, maxscale=1.3)
            #print(np.sum(DENSITY_INDICATOR_MAP_POP))
            """
            SaveLayerDict["POP_new"] = ["%s/%s_run_%i.tif"
                            % (self.temp_path, "POP_new_unrestrict", ii)
                            , geotransform_obj
                            , data_type
                            , DENSITY_INDICATOR_MAP_POP
                            , self.noDataValue]
            SaveLayerDict = expLyr(SaveLayerDict)
            """
            DENSITY_INDICATOR_MAP_POP_1km = SOHR.CalcLowResSum(
                DENSITY_INDICATOR_MAP_POP, ResIncreaseFactor)
            Ratio = ((arr_pop_cut + 0.1) /
                     (DENSITY_INDICATOR_MAP_POP_1km + 0.1))**0.66
            #sys.exit()
            #Ratio = np.maximum(0.5, np.minimum(1.5, Ratio))
            Ratio_100m = SOHR.CalcHighRes(Ratio, ResIncreaseFactor)
            DENSITY_INDICATOR_MAP_POP *= Ratio_100m
            # limit population per hectare
            DENSITY_INDICATOR_MAP_POP = np.minimum(
                DENSITY_INDICATOR_MAP_POP, max_pop)

            SaveLayerDict["POP_new"] = [
                "%s/%s_run_%i.tif" % (self.temp_path, "POP_new", ii),
                geotransform_obj, data_type, DENSITY_INDICATOR_MAP_POP,
                self.noDataValue
            ]
            SaveLayerDict = expLyr(SaveLayerDict)
            print(np.max(DENSITY_INDICATOR_MAP_POP))

            TABLE_RESULTS = CDM.CreateResultsTableperIndicator(
                DENSITY_INDICATOR_MAP_POP, ARR_LAU2_ID_NUMBER)
            TAB_res[:, ii + 2] = TABLE_RESULTS[:, 1]

        fn_ = ("%s/%s.csv" % (self.temp_path, "TEST_ARR_POPULATION_LAU_X"))
        np.savetxt(fn_, np.round(TAB_res, 0), delimiter=",")

        del DENSITY_INDICATOR_MAP_POP_1km, Ratio, Ratio_100m
        del max_pop, arr_pop100_1km

    #######################################
    #
    # End: Modify GRID POPULATION (DENSITY_INDICATOR_MAP_POP) based on LAU Population
    #
    #######################################
    SaveLayerDict = expLyr(SaveLayerDict)
    print("CALC POPULATION finished")
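
# Illustrative sketch of the 1 km correction step used above, assuming that
# SOHR.CalcLowResSum aggregates 100 m cells to coarse-cell sums and
# SOHR.CalcHighRes repeats coarse values back onto the fine grid (both are
# approximated here with plain numpy; the factor and arrays are made-up
# examples, not project data).
import numpy as np

factor = 2                                              # e.g. 10 for 1 km -> 100 m
pop_100m = np.array([[1., 2., 0., 0.],
                     [3., 4., 1., 1.],
                     [0., 0., 5., 5.],
                     [0., 0., 5., 5.]], dtype="f4")
pop_1km = np.array([[12., 4.], [8., 20.]], dtype="f4")  # target totals per coarse cell

def low_res_sum(arr, f):
    h, w = arr.shape
    return arr.reshape(h // f, f, w // f, f).sum(axis=(1, 3))

def high_res(arr, f):
    return np.repeat(np.repeat(arr, f, axis=0), f, axis=1)

current_1km = low_res_sum(pop_100m, factor)
ratio = (pop_1km + 0.1) / (current_1km + 0.1)
ratio = np.maximum(0.5, np.minimum(10000, ratio))       # same clipping as above
pop_100m *= high_res(ratio, factor)
print(low_res_sum(pop_100m, factor))                    # now close to pop_1km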
def main(main_path, path_in_raw, preproccessed_input_path, prj_path_output):

    st = time.time()
    data_type = "uint8"
    MOST_RECENT_CUT = main_path + prj_path_output + "/MOST_RECENT_CUT.pk"
    prepro_path = main_path + preproccessed_input_path
    org_data_path = main_path + path_in_raw
    p_ = org_data_path
    pi_ = org_data_path + "/vector_input_data/"
    NUTS3_vector_path = pi_ + "/NUTS3.shp"
    strd_raster_path_full = "%s/%s" % (org_data_path, "Population.tif")
    temp_path = ("/home/simulant/workspace/project/Hotmaps_DATA/"
                 "heat_density_map/output_2/" + os.sep + "Temp")
    SoilSeal_path_full = "%s/%s" % (org_data_path, "_____ESM100m_final.tif")
    #p_ = "/home/simulant/workspace/project/Hotmaps_DATA/heat_density_map/output/"
    sd = ""
    print(os.path.exists(p_))
    print(os.path.exists(pi_))
    fn = []
    NUTS3_feat_id_LIST = range(12000)

    (REFERENCE_RasterResolution, HighRes_gt_obj, LOAD_DATA_PREVIOUS,
     Ref_layer_is_uncut, REFERENCE_geotransform_obj, REFERENCE_RasterSize) = \
        load_reference_raster_lyr(NUTS3_vector_path, strd_raster_path_full,
                                  temp_path, NUTS3_feat_id_LIST,
                                  MOST_RECENT_CUT)

    for f_ in os.listdir("%s/%s" % (p_, sd)):
        if f_.endswith(".tif"):
            fn.append("%s/%s/%s" % (p_, sd, f_))
            print(f_)
            if "g100_clc12_v18_5" in f_.lower():
                data, geotransform_obj = CRL.clip_raster_layer(
                    fn[-1], REFERENCE_geotransform_obj, REFERENCE_RasterSize)
                data2 = np.zeros((data.shape), dtype="f4")
                data3 = np.zeros_like(data2)
                data4 = np.ones_like(data2) * 10.0  # 1000 m2
                data2[data <= 21] = 10.0
                data3[data <= 6] = 10.0
                data3[data == 9] = 10.0
                data3[data == 10] = 10.0
                data3[data == 11] = 10.0
                data3[data == 20] = 10.0
                print(np.sum(data2))
                print(np.sum(data3))
                print(np.sum(data4))
            elif "ESM100m_final" in f_:
                data5, geotransform_obj = CRL.clip_raster_layer(
                    fn[-1], REFERENCE_geotransform_obj, REFERENCE_RasterSize)
                data5 *= 10.0 / 100.0  # in 1000 m2, data5 unit = %
                print(np.sum(data5))

    print(time.time() - st)
    ARR_NUTS_ID_NUMBER, geotransform_obj = SF.rrl(
        "%s/%s_id_number.tif" % (prepro_path, "NUTS3"), data_type="uint16")
    print(time.time() - st)
    ARR_LAU2_ID_NUMBER, geotransform_obj = SF.rrl(
        "%s/%s_id_number.tif" % (prepro_path, "LAU2"), data_type="uint32")
    print(time.time() - st)

    #num_fn = len(fn)
    num_fn = 4
    RES_Table_NUTS = np.zeros((np.max(ARR_NUTS_ID_NUMBER) + 1, num_fn + 1), "f4")
    RES_Table_LAU = np.zeros((np.max(ARR_LAU2_ID_NUMBER) + 1, num_fn + 1), "f4")
    RES_Table_NUTS[:, 0] = np.arange(RES_Table_NUTS.shape[0])
    RES_Table_LAU[:, 0] = np.arange(RES_Table_LAU.shape[0])
    header = ["DI"]

    #for i, f_ in enumerate(fn):
    for i in range(num_fn):
        #print(f_)
        if i == 0:
            data = data2.copy()
            fn = "dauersiedlungsraum"
        elif i == 1:
            data = data3.copy()
            fn = "dauersiedlungsraum_eng"
        elif i == 2:
            data = data4.copy()
            fn = "flaeche"
        else:
            data = data5.copy()
            fn = "ESM100m_final"
        print(fn)
        header.append(fn)
        print(np.sum(data))
        #header.append(f_.split("/")[-1])
        #data, geotransform_obj = SF.rrl(f_, data_type=data_type)
        TABLE_RESULTS_NUTS = CDM.CreateResultsTableperIndicator(
            data, ARR_NUTS_ID_NUMBER)
        print(time.time() - st)
        TABLE_RESULTS_LAU = CDM.CreateResultsTableperIndicator(
            data, ARR_LAU2_ID_NUMBER)
        del data
        print(time.time() - st)
        RES_Table_NUTS[:, i + 1] = TABLE_RESULTS_NUTS[:, -1]
        RES_Table_LAU[:, i + 1] = TABLE_RESULTS_LAU[:, -1]
        #break

    header = ",".join(header)
    np.savetxt("%s/%s.csv" % (prepro_path, "__TABLE_RES_LAU2"),
               np.round(RES_Table_LAU, 3),
               delimiter=",", header=header, comments="")
    np.savetxt("%s/%s.csv" % (prepro_path, "__TABLE_RES_NUTS"),
               np.round(RES_Table_NUTS, 3),
               delimiter=",", header=header, comments="")
    print("DONE")
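
# Illustrative sketch of a per-region aggregation of the kind produced by
# CDM.CreateResultsTableperIndicator (assumed behaviour: one row per region
# ID with the summed indicator value). np.bincount yields the same style of
# table for a small made-up example; the arrays below are not project data.
import numpy as np

id_map = np.array([[0, 1, 1], [2, 2, 1]], dtype="uint16")   # region ID per pixel (0 = nodata)
indicator = np.array([[5., 1., 2.], [3., 4., 6.]], dtype="f4")

sums = np.bincount(id_map.ravel(), weights=indicator.ravel())
table = np.column_stack((np.arange(sums.shape[0]), sums))    # columns: region ID, summed value
print(table)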
def calc_shares_construction_period(self):

    key_field = "NUTS_ID"
    RESOLUTION = "100"

    self.REFERENCE_RASTER_LAYER_COORD, uncut = \
        CRL.create_reference_raster_layer_origin_extent_of_vctr_feat(
            self.strd_raster_path_full, self.strd_vector_path_NUTS, [],
            Vctr_key_field=key_field)
    (self.REFERENCE_geotransform_obj, self.REFERENCE_RasterSize,
     self.REFERENCE_RESOLUTION,
     self.REFERENCE_extent) = self.REFERENCE_RASTER_LAYER_COORD

    SoilSeal2012, geotransform_obj = CRL.clip_raster_layer(
        self.SoilSealingFile, self.REFERENCE_geotransform_obj,
        self.REFERENCE_RasterSize)
    SoilSealBuildUp, geotransform_obj = CRL.clip_raster_layer(
        self.BUILD_UP_SoilSealingFIle, self.REFERENCE_geotransform_obj,
        self.REFERENCE_RasterSize)

    fn_14 = "%s/GHS_BUILT_LDS2014_3035_new_%s.tif" % (
        self.SoilSeal_output_data_path, RESOLUTION)
    fn_share_14 = "%s/GHS_BUILT_2014_%s_share.tif" % (
        self.SoilSeal_output_data_path, RESOLUTION)
    SoilSeal_14, geotransform_obj = SF.rrl(fn_14, data_type=self.datatype)

    SoilSeal2012[SoilSeal2012 > 100] = 0
    Estimated_share_build_SoilSeal = np.minimum(
        1, SoilSealBuildUp /
        (0.0001 + 0.5 * (SoilSeal_14 + SoilSeal2012 / 100.0)))
    del SoilSealBuildUp, SoilSeal2012

    SaveLayerDict = {}

    fn_75 = "%s/GHS_BUILT_LDS1975_3035_new_%s.tif" % (
        self.SoilSeal_output_data_path, RESOLUTION)
    fn_share_75 = "%s/GHS_BUILT_1975_%s_share.tif" % (
        self.SoilSeal_output_data_path, RESOLUTION)
    fn_90 = "%s/GHS_BUILT_LDS1990_3035_new_%s.tif" % (
        self.SoilSeal_output_data_path, RESOLUTION)
    fn_share_90 = "%s/GHS_BUILT_1990_%s_share.tif" % (
        self.SoilSeal_output_data_path, RESOLUTION)
    fn_00 = "%s/GHS_BUILT_LDS2000_3035_new_%s.tif" % (
        self.SoilSeal_output_data_path, RESOLUTION)
    fn_share_00 = "%s/GHS_BUILT_2000_%s_share.tif" % (
        self.SoilSeal_output_data_path, RESOLUTION)

    print("Load Images")
    SoilSeal_75, geotransform_obj = SF.rrl(fn_75, data_type=self.datatype)
    SoilSeal_90, geotransform_obj = SF.rrl(fn_90, data_type=self.datatype)
    SoilSeal_00, geotransform_obj = SF.rrl(fn_00, data_type=self.datatype)
    print("Loaded")

    SoilSeal_7590 = np.maximum(SoilSeal_90 * 0.0075,
                               SoilSeal_90 - SoilSeal_75 * (1 - 0.001)**15)
    SoilSeal_9000 = np.maximum(
        SoilSeal_00 * 0.0075,
        SoilSeal_00 - SoilSeal_7590 - SoilSeal_75 * (1 - 0.001)**25)
    SoilSeal_0014 = np.maximum(
        SoilSeal_14 * 0.0075,
        SoilSeal_14 - SoilSeal_9000 - SoilSeal_7590 * (1 - 0.001)**25 -
        SoilSeal_75 * (1 - 0.001)**40)

    SaveLayerDict["SoilSeal7590"] = [
        "%s_CP7590.tif" % fn_share_90[:-9], geotransform_obj, self.datatype,
        SoilSeal_7590, self.noDataValue
    ]
    SaveLayerDict["SoilSeal9000"] = [
        "%s_CP9000.tif" % fn_share_00[:-9], geotransform_obj, self.datatype,
        SoilSeal_9000, self.noDataValue
    ]
    SaveLayerDict["SoilSeal0014"] = [
        "%s_CP0014.tif" % fn_share_14[:-9], geotransform_obj, self.datatype,
        SoilSeal_0014, self.noDataValue
    ]
    SaveLayerDict = _export_layer(SaveLayerDict)

    # Estimated Soil Sealing in 2014 per construction period
    SS_be75 = SoilSeal_75 * (1 - 0.001)**40 * (
        1 + Estimated_share_build_SoilSeal) / 2

    SaveLayerDict["Estimated_share_no_build_SoilSeal"] = [
        "%s/Estimated_share_build_SoilSeal.tif"
        % self.SoilSeal_output_data_path, geotransform_obj, self.datatype,
        Estimated_share_build_SoilSeal, self.noDataValue
    ]
    SaveLayerDict = _export_layer(SaveLayerDict)
    del Estimated_share_build_SoilSeal

    SS_7590 = SoilSeal_7590 * (1 - 0.001)**25
    SS_9000 = SoilSeal_9000
    SS_0014 = SoilSeal_0014
    TotalSum_Soil_Seal = np.maximum(0.000001,
                                    SS_be75 + SS_7590 + SS_9000 + SS_0014)
    SS_be75 /= TotalSum_Soil_Seal
    SS_7590 /= TotalSum_Soil_Seal
    SS_9000 /= TotalSum_Soil_Seal
    SS_0014 /= TotalSum_Soil_Seal
    del TotalSum_Soil_Seal

    SaveLayerDict["SoilSealbe7590"] = [
        fn_share_75, geotransform_obj, self.datatype, SS_be75,
        self.noDataValue
    ]
    SaveLayerDict["SoilSeal7590"] = [
        fn_share_90, geotransform_obj, self.datatype, SS_7590,
        self.noDataValue
    ]
    SaveLayerDict["SoilSeal9000"] = [
        fn_share_00, geotransform_obj, self.datatype, SS_9000,
        self.noDataValue
    ]
    SaveLayerDict["SoilSeal0014"] = [
        fn_share_14, geotransform_obj, self.datatype, SS_0014,
        self.noDataValue
    ]

    CP_based_WeightedEnergyIndicator = (SS_9000 * 1 + SS_0014 * 0.8 +
                                        SS_7590 * 1.25 + SS_be75 * 1.25)
    SaveLayerDict["CPbased_EnergyIntensity"] = [
        self.CPbasedEnergyIntensity_fn_path, geotransform_obj, self.datatype,
        CP_based_WeightedEnergyIndicator, self.noDataValue
    ]
    SaveLayerDict = _export_layer(SaveLayerDict)
    print("DONE!")
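
# Illustrative sketch of the construction-period decomposition above for a
# single pixel (scalar values are made up, and the built-up correction factor
# applied to the pre-1975 term in the code above is omitted here): newer
# built-up is estimated as the positive difference to the older layers after
# a 0.1 %/a demolition decay, with a floor of 0.75 % of the newer layer, and
# the four periods are then normalised to shares.
import numpy as np

ss_75, ss_90, ss_00, ss_14 = 20.0, 25.0, 28.0, 30.0      # built-up per pixel, hypothetical

ss_7590 = max(ss_90 * 0.0075, ss_90 - ss_75 * (1 - 0.001) ** 15)
ss_9000 = max(ss_00 * 0.0075, ss_00 - ss_7590 - ss_75 * (1 - 0.001) ** 25)
ss_0014 = max(ss_14 * 0.0075,
              ss_14 - ss_9000 - ss_7590 * (1 - 0.001) ** 25 - ss_75 * (1 - 0.001) ** 40)

vintages = np.array([ss_75 * (1 - 0.001) ** 40,          # before 1975
                     ss_7590 * (1 - 0.001) ** 25,        # 1975-1990
                     ss_9000,                            # 1990-2000
                     ss_0014])                           # 2000-2014
shares = vintages / max(1e-6, vintages.sum())
print(shares, shares.sum())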
def CreateNewSoilSeal_TIME_layer(self):

    key_field = "NUTS_ID"
    self.REFERENCE_RASTER_LAYER_COORD = \
        CRL.create_reference_raster_layer_origin_extent_of_vctr_feat(
            self.strd_raster_path_full, self.strd_vector_path_NUTS, [],
            Vctr_key_field=key_field)
    (self.REFERENCE_geotransform_obj, self.REFERENCE_RasterSize,
     self.REFERENCE_RESOLUTION,
     self.REFERENCE_extent), uncut = self.REFERENCE_RASTER_LAYER_COORD

    SaveLayerDict = {}
    SaveLayerDict["Reference"] = [
        "%s/REFERENCE.tiff" % self.outputdir_path,
        self.REFERENCE_geotransform_obj, self.datatype_int,
        np.ones((self.REFERENCE_RasterSize), dtype=self.datatype_int),
        self.noDataValue
    ]
    SaveLayerDict = _export_layer(SaveLayerDict)

    """
    arr_pop_cut, geotransform_obj = CRL.clip_raster_layer(self.strd_raster_path_full
                        , self.REFERENCE_geotransform_obj
                        , self.REFERENCE_RasterSize)
    """
    SaveLayerDict = {}
    st_n = time.time()

    for period in [1975, 1990, 2000, 2014]:
        fn = "%s/GHS_BUILT_LDS%s_3035.tif" % (
            self.SoilSeal_input_data_path, str(period))
        fn_new_res = "%s/GHS_BUILT_LDS%s_3035_new.tif" % (
            self.SoilSeal_output_data_path, str(period))
        print(" : Image %s -> %s" % (period, fn))
        SS_period_240, geotransform_obj = SF.rrl(fn, data_type=self.datatype)
        st0 = time.time()
        RESOLUTION = (FINAL_TARGET_RESOLUTION / geotransform_obj[1],
                      FINAL_TARGET_RESOLUTION / -geotransform_obj[5])
        print(np.sum(SS_period_240) / 10**6)
        RESULTS_MATRIX = RSM.reshapeM(SS_period_240, RESOLUTION,
                                      adopt_values=0)
        #RESULTS_MATRIX = RSMpy.reshapeM(POPnew_240, RESOLUTION)
        print("Reshape Image took: %4.1f sec" % (time.time() - st0))
        print(np.sum(RESULTS_MATRIX) / 10**6)
        print(np.max(RESULTS_MATRIX))
        print(np.min(RESULTS_MATRIX))
        print(np.sum(RESULTS_MATRIX > 1000))

        geotransform_obj_newRes = (geotransform_obj[0],
                                   FINAL_TARGET_RESOLUTION, 0,
                                   geotransform_obj[3], 0,
                                   -FINAL_TARGET_RESOLUTION)
        """
        SaveLayerDict["popnewres"] = [fn_new_res, geotransform_obj_newRes
                            , self.datatype
                            , RESULTS_MATRIX
                            , self.noDataValue]
        #SaveLayerDict = _export_layer(SaveLayerDict)
        """
        print(" clip_raster_layer array")
        (RESULTS_MATRIX_clipped,
         geotransform_obj_clipped) = CRL.clip_raster_layer(
             [RESULTS_MATRIX, geotransform_obj_newRes],
             self.REFERENCE_geotransform_obj, self.REFERENCE_RasterSize,
             return_offset_list=False, final_res=1000)

        (data_SS_CLC_500_m) = SOHR.CalcLowResSum(RESULTS_MATRIX_clipped, 5)
        geotransform_obj_clipped_500 = (geotransform_obj_clipped[0],
                                        geotransform_obj_clipped[1] * 5, 0,
                                        geotransform_obj_clipped[3], 0,
                                        geotransform_obj_clipped[5] * 5)
        (data_SS_CLC_1_km) = SOHR.CalcLowResSum(data_SS_CLC_500_m, 2)
        geotransform_obj_clipped_1km = (geotransform_obj_clipped[0],
                                        geotransform_obj_clipped[1] * 10, 0,
                                        geotransform_obj_clipped[3], 0,
                                        geotransform_obj_clipped[5] * 10)

        SaveLayerDict["SoilSealnewres_100"] = [
            "%s_100.tif" % fn_new_res[:-4], geotransform_obj_clipped,
            self.datatype, RESULTS_MATRIX_clipped, self.noDataValue
        ]
        SaveLayerDict["SoilSealnewres_500"] = [
            "%s_500.tif" % fn_new_res[:-4], geotransform_obj_clipped_500,
            self.datatype, data_SS_CLC_500_m, self.noDataValue
        ]
        SaveLayerDict["SoilSealnewres_1km"] = [
            "%s_1km.tif" % fn_new_res[:-4], geotransform_obj_clipped_1km,
            self.datatype, data_SS_CLC_1_km, self.noDataValue
        ]
        SaveLayerDict = _export_layer(SaveLayerDict)
        print("Done")

    print("Process took: %4.1f sec" % (time.time() - st_n))
    print("DONE!")
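
# Illustrative sketch (plain numpy, made-up values): aggregating a clipped
# 100 m array to 500 m / 1 km by block sums and scaling the GDAL-style
# geotransform pixel size by the same factor, mirroring what is done above
# with SOHR.CalcLowResSum (whose exact implementation is assumed, not shown).
import numpy as np

def block_sum(arr, f):
    # Sum f x f blocks of a 2D array whose shape is divisible by f
    h, w = arr.shape
    return arr.reshape(h // f, f, w // f, f).sum(axis=(1, 3))

arr_100m = np.ones((10, 10), dtype="f4")
gt_100m = (4300000.0, 100.0, 0, 2700000.0, 0, -100.0)    # hypothetical origin, 100 m pixels

arr_500m = block_sum(arr_100m, 5)
gt_500m = (gt_100m[0], gt_100m[1] * 5, 0, gt_100m[3], 0, gt_100m[5] * 5)

arr_1km = block_sum(arr_500m, 2)
gt_1km = (gt_100m[0], gt_100m[1] * 10, 0, gt_100m[3], 0, gt_100m[5] * 10)
print(arr_500m.sum(), arr_1km.sum())                     # totals are preserved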