def set_landcover_config(self, path_to_lc_config=None):
    """
    read in the land cover config file holding the land cover classes
    for setting up the lookup table using the ProSAIL radiative transfer
    model

    Parameters
    ----------
    path_to_lc_config : String
        optional, path and filename of the config file for land cover
        classes

    Returns
    -------
    path_to_lc_config : String
        definite location of the config file; exits with an error if the
        file cannot be found
    """
    # in case path_to_lc_config is None use the default location in the
    # root of OBIA4RTM (landcover.cfg)
    if path_to_lc_config is None:
        path_to_lc_config = self.__directory + os.sep + 'landcover.cfg'
    if not os.path.isfile(path_to_lc_config):
        self.__logger.error("Unable to locate the config file for land "
                            "cover classes!")
        close_logger(self.__logger)
        sys.exit(error_message)
    # endif
    return path_to_lc_config
def create_function_statement(sql_function, logger):
    """
    create a SQL statement for creating/replacing a SQL function

    Parameters
    ----------
    sql_function : String
        file-path to the sql-function
    logger : logging.Logger
        for logging errors

    Returns
    -------
    sql_statement : String
        processed and ready-to-execute sql statement
    """
    try:
        with open(sql_function, "r") as fopen:
            lines = fopen.readlines()
    except IOError:
        logger.error('Failed to read the SQL-script\nReason:', exc_info=True)
        close_logger(logger)
        sys.exit(sys_exit_message)
    # extract the SQL statement
    # '--' indicates comments
    comment = '--'
    sql_statement = [''.join(f.replace("\n", "")) for f in lines
                     if comment not in f]
    sql_statement = ''.join(map(str, sql_statement))
    return sql_statement
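# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original source): register the RMSE
# helper function in the database. Assumes create_function_statement() above,
# the OBIA4RTM helpers get_logger()/close_logger() and an open psycopg2
# connection; the .sql path mirrors the one used in create_schema() below.
def _example_register_rmse_function(conn, cursor):
    logger = get_logger()
    statement = create_function_statement(
        'SQL' + os.sep + 'Queries_Functions' + os.sep + 'rmse_function.sql',
        logger)
    cursor.execute(statement)
    conn.commit()
    close_logger(logger)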
def read_params_per_class(prosail_cfg, landcover_cfg, logger):
    """
    reads in the vegetation parameters for the ProSAIL model for each
    land cover class

    Parameters
    ----------
    prosail_cfg : String
        path to the ProSAIL configuration file
    landcover_cfg : String
        path to the landcover configuration file
    logger : logging.Logger
        for recording errors

    Returns
    -------
    container : Dictionary
        dict with the ProSAIL parameters per land cover class
    """
    luc_classes = get_landcover_classes(landcover_cfg)
    # now read in the actual data
    n_classes = len(luc_classes)  # number of land cover classes
    try:
        assert n_classes >= 1
    except AssertionError:
        logger.error('Error: >=1 land cover class must be provided!',
                     exc_info=True)
        close_logger(logger)
        sys.exit(-1)
    num_lines_per_luc = 13  # number of lines per land cover class
    # loop over the land cover classes, store results in a dictionary
    container = dict()
    try:
        values = np.genfromtxt(prosail_cfg, skip_header=0)
    except ValueError:
        logger.error('Failed to read in the config-file', exc_info=True)
        close_logger(logger)
        sys.exit(-1)
    offset_rows = 0
    for section in luc_classes:
        # read in the params per land cover class using numpy
        vals_per_class = values[offset_rows:offset_rows + num_lines_per_luc, :]
        # store in the "container" dictionary
        container[section] = vals_per_class
        # increment offset_rows for the next iteration
        offset_rows += num_lines_per_luc
    return container
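# ---------------------------------------------------------------------------
# Hedged illustration (not part of the original source): the dictionary
# returned by read_params_per_class() is keyed by the class tuples coming
# from get_landcover_classes() (code, semantic meaning), each mapping to a
# 13-row numpy array of ProSAIL parameter settings. The class values below
# are made-up examples.
def _example_inspect_params(container):
    for (lc_code, lc_name), params in container.items():
        # e.g. (4, 'maize') -> numpy array of shape (13, n_columns)
        print(lc_code, lc_name, params.shape)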
def create_sql_statement(sql_file, schema, table_name, logger):
    """
    auxiliary function to create the sql_statement required to create
    the specific tables in the DB schema

    Parameters
    ----------
    sql_file : String
        file-path to the sql-template containing the sql-statement for
        creating the table
    schema : String
        name of the schema the table should be created in
    table_name : String
        name of the table to be created
    logger : logging.Logger
        for logging errors

    Returns
    -------
    sql_statement : String
        processed and ready-to-execute sql statement
    """
    try:
        with open(sql_file, "r") as fopen:
            lines = fopen.readlines()
    except IOError:
        logger.error('Failed to read the SQL-script\nReason:', exc_info=True)
        close_logger(logger)
        sys.exit(sys_exit_message)
    # extract the SQL statement
    # '--' indicates comments
    comment = '--'
    sql_statement = [''.join(f.replace("\n", "")) for f in lines
                     if comment not in f]
    sql_statement = ''.join(map(str, sql_statement))
    # now, replace "schema_name" and "table_name" with their actual values
    sql_statement = sql_statement.replace('schema_name', schema)
    sql_statement = sql_statement.replace('table_name', table_name)
    return sql_statement
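# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original source): build and run the
# CREATE TABLE statement for a lookup table. Schema and table names are
# placeholders; the template path follows the layout used in create_schema()
# below.
def _example_create_lut_table(conn, cursor):
    logger = get_logger()
    statement = create_sql_statement(
        'SQL' + os.sep + 'Tables' + os.sep + 's2_lookuptable.sql',
        'obia4rtm', 's2_lut', logger)
    cursor.execute(statement)
    conn.commit()
    close_logger(logger)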
def surface_reflectance(self, s, bandname):
    """
    Calculate surface reflectance from at-sensor radiance for a given
    waveband name using the 6S algorithm

    Parameters
    ----------
    s : SixS object
        object of the Py6S class
    bandname : String
        name of the Sentinel-2 spectral band to be processed; only the
        nine Sentinel-2 bands used in OBIA4RTM are processed

    Returns
    -------
    ref : ee.image.Image
        surface reflectance
    """
    # run 6S for this waveband
    s.wavelength = self.spectralResponseFunction(bandname)
    try:
        s.run()
        # extract the 6S outputs
        Edir = s.outputs.direct_solar_irradiance       # direct solar irradiance
        Edif = s.outputs.diffuse_solar_irradiance      # diffuse solar irradiance
        Lp = s.outputs.atmospheric_intrinsic_radiance  # path radiance
        absorb = s.outputs.trans['global_gas'].upward         # absorption transmissivity
        scatter = s.outputs.trans['total_scattering'].upward  # scattering transmissivity
        tau2 = absorb * scatter                               # total transmissivity
    except OutputParsingError:
        self.__logger.error('Failed to read 6S outputs!', exc_info=True)
        close_logger(self.__logger)
        sys.exit(-1)
    # at-sensor radiance to surface reflectance:
    # ref = pi * (L - Lp) / (tau2 * (Edir + Edif))
    rad = self.toa_to_rad(bandname)
    ref = rad.subtract(Lp).multiply(math.pi).divide(tau2 * (Edir + Edif))
    return ref
def __init__(self):
    """
    class constructor and basic setup of the processing environment
    """
    # find the directory the 6S binary has been installed to
    # this is a sub-directory of the OBIA4RTM_HOME
    with open(os.path.dirname(OBIA4RTM.__file__) + os.sep + 'OBIA4RTM_HOME',
              'r') as data:
        obia4rtm_dir = data.readline()
    self.sixS_install_dir = obia4rtm_dir + os.sep + 'sixS' + os.sep + \
        'src' + os.sep + '6SV1.1'
    # make sure that 6S is installed
    if not os.path.isdir(self.sixS_install_dir):
        print("Error: 6S is not installed on your computer or cannot be found!\n"
              "Expected installation location: '{}'\n"
              "You might want to run OBIA4RTM.S2_PreProcessor.install_6S "
              "for installing 6S".format(self.sixS_install_dir))
    # adding the directory of the 6S binary to the system path temporarily
    # does not work properly, hence it is disabled:
    # sys.path.append(sixS_install_dir)
    # get a logger for recording success and error messages
    self.__logger = get_logger()
    self.__logger.info('Setting up Google EE environment for Sentinel-2 '
                       'atmospheric correction using the 6S algorithm')
    try:
        ee.Initialize()
    except EEException:
        self.__logger.error('No (valid) Earth-Engine credentials provided!',
                            exc_info=True)
        close_logger(self.__logger)
        sys.exit(-1)
    # for storing the metadata
    self.info = None
    # for the image object
    self.S2 = None
    # for the solar zenith angle and the scene timestamp
    self.solar_z = None
    self.scene_date = None
def get_scene_metadata(self):
    """
    queries the relevant scene metadata from the database
    """
    # check if the cursor is closed -> if closed, reconnect to the database
    if self.cursor.closed:
        self.conn, self.cursor = connect_db.connect_db()
    # database query
    query = "SELECT acquisition_time, "\
            "scene_id, sun_zenith, obs_zenith, rel_azimuth, sensor "\
            "FROM public.scene_metadata WHERE scene_id = '{}';".format(
                    self.scene_id)
    try:
        self.cursor.execute(query)
        res = self.cursor.fetchall()
    except DatabaseError:
        self.__logger.error("Querying scene metadata for scene '{}' "
                            "failed".format(self.scene_id), exc_info=True)
        close_logger(self.__logger)
        sys.exit(error_message)
    # make sure that res is not empty
    # (check before indexing to avoid an uncaught IndexError)
    try:
        assert res is not None and len(res) > 0
    except AssertionError:
        self.__logger.error("Could not find metadata for scene '{}'".format(
                self.scene_id))
        close_logger(self.__logger)
        sys.exit(error_message)
    res = res[0]
    # extract the desired information
    self.acquisition_time = res[0]
    self.acquisition_date = self.acquisition_time[0:10]
    self.scene_id = res[1]
    self.__tts = res[2]
    self.__tto = res[3]
    self.__psi = res[4]
    self.__sensor = res[5]
def set_soilrefl(self, path_to_soilrefl_file=None):
    """
    set up the file-path to the txt file containing the soil reflectance
    required by ProSAIL to account for the soil background, and read in
    the values

    Parameters
    ----------
    path_to_soilrefl_file : String
        optional, path to the txt file with soil reflectance values

    Returns
    -------
    soils : Numpy Array
        array of soil reflectance values (1 nm steps)
    """
    if path_to_soilrefl_file is None:
        path_to_soilrefl_file = self.__directory + os.sep + \
            'soil_reflectance.txt'
    if not os.path.isfile(path_to_soilrefl_file):
        self.__logger.error("Unable to locate the soil_reflectance.txt "
                            "file!")
        close_logger(self.__logger)
        sys.exit(error_message)
    soils = np.genfromtxt(path_to_soilrefl_file)
    return soils
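# ---------------------------------------------------------------------------
# Hedged illustration (not part of the original source): gen_lut() below
# feeds the first column of the returned array (soils[:, 0]) to ProSAIL as
# rsoil0. The expected shape (2101 rows for 400-2500 nm in 1 nm steps) is an
# assumption about the shipped soil_reflectance.txt file.
def _example_check_soils(soils):
    assert soils.shape[0] == 2101, 'expected 400-2500 nm in 1 nm steps'
    return soils[:, 0] if soils.ndim == 2 else soils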
def get_bands(conn, cursor, sensor, logger):
    """
    reads in sensor band centers and FWHM stored in the database

    Parameters
    ----------
    conn : psycopg2 Database connection
        connection to the OBIA4RTM PostgreSQL database
    cursor : psycopg2 Database cursor
        cursor for DB inserts and queries
    sensor : String
        name of the sensor; currently either 'S2A' or 'S2B'
    logger : logging Logger
        for recording errors to the log file

    Returns
    -------
    centers : List
        list of central wavelengths of the spectral bands of the sensor (nm)
    fwhm : List
        list of the full width half maximum of the spectral bands (nm)
    """
    query = "SELECT central_wvl, band_width FROM public.s2_bands WHERE " \
            "sensor = '{0}' ORDER by central_wvl;".format(sensor)
    try:
        cursor.execute(query)
        data = cursor.fetchall()
        centers = [item[0] for item in data]
        fwhm = [item[1] for item in data]
    except ValueError:
        logger.error("Could not retrieve sensor metadata!", exc_info=True)
        close_logger(logger)
        sys.exit(-1)
    return centers, fwhm
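# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original source): fetch the
# Sentinel-2A band metadata, e.g. for building the spectral resampler used
# in gen_lut() below. Assumes an open OBIA4RTM database connection.
def _example_band_metadata(conn, cursor):
    logger = get_logger()
    centers, fwhm = get_bands(conn, cursor, 'S2A', logger)
    # the nine OBIA4RTM bands: B2, B3, B4, B5, B6, B7, B8A, B11, B12
    print(list(zip(centers, fwhm)))
    close_logger(logger)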
def gen_lut(self, inv_mapping_table, inv_table, landcover_config_path=None,
            prosail_config_path=None, soil_path=None):
    """
    Generates the lookup table and stores it in the DB.
    Must be run separately from the inversion part.

    Parameters
    ----------
    inv_mapping_table : String
        name of the table storing the inversion mapping required for
        performing the inversion
    inv_table : String
        name of the table the lookup-table should be written to
        (<schema.table>)
    landcover_config_path : String
        file-path to the landcover config file (opt.; per default the
        OBIA4RTM delivered file will be used)
    prosail_config_path : String
        file-path to the ProSAIL config file (opt.; per default the
        OBIA4RTM delivered file will be used)
    soil_path : String
        file-path to the file with soil reflectance values (opt.; per
        default the OBIA4RTM delivered file will be used)

    Returns
    -------
    None
    """
    # get the scene metadata first
    self.get_scene_metadata()
    # basic setup first
    # get the S2 sensor-response function
    resampler = get_resampler(self.conn, self.cursor, self.__sensor,
                              self.__logger)
    # params that could be inverted
    list_of_params = ['n', 'cab', 'car', 'cbrown', 'cw', 'cm', 'lai',
                      'lidfa', 'lidfb', 'rsoil', 'psoil', 'hspot',
                      'typelidf']
    # firstly, create the LUT from the params config file for the
    # defined land cover classes
    if prosail_config_path is None:
        prosail_config = self.set_ProSAIL_config()
    else:
        prosail_config = self.set_ProSAIL_config(prosail_config_path)
    if landcover_config_path is None:
        landcover_config = self.set_landcover_config()
    else:
        landcover_config = self.set_landcover_config(landcover_config_path)
    # default soil-spectra -> use soil_reflectance from the ProSAIL package
    if soil_path is None:
        soils = self.set_soilrefl()
    else:
        soils = self.set_soilrefl(soil_path)
    # read in the landcover class information and the corresponding
    # prosail parameter setup
    params_container = read_params_per_class(prosail_config,
                                             landcover_config,
                                             self.__logger)
    # extract the land cover classes
    lc_keys = list(params_container.keys())
    # loop over the land cover classes and generate the LUT per class
    for lc in lc_keys:
        # extract the land cover code and semantics
        lc_code = lc[0]  # code
        lc_sema = lc[1]  # meaning
        # get the ProSAIL parameters; skip a land cover class that is
        # not found (dict.get returns None instead of raising KeyError)
        params = params_container.get(lc)
        if params is None:
            self.__logger.warning("Land cover class '{}' specified in "
                                  "config file but not found in ProSAIL "
                                  "config - skipping".format(lc_code))
            continue
        param_lut = lut.lookup_table()
        param_lut.generate_param_lut(params)
        print("INFO: Start to generate ProSAIL-LUT for class '{0}' with "
              "{1} simulations ('{2}')\n".format(
                      lc_sema, param_lut.lut_size, self.scene_id))
        params_inv = dict()
        for ii in range(param_lut.to_be_inv[0].shape[0]):
            params_inv[str(ii)] = list_of_params[param_lut.to_be_inv[0][ii]]
        # convert to json
        params_inv_json = json.dumps(params_inv)
        # write the metadata into the inversion_mapping table
        insert = "INSERT INTO {0} (acquisition_date, "\
                 "params_to_be_inverted, landuse, sensor, scene_id) "\
                 "VALUES('{1}', '{2}', {3}, '{4}', '{5}') "\
                 "ON CONFLICT(scene_id, landuse) DO NOTHING;".format(
                         inv_mapping_table, self.acquisition_date,
                         params_inv_json, lc_code, self.__sensor,
                         self.scene_id)
        try:
            self.cursor.execute(insert)
            self.conn.commit()
        except DatabaseError:
            self.__logger.error("Failed to insert metadata of the "
                                "inversion process!", exc_info=True)
            close_logger(self.__logger)
            sys.exit(error_message)
        # loop over the parameters stored in the LUT and generate the
        # according synthetic spectra
        for ii in range(param_lut.lut_size):
            # run ProSAIL for each combination in the LUT
            try:
                n = param_lut.lut[0, ii]
                cab = param_lut.lut[1, ii]
                car = param_lut.lut[2, ii]
                cbrown = param_lut.lut[3, ii]
                cw = param_lut.lut[4, ii]
                cm = param_lut.lut[5, ii]
                lai = param_lut.lut[6, ii]
                lidfa = param_lut.lut[7, ii]
                lidfb = param_lut.lut[8, ii]
                rsoil = param_lut.lut[9, ii]
                psoil = param_lut.lut[10, ii]
                hspot = param_lut.lut[11, ii]
                typelidf = param_lut.lut[12, ii]
            except IndexError:
                self.__logger.error("No data available for land cover "
                                    "class '{}'".format(lc_code))
                close_logger(self.__logger)
                return
            # run prosail in forward mode -> the resulting spectrum covers
            # 400 to 2500 nm in 1 nm steps
            # use the Python ProSAIL bindings
            spectrum = prosail.run_prosail(n, cab, car, cbrown, cw, cm,
                                           lai, lidfa, hspot, self.__tts,
                                           self.__tto, self.__psi, ant=0.0,
                                           alpha=40., prospect_version="5",
                                           typelidf=typelidf, lidfb=lidfb,
                                           rsoil0=soils[:, 0], rsoil=rsoil,
                                           psoil=psoil, factor="SDR")
            # resample from 1 nm to the spectral response function of the
            # sensor (S2 bands)
            sensor_spectrum = resampler(spectrum)
            # convert to % reflectance
            sensor_spectrum *= 100.
            # store the results in the DB
            insert_statement = (
                "INSERT INTO {0} (id, n, cab, car, cbrown, "
                "cw, cm, lai, lidfa, lidfb, rsoil, psoil, "
                "hspot, tts, tto, psi, typelidf, "
                "b2, b3, b4, b5, b6, b7, b8a, b11, b12, "
                "acquisition_date, landuse, scene_id) "
                "VALUES ({1}, {2}, {3}, {4}, {5}, {6}, {7}, "
                "{8}, {9}, {10}, {11}, {12}, {13}, {14}, {15}, {16}, {17}, "
                "{18}, {19}, {20}, {21}, {22}, {23}, {24}, {25}, {26}, "
                "'{27}', {28}, '{29}') ON CONFLICT (id, scene_id, landuse) "
                "DO NOTHING;").format(
                        inv_table, ii, np.round(n, 4), np.round(cab, 4),
                        np.round(car, 4), np.round(cbrown, 4),
                        np.round(cw, 4), np.round(cm, 4), np.round(lai, 4),
                        np.round(lidfa, 4), np.round(lidfb, 4),
                        np.round(rsoil, 4), np.round(psoil, 4),
                        np.round(hspot, 4), np.round(self.__tts, 4),
                        np.round(self.__tto, 4), np.round(self.__psi, 4),
                        np.round(typelidf, 2),
                        np.round(sensor_spectrum[0], 4),
                        np.round(sensor_spectrum[1], 4),
                        np.round(sensor_spectrum[2], 4),
                        np.round(sensor_spectrum[3], 4),
                        np.round(sensor_spectrum[4], 4),
                        np.round(sensor_spectrum[5], 4),
                        np.round(sensor_spectrum[6], 4),
                        np.round(sensor_spectrum[7], 4),
                        np.round(sensor_spectrum[8], 4),
                        self.acquisition_date, lc_code, self.scene_id)
            try:
                self.cursor.execute(insert_statement)
                self.conn.commit()
            except DatabaseError:
                self.__logger.error("INSERT of synthetic spectra failed!",
                                    exc_info=True)
                continue
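# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original source): table names are
# placeholders following the <schema.table> convention used in this module;
# `lut_gen` stands for an instance of the class owning gen_lut(). Omitting
# the optional paths falls back to the config files shipped with OBIA4RTM.
#
#   lut_gen.gen_lut('obia4rtm.inversion_mapping', 'obia4rtm.s2_lut')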
def update_luc_table(landcover_table, landcover_cfg=None):
    """
    updates the land-cover/land use table in OBIA4RTM that is required for
    performing land-cover class specific vegetation parameter retrieval

    Make sure that the classes in the config file match the land cover
    classes provided for the image objects and used for generating the
    lookup-table. Otherwise bad things might happen.

    NOTE: in case land cover classes that are about to be inserted are
    already stored in the table, they will be overwritten!

    Parameters
    ----------
    landcover_table : String
        name of the table with the land cover information (<schema.table>)
    landcover_cfg : String
        file-path to the land cover configuration file

    Returns
    -------
    None
    """
    # open the logger
    logger = get_logger()
    # if no other file is specified the default file from the OBIA4RTM
    # directory in the user profile will be used (landcover.cfg)
    if landcover_cfg is None:
        # determine the directory the configuration files are located in
        obia4rtm_dir = os.path.dirname(OBIA4RTM.__file__)
        fname = obia4rtm_dir + os.sep + 'OBIA4RTM_HOME'
        with open(fname, 'r') as data:
            directory = data.readline()
        landcover_cfg = directory + os.sep + 'landcover.cfg'
    # check if the specified file exists
    if not os.path.isfile(landcover_cfg):
        logger.error('The specified landcover.cfg cannot be found!',
                     exc_info=True)
        close_logger(logger)
        sys.exit('Error during inserting landcover information. Check log!')
    # connect to the database
    con, cursor = connect_db()
    # read the landcover information
    luc_classes = get_landcover_classes(landcover_cfg)
    # now read in the actual data
    n_classes = len(luc_classes)  # number of land cover classes
    try:
        assert n_classes >= 1
    except AssertionError:
        logger.error('Error: >=1 land cover class must be provided!',
                     exc_info=True)
        close_logger(logger)
        sys.exit('Error while reading the landcover.cfg file. Check log.')
    # now iterate through the lines of the cfg file and insert them into
    # the Postgres database
    logger.info("Trying to insert values into table '{0}' from the "
                "landcover.cfg file ({1})".format(landcover_table,
                                                  landcover_cfg))
    for luc_class in luc_classes:
        # the first item of the tuple must be an integer value,
        # the second one a string
        try:
            luc_code = int(luc_class[0])
        except ValueError:
            logger.error('The landcover.cfg file seems to be corrupt. '
                         'Expected an integer for the land cover code!',
                         exc_info=True)
            close_logger(logger)
            sys.exit('Error during inserting landcover.cfg. Check log!')
        try:
            luc_desc = luc_class[1]
        except ValueError:
            logger.error('The landcover.cfg file seems to be corrupt. '
                         'Expected a string for the land cover description!',
                         exc_info=True)
            close_logger(logger)
            sys.exit('Error during inserting landcover.cfg. Check log!')
        # insert into the database
        # ON CONFLICT -> old values will be replaced
        sql = "INSERT INTO {0} (landuse, landuse_semantic) VALUES "\
              "({1}, '{2}') ON CONFLICT (landuse) DO UPDATE SET "\
              "landuse = {1}, landuse_semantic = '{2}';".format(
                      landcover_table, luc_code, luc_desc)
        cursor.execute(sql)
        con.commit()
    # close the logger and the database connection afterwards
    logger.info("Updated the land cover information in table '{}'".format(
            landcover_table))
    close_logger(logger)
    close_db_connection(con, cursor)
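# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original source): refresh the land
# cover table from the default landcover.cfg in the OBIA4RTM home directory.
# The table name matches the one queried elsewhere in this codebase
# (public.s2_landuse) but is an assumption here.
def _example_update_landcover():
    update_luc_table('public.s2_landuse')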
def do_obj_inversion(self, object_id, acqui_date, land_use, num_solutions,
                     inv_params, res_table, object_table, lut_table):
    """
    performs the inversion per single object using the mean of the xx best
    solutions (RMSE criterion) and stores the result in the results table

    params to be inverted/returned should be passed as a list of strings,
    e.g: inv_params = ["LAI", "CAB"]

    inverted spectra can also be returned: therefore just append the band
    numbers to the list of parameter strings,
    e.g. inv_params = ["LAI", "CAB", "B2", "B3", etc.]

    Parameters
    ----------
    object_id : Integer
        ID of the current object (derived from the OBIA4RTM database)
    acqui_date : Date (YYYY-MM-DD)
        acquisition date of the image used for the inversion
    land_use : Integer
        land cover code for the specific object and date
    num_solutions : Integer
        how many solutions should be used for generating the inversion
        result
    inv_params : List
        list of the parameters (must be named) to be inverted
    res_table : String
        tablename where to store the results of the inversion
        (<schema.table>)
    object_table : String
        tablename of the table containing the object spectra
        (<schema.table>)
    lut_table : String
        tablename of the lookup-table (<schema.table>)

    Returns
    -------
    status : Integer
        zero if everything is OK
    """
    query = """ SELECT
                    lut.id,
                    rmse(obj.b2, obj.b3, obj.b4, obj.b5, obj.b6, obj.b7,
                         obj.b8a, obj.b11, obj.b12,
                         lut.b2, lut.b3, lut.b4, lut.b5, lut.b6, lut.b7,
                         lut.b8a, lut.b11, lut.b12) AS rmse
                FROM
                    {0} as obj,
                    {1} as lut
                WHERE
                    obj.object_id = {2}
                AND
                    obj.scene_id = '{3}'
                AND
                    obj.landuse = {4}
                AND
                    obj.landuse = lut.landuse
                AND
                    obj.scene_id = lut.scene_id
                ORDER BY rmse ASC
                LIMIT {5};""".format(object_table, lut_table, object_id,
                                     self.scene_id, land_use, num_solutions)
    try:
        self.cursor.execute(query)
        inv_res = self.cursor.fetchall()
        lut_ids = [item[0] for item in inv_res]
        rmse_vals = [item[1] for item in inv_res]
        # convert lut_ids to str
        lut_ids = str(lut_ids)
        lut_ids = lut_ids.replace("[", "(")
        lut_ids = lut_ids.replace("]", ")")
        # convert the params to be inverted into the correct format for
        # the SQL-query
        sql_snippets = []
        for param in inv_params:
            sql_snippet = "AVG(" + param + ")"
            sql_snippets.append(sql_snippet)
        # endfor
        sql_snippets = str(sql_snippets)
        sql_snippets = sql_snippets[1:len(sql_snippets) - 1]
        sql_snippets = sql_snippets.replace("'", "")
        # select the biophysical parameters from the xx best solutions in
        # the lut table using the lut ids as keys
        query = "SELECT {0} FROM {1} WHERE id in {2};".format(
                sql_snippets, lut_table, lut_ids)
        try:
            self.cursor.execute(query)
            mean_params = self.cursor.fetchall()
            # convert the result to a dictionary for storing in the DB
            result_dict = dict()
            index = 0
            for param in inv_params:
                result_dict[param] = mean_params[0][index]
                index += 1
            # also store the errors
            error_dict = dict()
            for ii in range(num_solutions):
                error_dict[str(ii + 1)] = rmse_vals[ii]
            # convert to json
            result_json = json.dumps(result_dict)
            error_json = json.dumps(error_dict)
            # insert statement
            insert = "INSERT INTO {0} (object_id, acquisition_date, "\
                     "inversion_results, inversion_errors, scene_id) "\
                     "VALUES ({1}, '{2}', '{3}', '{4}', '{5}') "\
                     "ON CONFLICT (object_id, scene_id) "\
                     "DO NOTHING;".format(res_table, object_id, acqui_date,
                                          result_json, error_json,
                                          self.scene_id)
            try:
                self.cursor.execute(insert)
                self.conn.commit()
            except Exception:
                self.__logger.error("Insert of results for object "
                                    "{0} failed!".format(object_id),
                                    exc_info=True)
                close_logger(self.__logger)
                print(error_message)
                return -1
        except Exception:
            self.__logger.error("No inversion result could be obtained "
                                "for object {0}".format(object_id),
                                exc_info=True)
            close_logger(self.__logger)
            print(error_message)
            return -1
    except Exception:
        self.__logger.error("Inverting object with id {0} failed".format(
                object_id), exc_info=True)
        close_logger(self.__logger)
        print(error_message)
        return -1
    # return zero if everything was OK
    status = 0
    return status
def call_sen2core(sentinel_data_dir, zipped, resolution, path_sen2core):
    """
    calls Sen2Core and runs it on a downloaded Sentinel-2 Level-1C dataset
    to convert it to Level-2A. The output spatial resolution must be
    provided (10, 20 or 60 meters)

    NOTE: If you already have Level-2A imagery then only run the gdal_merge
    wrapper and do not use this function

    Parameters
    ----------
    sentinel_data_dir : String
        path to the directory that contains the Level-1C data. In case the
        data is zipped (default when downloaded from Copernicus) specify
        the file-path of the zip
    zipped : Boolean
        specifies if the directory with the Sat data is zipped
    resolution : Integer
        spatial resolution of the atmospherically corrected imagery
        possible values: 10, 20, 60 meters
    path_sen2core : String
        directory containing the Sen2Core software (top-most level;
        e.g. /home/user/Sen2Core/). Must be the same directory as specified
        during the Sen2Core installation process using the --target option

    Returns
    -------
    sentinel_data_dir_l2 : String
        directory containing the Level-2A output
    """
    # check inputs
    if zipped:
        if not os.path.isfile(sentinel_data_dir):
            print("Error: '{}' does not exist!".format(sentinel_data_dir))
            sys.exit(-1)
    else:
        if not os.path.isdir(sentinel_data_dir):
            print("Error: '{}' does not exist!".format(sentinel_data_dir))
            sys.exit(-1)
    # check the specified spatial resolution of the result
    allowed_res = [10, 20, 60]  # m
    if resolution not in allowed_res:
        print("Error: The specified spatial resolution of {0} is not "
              "allowed!\nMust be one of {1}!".format(resolution,
                                                     allowed_res))
        sys.exit(-1)
    # check if Sen2Core is installed and working
    runs, cmd = check_sen2core_installation(path_sen2core)
    if not runs:
        print('Error: Sen2Core seems not to work properly!')
        sys.exit(-1)
    # after that, enable logging
    logger = get_logger()
    logger.info('Setting up Sen2Core processing environment for processing '
                '{}'.format(sentinel_data_dir))
    # if the data is still zipped, unzip it
    if zipped:
        # extract the parent directory
        parent_dir = os.path.dirname(sentinel_data_dir)
        # unzip
        zip_file = ZipFile(sentinel_data_dir)
        zip_file.extractall(parent_dir)
        zip_file.close()
        # check if everything is OK
        # the new name of sentinel_data_dir is now without the .zip ending
        sentinel_data_dir = sentinel_data_dir.replace('.zip', '')
        # should be a directory
        if not os.path.isdir(sentinel_data_dir):
            logger.error('Unpacking of Sentinel-2 data failed!')
            close_logger(logger)
            sys.exit(error_message)
        logger.info("Successfully extracted zipped data to {}".format(
                sentinel_data_dir))
    # endif
    # now the data is ready for Sen2Core
    # create the command to run Sen2Core
    cmd = cmd.replace(' --help', '')
    cmd = cmd + ' ' + sentinel_data_dir + ' --resolution=' + str(resolution)
    # call Sen2Core and try to execute the command
    logger.info('Starting Sen2Core processing')
    out = subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE,
                           stdout=subprocess.PIPE)
    # read the communication of the process
    res = out.communicate()
    stdout = res[0].decode()
    stderr = res[1].decode()
    # write the output to file
    # determine the filename
    sub_pos = [m.start() for m in re.finditer('_', sentinel_data_dir)]
    fname = sentinel_data_dir[0:sub_pos[2]]
    # parent directory
    parent_dir = os.path.dirname(sentinel_data_dir)
    # save the output of Sen2Core
    sen2core_stdout = parent_dir + os.sep + 'Sen2Core_' + os.path.basename(
            fname) + '.stdout'
    with open(sen2core_stdout, 'a') as out:
        out.writelines(stdout)
    sen2core_stderr = parent_dir + os.sep + 'Sen2Core_' + os.path.basename(
            fname) + '.stderr'
    with open(sen2core_stderr, 'a') as out:
        if stderr == '':
            stderr = 'Sen2Core terminated without errors'
        out.writelines(stderr)
    # determine the dir-name of the processed data
    fname = fname.replace('MSIL1C', 'MSIL2A')
    # get the list of all directories
    list_dirs = os.listdir(parent_dir)
    # directory of the L2 data
    sentinel_data_dir_l2 = ''
    for directory in list_dirs:
        sub_pos = [m.start() for m in re.finditer('_', directory)]
        fname_act = parent_dir + os.sep + directory[0:sub_pos[2]]
        if fname_act == fname:
            sentinel_data_dir_l2 = parent_dir + os.sep + directory
            break
    if sentinel_data_dir_l2 == '':
        logger.error(
                'Seems as if Sen2Core failed. Check {} for more info.'.format(
                        sen2core_stderr))
        close_logger(logger)
        sys.exit(error_message)
    logger.info("Success - Processed data is in '{0}'. See also {1}".format(
            sentinel_data_dir_l2, sen2core_stdout))
    # delete the L1C directory in case the data was available as zip to
    # save disk storage
    if zipped:
        shutil.rmtree(sentinel_data_dir, ignore_errors=True)
    close_logger(logger)
    return sentinel_data_dir_l2
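# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original source): atmospherically
# correct a zipped Level-1C download at 20 m. All paths are placeholders.
def _example_sen2core_run():
    return call_sen2core('/data/S2A_MSIL1C_20180601T101021.zip',
                         zipped=True, resolution=20,
                         path_sen2core='/home/user/Sen2Core/')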
def call_gdal_merge(sentinel_data_dir_l2, resolution, storage_dir=None):
    """
    calls the gdal_merge.py script to make an image layer stack and prepare
    the imagery for usage in OBIA4RTM. Outputs a GeoTiff with the nine
    Sentinel-2 bands used in OBIA4RTM and the SCL band that contains the
    preclassification information. You can also use this function directly
    if you already have Level-2A data.

    Parameters
    ----------
    sentinel_data_dir_l2 : String
        path of the directory containing the output of sen2core in L2 level
    resolution : Integer
        spatial resolution of the atmospherically corrected imagery
        possible values: 10, 20, 60 meters
    storage_dir : String
        path to the directory the layer stack should be moved to. If None,
        the layer stack will remain in the img folder of
        sentinel_data_dir_l2

    Returns
    -------
    fname_stack : String
        file-path to the stacked imagery
    metadata_xml : String
        file-path to the metadata xml file
    """
    # enable logging
    logger = get_logger()
    # check inputs
    if not os.path.isdir(sentinel_data_dir_l2):
        logger.error(
                "Error: '{}' does not exist!".format(sentinel_data_dir_l2))
        close_logger(logger)
        sys.exit(error_message)
    # resolution
    allowed_res = [10, 20, 60]  # m
    if resolution not in allowed_res:
        logger.error("Error: The specified spatial resolution of {0} is "
                     "not allowed! Must be one of {1}!".format(
                             resolution, allowed_res))
        close_logger(logger)
        sys.exit(error_message)
    # storage directory
    if storage_dir is not None:
        if not os.path.isdir(storage_dir):
            try:
                os.mkdir(storage_dir)
            except PermissionError:
                logger.error("Could not create directory '{}'".format(
                        storage_dir), exc_info=True)
    logger.info('Formatting sen2core output for OBIA4RTM')
    # change to the sentinel_data_dir_l2 to avoid endless path names
    # jump directly into the 'GRANULE' directory
    sentinel_data_dir_l2 = sentinel_data_dir_l2 + os.sep + 'GRANULE'
    os.chdir(sentinel_data_dir_l2)
    # path to the image bands
    next_subdir = os.listdir()[0]
    # now the full path can be constructed
    sentinel_data_dir_l2 += os.sep + next_subdir
    os.chdir(sentinel_data_dir_l2)
    # check if the MTD_TL.xml metadata file can be found
    if not os.path.isfile('MTD_TL.xml'):
        logger.warning('No metadata-xml file could be found!')
        metadata_xml = ''
    else:
        metadata_xml = sentinel_data_dir_l2 + os.sep + 'MTD_TL.xml'
    sentinel_data_dir_l2 += os.sep + 'IMG_DATA' + os.sep + 'R' + str(
            resolution) + 'm'
    try:
        os.chdir(sentinel_data_dir_l2)
    except FileNotFoundError:
        logger.error('Could not find {}'.format(sentinel_data_dir_l2),
                     exc_info=True)
        close_logger(logger)
        sys.exit(error_message)
    # get the jp2 files containing the single bands and the SCL information
    band_files = os.listdir()
    band_names = ['B02', 'B03', 'B04', 'B05', 'B06', 'B07', 'B8A', 'B11',
                  'B12', 'SCL']
    stack_files = []
    # loop over the filenames to find the important ones
    for band_file in band_files:
        for band_name in band_names:
            if band_file.find(band_name) != -1:
                stack_files.append(band_file)
                break
    # now the list must be sorted to bring the bands into the correct order
    stack_files.sort()
    # check if 10 bands were found
    try:
        assert len(stack_files) == 10
    except AssertionError:
        logger.error('Expected 10 bands, got {}'.format(len(stack_files)),
                     exc_info=True)
        close_logger(logger)
        sys.exit(error_message)
    # convert the list to a string
    stack_files = str(stack_files).replace('[', '').replace(
            ']', '').replace(',', ' ')
    # make the name of the stacked layer file
    sub_pos = [m.start() for m in re.finditer('_', band_files[0])]
    prefix = band_files[0][0:sub_pos[1]]
    fname_stack = prefix + '_merged.tiff'
    # now everything is ready for running the gdal_merge.py script
    # output is GTiff and not Jpeg2000 as there are still some driver
    # problems
    cmd = 'gdal_merge.py -of GTiff -separate ' + stack_files + ' -o ' + \
        fname_stack
    logger.info('Running {}'.format(cmd))
    out = subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE,
                           stdout=subprocess.PIPE)
    res = out.communicate()
    stderr = res[1].decode()
    # check if the command worked
    if stderr != '':
        logger.error('gdal_merge failed: {}'.format(stderr))
        close_logger(logger)
        sys.exit(error_message)
    logger.info(
            'Successfully stacked Sentinel-2 bands ({})'.format(fname_stack))
    # give the full path
    fname_stack_short = fname_stack
    fname_stack = sentinel_data_dir_l2 + os.sep + fname_stack
    # in case an alternative directory was specified, move the metadata
    # file and the stacked layer file to this directory
    if storage_dir is not None:
        # rename the metadata-file and copy it to the storage drive
        copy = shutil.copy2(metadata_xml,
                            storage_dir + os.sep + prefix + '_MTD_TL.xml')
        try:
            assert copy != '' and copy is not None
        except AssertionError:
            logger.error('Copying of the metadata file failed!',
                         exc_info=True)
            close_logger(logger)
            sys.exit(error_message)
        # move the stacked image
        os.rename(fname_stack, storage_dir + os.sep + fname_stack_short)
        logger.info("Moved the imagery and the metadata xml to '{}'".format(
                storage_dir))
    # close the logger
    close_logger(logger)
    # return the file-paths of the imagery and the metadata
    return fname_stack, metadata_xml
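# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original source): stack the nine
# OBIA4RTM bands plus SCL at 20 m and move the result to a working
# directory. Paths are placeholders; `l2_dir` is the directory returned by
# call_sen2core() above.
def _example_layer_stack(l2_dir):
    fname_stack, metadata_xml = call_gdal_merge(
        l2_dir, 20, storage_dir='/data/obia4rtm')
    return fname_stack, metadata_xml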
def create_schema():
    """
    generates a new schema in the OBIA4RTM database. In case the schema
    already exists, nothing will happen. The schema to be created is taken
    from the obia4rtm_backend.cfg file

    Parameters
    ----------
    None

    Returns
    -------
    status : Integer
        zero if everything was OK
    """
    status = 0
    # connect to the OBIA4RTM database
    con, cursor = connect_db()
    # open a logger
    logger = get_logger()
    logger.info('Trying to set up a new schema for the OBIA4RTM database')
    # read in the obia4rtm_backend information to get the name of the
    # schema; therefore the obia4rtm_backend.cfg file must be read
    install_dir = os.path.dirname(OBIA4RTM.__file__)
    home_pointer = install_dir + os.sep + 'OBIA4RTM_HOME'
    if not os.path.isfile(home_pointer):
        logger.error('Cannot determine the OBIA4RTM home directory!')
        close_logger(logger)
        sys.exit(-1)
    with open(home_pointer, "r") as data:
        obia4rtm_home = data.read()
    backend_cfg = obia4rtm_home + os.sep + 'obia4rtm_backend.cfg'
    if not os.path.isfile(backend_cfg):
        logger.error('Cannot read obia4rtm_backend.cfg from {}!'.format(
                obia4rtm_home))
        close_logger(logger)
        sys.exit(sys_exit_message)
    # now the cfg information can be read in using the ConfigParser class
    parser = ConfigParser()
    try:
        parser.read(backend_cfg)
    except MissingSectionHeaderError:
        logger.error('The obia4rtm_backend.cfg does not fulfil the formal '
                     'requirements!', exc_info=True)
        close_logger(logger)
        sys.exit(-1)
    # now get the name of the schema
    schema = parser.get('schema-setting', 'schema_obia4rtm')
    try:
        assert schema is not None and schema != ''
    except AssertionError:
        logger.error('The version of your obia4rtm_backend.cfg file seems '
                     'to be corrupt!', exc_info=True)
        close_logger(logger)
        sys.exit(sys_exit_message)
    # if the schema name is OK, the schema can be created
    # if the schema already exists in the current database, nothing happens
    sql = 'CREATE SCHEMA IF NOT EXISTS {};'.format(schema)
    cursor.execute(sql)
    con.commit()
    # enable the PostGIS and HSTORE extensions
    # in case this fails it is most likely because the extension is
    # already enabled
    sql = "CREATE EXTENSION PostGIS;"
    try:
        cursor.execute(sql)
        con.commit()
    except (ProgrammingError, DatabaseError):
        logger.info("PostGIS already enabled!")
        con.rollback()
    # enable the HSTORE extension
    sql = "CREATE EXTENSION HSTORE;"
    try:
        cursor.execute(sql)
        con.commit()
    except (ProgrammingError, DatabaseError):
        logger.info("HSTORE already enabled!")
        con.rollback()
    logger.info("Successfully created schema '{}' in the current OBIA4RTM "
                "database!".format(schema))
    # after that the schema-specific tables required in OBIA4RTM are
    # created
    sql_home = install_dir + os.sep + 'SQL' + os.sep + 'Tables'
    # the tables s2_inversion_results, s2_lookuptable, s2_objects and
    # s2_inversion_mapping must be created within the schema
    # check if the tables already exist before trying to create them
    sql_scripts = ['s2_lookuptable.sql', 's2_inversion_results.sql',
                   's2_objects.sql', 'inversion_mapping.sql']
    # go through the config file to get the table-names
    table_names = []
    table_names.append(parser.get('schema-setting', 'table_lookuptabe'))
    table_names.append(parser.get('schema-setting', 'table_inv_results'))
    table_names.append(parser.get('schema-setting', 'table_object_spectra'))
    table_names.append(parser.get('schema-setting', 'table_inv_mapping'))
    # the parser can be cleared now as all information is read
    parser.clear()
    # iterate through the four scripts to create the tables if they do not
    # exist yet
    for index in range(len(sql_scripts)):
        sql_script = sql_home + os.sep + sql_scripts[index]
        table_name = table_names[index]
        # check if the table already exists
        exists = check_if_exists(schema, table_name, cursor)
        # if the table already exists log a warning and continue with the
        # next table
        if exists:
            logger.warning("Table '{0}' already exists in schema '{1}' - "
                           "skipping".format(table_name, schema))
            continue
        # else create the table
        # get the corresponding sql-statement and try to execute it
        sql_statement = create_sql_statement(sql_script, schema, table_name,
                                             logger)
        try:
            cursor.execute(sql_statement)
            con.commit()
        except (DatabaseError, ProgrammingError):
            logger.error("Creating table '{0}' in schema '{1}' "
                         "failed!".format(table_name, schema),
                         exc_info=True)
            close_logger(logger)
            sys.exit(sys_exit_message)
        # log success
        logger.info("Successfully created table '{0}' in schema "
                    "'{1}'".format(table_name, schema))
    # create the RMSE function required for inverting the spectra
    fun_home = install_dir + os.sep + 'SQL' + os.sep + 'Queries_Functions'
    rmse_fun = fun_home + os.sep + 'rmse_function.sql'
    sql_statement = create_function_statement(rmse_fun, logger)
    try:
        cursor.execute(sql_statement)
        con.commit()
    except (DatabaseError, ProgrammingError):
        logger.error("Creating function '{0}' failed!".format(rmse_fun),
                     exc_info=True)
        close_logger(logger)
        sys.exit(sys_exit_message)
    # after iterating, the DB connection and the logger can be closed
    close_db_connection(con, cursor)
    close_logger(logger)
    return status
def run_py6s(self, geom, acqui_date, option=1):
    """
    runs the 6S algorithm for atmospheric correction on a user-defined
    geometry and date on Sentinel-2 imagery
    Requires the Google Earth-Engine Python API client

    Parameters
    ----------
    geom : EE-Geometry
        Google EE geometry specifying the geographic extent to be processed
    acqui_date : String
        date (YYYY-MM-dd) of the desired scene to be processed
    option : Integer
        for computing the cloud mask the user can decide whether the ESA
        delivered quality layer should be used for cloud masking (option=1)
        or an alternative approach originally developed for Landsat TM
        (option=2) should be used (Def=1)

    Returns
    -------
    S2_surf : ee.image.Image
        Google EE image instance with surface reflectance values and two
        additional bands containing clouds ('CloudMask') and detected
        cloud shadows ('ShadowMask')
    """
    date = ee.Date(acqui_date)
    # get the Sentinel-2 image at or immediately after the specified date
    self.S2 = ee.Image(
            ee.ImageCollection('COPERNICUS/S2')
            .filterBounds(geom)
            .filterDate(date, date.advance(3, 'month'))
            .sort('system:time_start')
            .first()
            )
    # extract the relevant metadata for carrying out the atmospheric
    # correction
    self.info = self.S2.getInfo()['properties']
    # get the solar zenith angle and the scene date
    self.scene_date = datetime.datetime.utcfromtimestamp(
            self.info['system:time_start'] / 1000)
    self.solar_z = self.info['MEAN_SOLAR_ZENITH_ANGLE']
    # log the identifier of the processed scene
    scene_id = self.info.get('DATASTRIP_ID')
    self.__logger.info("Starting processing scene '{}' using GEE and "
                       "Py6S".format(scene_id))
    # get the atmospheric constituents,
    # i.e. water vapour (h2o), ozone (o3), aerosol optical thickness (aot)
    h2o = Atmospheric.water(geom, date).getInfo()
    o3 = Atmospheric.ozone(geom, date).getInfo()
    aot = Atmospheric.aerosol(geom, date).getInfo()
    # add this information to the info dictionary as this metadata could
    # be of interest afterwards
    self.info['WATER_VAPOUR'] = h2o
    self.info['OZONE'] = o3
    self.info['AOT'] = aot
    # get the average altitude of the region to be processed
    # for the Digital Elevation Model (DEM) the Shuttle Radar Topography
    # Mission (SRTM) data is used (Version 4) as it covers most parts of
    # the Earth (the 90 arc-sec data is used)
    SRTM = ee.Image('CGIAR/SRTM90_V4')
    alt = SRTM.reduceRegion(reducer=ee.Reducer.mean(),
                            geometry=geom.centroid()
                            ).get('elevation').getInfo()
    message = "Atcorr-Metadata: Water-Vapour = {0}, Ozone = {1}, "\
              "AOT = {2}, Average Altitude (m) = {3}".format(h2o, o3, aot,
                                                             alt)
    self.__logger.info(message)
    # Py6S uses units of kilometers
    km = alt / 1000
    # mask out clouds and cirrus from the imagery using the cloud score
    # algorithm provided by Sam Murphy under Apache 2.0 licence, see:
    # https://github.com/samsammurphy/cloud-masking-sentinel2/blob/master/cloud-masking-sentinel2.ipynb
    # also converts the image values to top-of-atmosphere reflectance
    self.__logger.info('Calculating cloud and shadow mask')
    # pass the user-selected masking option through to mask_clouds
    self.S2 = mask_clouds(self.S2, option=option)
    self.__logger.info('Finished calculating cloud and shadow mask')
    # create a 6S object from the Py6S class
    # (use the explicit path to the installation directory of the 6S
    # binary as otherwise there might be an error)
    s = SixS()
    # atmospheric constituents
    s.atmos_profile = AtmosProfile.UserWaterAndOzone(h2o, o3)
    s.aero_profile = AeroProfile.Continental
    s.aot550 = aot
    # Earth-Sun-satellite geometry
    s.geometry = Geometry.User()
    s.geometry.view_z = 0                     # always NADIR (simplification!)
    s.geometry.solar_z = self.solar_z         # solar zenith angle
    s.geometry.month = self.scene_date.month  # used for Earth-Sun distance
    s.geometry.day = self.scene_date.day      # used for Earth-Sun distance
    s.altitudes.set_sensor_satellite_level()
    s.altitudes.set_target_custom_altitude(km)
    self.__logger.info('6S: Starting processing of Sentinel-2 scene!')
    # now iterate over the nine relevant Sentinel-2 bands to perform the
    # atmospheric correction and get the surface reflectance
    B2_surf = self.surface_reflectance(s, 'B2')
    self.__logger.info('6S: Finished processing Sentinel-2 Band 2!')
    B3_surf = self.surface_reflectance(s, 'B3')
    self.__logger.info('6S: Finished processing Sentinel-2 Band 3!')
    B4_surf = self.surface_reflectance(s, 'B4')
    self.__logger.info('6S: Finished processing Sentinel-2 Band 4!')
    B5_surf = self.surface_reflectance(s, 'B5')
    self.__logger.info('6S: Finished processing Sentinel-2 Band 5!')
    B6_surf = self.surface_reflectance(s, 'B6')
    self.__logger.info('6S: Finished processing Sentinel-2 Band 6!')
    B7_surf = self.surface_reflectance(s, 'B7')
    self.__logger.info('6S: Finished processing Sentinel-2 Band 7!')
    B8A_surf = self.surface_reflectance(s, 'B8A')
    self.__logger.info('6S: Finished processing Sentinel-2 Band 8A!')
    B11_surf = self.surface_reflectance(s, 'B11')
    self.__logger.info('6S: Finished processing Sentinel-2 Band 11!')
    B12_surf = self.surface_reflectance(s, 'B12')
    self.__logger.info('6S: Finished processing Sentinel-2 Band 12!')
    self.__logger.info('6S: Finished processing of Sentinel-2 scene!')
    # make a stack of the spectral bands
    # also add the computed cloud and shadow mask
    cm = self.S2.select('CloudMask')
    sm = self.S2.select('ShadowMask')
    S2_surf = B2_surf.addBands(B3_surf).addBands(B4_surf)\
        .addBands(B5_surf).addBands(B6_surf).addBands(B7_surf)\
        .addBands(B8A_surf).addBands(B11_surf).addBands(B12_surf)\
        .addBands(cm).addBands(sm)
    # return the surface reflectance image
    close_logger(self.__logger)
    return S2_surf
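# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original source): geometry and date
# are placeholders and valid Earth Engine credentials are required (see
# __init__ above); `atcorr` stands for an instance of this class. The
# returned ee.Image holds the nine surface reflectance bands plus the
# 'CloudMask' and 'ShadowMask' bands.
#
#   import ee
#   geom = ee.Geometry.Rectangle([11.0, 48.0, 11.2, 48.2])
#   s2_surf = atcorr.run_py6s(geom, '2018-06-01', option=1)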
def do_inversion(self, land_use, num_solutions, res_table, object_table,
                 inv_mapping_table, lut_table, return_specs=True):
    """
    performs the inversion on all objects for a given date.
    NOTE: the object reflectance values must already be available in the
    database and the lookup-table must exist; make sure to run gen_lut
    beforehand!
    Works as a wrapper around the do_obj_inversion method

    Parameters
    ----------
    land_use : Integer
        land cover code for the specific object and date
    num_solutions : Integer
        how many solutions should be used for generating the inversion
        result
    res_table : String
        tablename where to store the results of the inversion
        (<schema.table>)
    object_table : String
        tablename of the table containing the object spectra
        (<schema.table>)
    inv_mapping_table : String
        tablename of the table containing the parameters to be inverted
        per acquisition date (scene) and land use/cover class
    lut_table : String
        table containing the ProSAIL LUT on a per scene and land use/cover
        class basis
    return_specs : Boolean
        determines whether the inverted spectra should be returned
        (True; default)

    Returns
    -------
    None
    """
    # read in the scene metadata
    self.get_scene_metadata()
    # get the list of objects available for a given land use class at a
    # given day
    query = "SELECT DISTINCT object_id FROM {0} "\
            " WHERE acquisition_date = '{1}'"\
            " AND landuse = {2};".format(object_table,
                                         self.acquisition_date, land_use)
    try:
        self.cursor.execute(query)
        object_ids = self.cursor.fetchall()
        object_ids = [item[0] for item in object_ids]
    except Exception:
        self.__logger.error("Could not query objects for acquisition date "
                            "'{0}' and LUC {1}".format(
                                    self.acquisition_date, land_use),
                            exc_info=True)
        close_logger(self.__logger)
        sys.exit(-1)
    # get the list of params to be inverted
    query = "SELECT params_to_be_inverted FROM {0}"\
            " WHERE scene_id = '{1}' AND landuse = {2};".format(
                    inv_mapping_table, self.scene_id, land_use)
    try:
        self.cursor.execute(query)
        params = self.cursor.fetchall()
        params_dict = params[0][0]
        # convert to list
        params_list = []
        for key, val in params_dict.items():
            params_list.append(val)
        # if the inverted spectra should be returned, add the band names
        # to params_list
        if return_specs:
            band_names = ["B2", "B3", "B4", "B5", "B6", "B7", "B8A",
                          "B11", "B12"]
            for band_name in band_names:
                params_list.append(band_name)
            # endfor
        # endif
    except Exception:
        self.__logger.error("Retrieving inversion metadata for scene "
                            "'{0}' and LUC {1} failed!".format(
                                    self.scene_id, land_use),
                            exc_info=True)
        close_logger(self.__logger)
        sys.exit(error_message)
    # iterate over all objects to perform the inversion per object
    for ii in range(len(object_ids)):
        object_id = object_ids[ii]
        resrun = self.do_obj_inversion(object_id, self.acquisition_date,
                                       land_use, num_solutions,
                                       params_list, res_table,
                                       object_table, lut_table)
        # in case an error happened, reopen the database connection and
        # continue with the next object
        if resrun != 0:
            self.conn, self.cursor = connect_db.connect_db()
            continue
        # endif
    # endfor
    # close the database connection at the end
    if self.conn is not None:
        self.cursor.close()
        self.conn.close()
def get_mean_refl_ee(shp_file, img, acqui_date, scene_id, table_name):
    """
    calculates the mean reflectance per object in the image. Uses the
    GEE Python bindings for reading the shape and Sentinel-2 imagery data.

    Parameters
    ----------
    shp_file : String
        file-path to the ESRI shapefile with the image object boundaries
    img : ee.image.Image
        GEE imagery containing the atmospherically corrected Sentinel-2
        data
    acqui_date : String
        acquisition date of the imagery (used for linking to LUT and
        metadata)
    scene_id : String
        ID of the Sentinel-2 scene
    table_name : String
        name of the table the object reflectance values should be written
        to

    Returns
    -------
    None
    """
    # open the database connection to OBIA4RTM's backend
    conn, cursor = connect_db()
    # get a logger
    logger = get_logger()
    # in case it isn't done yet:
    ee.Initialize()
    # iterate over the shapefile to get the metadata
    # shapefile handling
    driver = ogr.GetDriverByName('ESRI Shapefile')
    shpfile = driver.Open(shp_file)
    # check if the shapefile exists and could be opened
    if shpfile is None:
        raise TypeError(
                "The provided file '{}' is invalid or blocked!".format(
                        shp_file))
    layer = shpfile.GetLayer(0)
    num_objects = layer.GetFeatureCount()
    logger.info("{0} image objects will be processed. This might take a "
                "while...".format(num_objects))
    # loop over the single features to get their geometry, their ID and
    # the mean reflectance per band
    # before that, check the raster metadata from GEE
    img_epsg = img.select('B2').projection().crs().getInfo()
    img_epsg = int(img_epsg.split(':')[1])
    # check against the EPSG of the shapefile
    ref = layer.GetSpatialRef()
    if ref is None:
        logger.warning('The layer has no projection info! Assuming it is '
                       'the same as for the imagery - but check the '
                       'results!')
        shp_epsg = img_epsg
        # assuming that the imagery is projected in UTM as it should be,
        # the UTM zone is stored in the last two digits
        utm = int(str(shp_epsg)[3::])
    else:
        code = ref.GetAuthorityCode(None)
        shp_epsg = int(code)
        utm = ref.GetUTMZone()
    if img_epsg != shp_epsg:
        logger.error('The projection of the imagery does not match the '
                     'projection of the shapefile you provided! '
                     'EPSG-Code of the Image: EPSG:{0}; '
                     'EPSG-Code of the Shapefile: EPSG:{1}'.format(
                             img_epsg, shp_epsg))
        close_logger(logger)
        sys.exit('An error occurred while executing get_mean_refl. '
                 'Check logfile!')
    # determine the minimum area of an object (derived from the S2 spatial
    # resolutions); an object must be at least twice this size
    min_area = 20 * 60 * 2  # m2
    # for requesting the landuse information
    luc_field = 'LU' + acqui_date.replace('-', '')
    # start iterating over the features
    # get the geometry and extent of each feature
    for ii in range(num_objects):
        feature = layer.GetFeature(ii)
        # extract the geometry
        geom = feature.GetGeometryRef()
        # get a well-known text representation -> required by PostGIS
        wkt = geom.ExportToWkt()
        # get the ID
        # f_id = feature.GetFID()  # deprecated
        f_id = feature.GetField('id')
        # get the land cover code
        luc = feature.GetField(luc_field)
        # convert to integer coding if luc is provided as text
        try:
            luc = int(luc)
        except ValueError:
            luc = luc.upper()
            query = "SELECT landuse FROM public.s2_landuse WHERE "\
                    "landuse_semantic = '{0}';".format(luc)
            cursor.execute(query)
            res = cursor.fetchall()
            luc = int(res[0][0])
        # end try-except
        # get the area of the feature and check if it fits the image
        # resolution -> if the object is too small, skip it
        area = geom.Area()  # m2
        if area < min_area:
            logger.warning('The object {0} was too small compared to the '
                           'spatial resolution of Sentinel-2! '
                           'Object area (m2): {1}; Minimum area required '
                           '(m2): {2} -> skipping'.format(f_id, area,
                                                          min_area))
            continue
        # export the coordinates of the geometry temporarily to a JSON
        # dictionary for communicating with GEE
        geom_json = ast.literal_eval(geom.ExportToJson())
        # get the geometry type
        # allowed values: Polygon and MultiPolygon
        geom_type = geom_json.get('type')
        if geom_type not in ['Polygon', 'MultiPolygon']:
            logger.warning('Object with ID {} is not of type Polygon or '
                           'MultiPolygon -> skipping'.format(f_id))
            continue
        # get the coordinates
        geom_coords = geom_json.get('coordinates')[0]
        # must be converted to lon, lat for GEE
        geo_coords = []
        for geom_coord in geom_coords:
            easting = geom_coord[0]
            northing = geom_coord[1]
            # call the transform method
            lon, lat, alt = transform_utm_to_wgs84(easting, northing, utm)
            geo_coord = []
            geo_coord.append(lon)
            geo_coord.append(lat)
            geo_coords.append(geo_coord)
        # construct a GEE geometry
        # TODO -> test what happens for MultiPolygon!
        geom_gee = ee.geometry.Geometry.Polygon(geo_coords)
        # use the image reduce function to get the mean reflectance values
        # for each of the nine bands used in GEE
        meanDictionary = img.reduceRegion(reducer=ee.Reducer.mean(),
                                          geometry=geom_gee)
        # extract the computed mean values for the particular image
        # only use those bands required for OBIA4RTM
        # multiply by 100 to get % surface reflectance values
        multiplier = 100
        # surround with try-except in case only blackfill was found for
        # an object
        try:
            B2 = meanDictionary.get('B2').getInfo() * multiplier
            B3 = meanDictionary.get('B3').getInfo() * multiplier
            B4 = meanDictionary.get('B4').getInfo() * multiplier
            B5 = meanDictionary.get('B5').getInfo() * multiplier
            B6 = meanDictionary.get('B6').getInfo() * multiplier
            B7 = meanDictionary.get('B7').getInfo() * multiplier
            B8A = meanDictionary.get('B8A').getInfo() * multiplier
            B11 = meanDictionary.get('B11').getInfo() * multiplier
            B12 = meanDictionary.get('B12').getInfo() * multiplier
        except TypeError:
            logger.info('No spectral information found for object with '
                        'ID {}'.format(f_id))
            continue
        # check the cloud and shadow mask
        # the cloud and shadow masks are binary
        # if the average is zero everything is OK (no clouds, no shadows)
        cm = meanDictionary.get('CloudMask').getInfo()
        sm = meanDictionary.get('ShadowMask').getInfo()
        # if the shadow and/or the cloud mask is not zero on average
        # -> skip the object as it is cloud covered or affected by
        # cloud shadows
        if cm > 0:
            logger.info('Object with ID {} is covered by clouds '
                        '-> skipping'.format(f_id))
            continue
        if sm > 0:
            logger.info('Object with ID {} is covered by cloud shadows '
                        '-> skipping'.format(f_id))
            continue
        # also make sure that the object really contains reflectance
        # values; checking the first band should be sufficient
        if B2 is None:
            logger.info('Object with ID {} contains only NaN values '
                        '-> skipping'.format(f_id))
            continue
        # otherwise insert the data into the PostgreSQL database
        try:
            query = "INSERT INTO {0} (object_id, acquisition_date, "\
                    "landuse, object_geom, b2, b3, b4, b5, b6, b7, b8a, "\
                    "b11, b12, scene_id) VALUES ( "\
                    "{1}, '{2}', {3}, ST_Multi(ST_GeometryFromText('{4}', "\
                    "{5})), {6}, {7}, {8}, {9}, {10}, {11}, {12}, {13}, "\
                    "{14}, '{15}') "\
                    "ON CONFLICT (object_id, scene_id) DO NOTHING;".format(
                            table_name, f_id, acqui_date, luc, wkt,
                            img_epsg, np.round(B2, 4), np.round(B3, 4),
                            np.round(B4, 4), np.round(B5, 4),
                            np.round(B6, 4), np.round(B7, 4),
                            np.round(B8A, 4), np.round(B11, 4),
                            np.round(B12, 4), scene_id)
        except ValueError:
            logger.error("Invalid string syntax encountered when "
                         "attempting to generate the INSERT for field {0} "
                         "on '{1}'".format(f_id, acqui_date))
            continue
        # catch errors for single objects accordingly and continue with
        # the next object to avoid interrupts of the whole workflow
        try:
            cursor.execute(query)
            conn.commit()
        except (DatabaseError, ProgrammingError):
            logger.error("Could not insert image object with ID {0} into "
                         "table '{1}'".format(f_id, table_name),
                         exc_info=True)
            conn.rollback()
            continue
    # endfor
    # close the GDAL-bindings to the files
    shpfile = None
    layer = None
    # close the database connection
    close_db_connection(conn, cursor)
    # close the logger
    close_logger(logger)
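# ---------------------------------------------------------------------------
# Hedged sketch (not part of the original source) of the
# transform_utm_to_wgs84() helper used above, which OBIA4RTM defines
# elsewhere. This osr-based version is an assumption about its behaviour:
# it presumes a northern-hemisphere UTM zone, `from osgeo import osr` (as in
# get_mean_refl() below), and GDAL 2.x axis order (lon, lat); GDAL 3+ may
# swap the axes.
def _example_transform_utm_to_wgs84(easting, northing, zone):
    utm_ref = osr.SpatialReference()
    utm_ref.ImportFromEPSG(32600 + zone)  # EPSG code of the UTM zone (north)
    wgs84 = osr.SpatialReference()
    wgs84.ImportFromEPSG(4326)
    transform = osr.CoordinateTransformation(utm_ref, wgs84)
    lon, lat, alt = transform.TransformPoint(easting, northing)
    return lon, lat, alt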
def get_mean_refl(shp_file, raster_file, acqui_date, scene_id, table_name):
    """
    calculates the mean reflectance per object in the image. Uses the
    GDAL Python bindings for reading the shape and raster data.

    Parameters
    ----------
    shp_file : String
        file-path to the ESRI shapefile with the image object boundaries
    raster_file : String
        file-path to the raster containing the Sentinel-2 imagery as
        GeoTiff; it is assumed that clouds/shadows etc. have already been
        masked out and that these pixels are set to the according NoData
        value
    acqui_date : String
        acquisition date of the imagery (used for linking to LUT and
        metadata)
    scene_id : String
        ID of the Sentinel-2 scene
    table_name : String
        name of the table the object reflectance values should be written
        to

    Returns
    -------
    None
    """
    # open the database connection to OBIA4RTM's backend
    conn, cursor = connect_db()
    # get a logger
    logger = get_logger()
    # iterate over the shapefile to get the metadata
    # shapefile handling
    driver = ogr.GetDriverByName('ESRI Shapefile')
    shpfile = driver.Open(shp_file)
    layer = shpfile.GetLayer(0)
    num_objects = layer.GetFeatureCount()
    logger.info("{0} image objects will be processed".format(num_objects))
    # loop over the single features to get their geometry, their ID and
    # the mean reflectance per band
    # open the raster data
    raster = gdal.Open(raster_file)
    # get the image raster georeference info
    transform = raster.GetGeoTransform()
    xOrigin = transform[0]
    yOrigin = transform[3]
    pixelWidth = transform[1]
    pixelHeight = transform[5]
    # extract the EPSG-code
    proj = osr.SpatialReference(wkt=raster.GetProjection())
    epsg = int(proj.GetAttrValue('AUTHORITY', 1))
    # check against the EPSG of the shapefile
    ref = layer.GetSpatialRef()
    if ref is None:
        logger.warning('The layer has no projection info! Assuming it is '
                       'the same as for the imagery - but check the '
                       'results!')
        shp_epsg = epsg
    else:
        code = ref.GetAuthorityCode(None)
        shp_epsg = int(code)
    # check if the raster and the shapefile EPSG match
    if epsg != shp_epsg:
        logger.error('The projection of the imagery does not match the '
                     'projection of the shapefile you provided! '
                     'EPSG-Code of the Image: EPSG:{0}; '
                     'EPSG-Code of the Shapefile: EPSG:{1}'.format(
                             epsg, shp_epsg))
        close_logger(logger)
        sys.exit('An error occurred while executing get_mean_refl. '
                 'Check logfile!')
    # check the image raster
    # Sentinel-2 bands: B2, B3, B4, B5, B6, B7, B8A, B11, B12 + SCL
    num_bands = 10
    if raster.RasterCount != num_bands:
        logger.error("The number of bands you provided does not match the "
                     "image file!")
        close_logger(logger)
        sys.exit(-1)
    # determine the minimum area of an object (determined by the S2
    # spatial resolution); use the "standard" resolution of 20 meters
    # an object must be at least twice this size
    min_area = 20 * 20 * 2  # 20 by 20 meters times two as minimum size
    # for requesting the landuse information
    luc_field = 'LU' + acqui_date.replace('-', '')
    # get the geometry and extent of each feature
    for ii in range(num_objects):
        feature = layer.GetFeature(ii)
        # extract the geometry
        geom = feature.GetGeometryRef()
        # get the well-known text of the feature geometry
        wkt = geom.ExportToWkt()
        # extract the feature ID
        f_id = feature.GetFID()
        # get the area of the current feature
        area = geom.Area()  # m2
        # skip objects that are too small compared to the image resolution
        if area < min_area:
            logger.warning('The object {0} was too small compared to the '
                           'spatial resolution of Sentinel-2! '
                           'Object area (m2): {1}; Minimum area required '
                           '(m2): {2} -> skipping'.format(f_id, area,
                                                          min_area))
            continue
        luc = feature.GetField(luc_field)
        # convert to integer coding if luc is provided as text
        try:
            luc = int(luc)
        except ValueError:
            luc = luc.upper()
            query = "SELECT landuse FROM s2_landuse WHERE "\
                    "landuse_semantic = '{0}';".format(luc)
            cursor.execute(query)
            res = cursor.fetchall()
            luc = int(res[0][0])
        # end try-except
        # check the feature type -> could be either POLYGON or MULTIPOLYGON
        if geom.GetGeometryName() == 'MULTIPOLYGON':
            count = 0
            pointsX = []
            pointsY = []
            for polygon in geom:
                geomInner = geom.GetGeometryRef(count)
                ring = geomInner.GetGeometryRef(0)
                numpoints = ring.GetPointCount()
                for p in range(numpoints):
                    lon, lat, z = ring.GetPoint(p)
                    pointsX.append(lon)
                    pointsY.append(lat)
                count += 1
        elif geom.GetGeometryName() == 'POLYGON':
            ring = geom.GetGeometryRef(0)
            numpoints = ring.GetPointCount()
            pointsX = []
            pointsY = []
            for p in range(numpoints):
                lon, lat, z = ring.GetPoint(p)
                pointsX.append(lon)
                pointsY.append(lat)
        else:
            sys.exit("ERROR: Geometry needs to be either Polygon or "
                     "Multipolygon")
        # endif
        # get the exact extent of the feature for masking
        xmin = min(pointsX)
        xmax = max(pointsX)
        ymin = min(pointsY)
        ymax = max(pointsY)
        # specify the offset and the number of rows and columns to read
        # -> thus, only a part of the array must be read
        # -> calculate the offset in rows and cols to go to the specific
        #    part of the S2-raster
        xoff = int((xmin - xOrigin) / pixelWidth)
        yoff = int((yOrigin - ymax) / pixelWidth)
        xcount = int((xmax - xmin) / pixelWidth) + 1
        ycount = int((ymax - ymin) / pixelWidth) + 1
        # temporary raster for masking the actual feature
        target_ds = gdal.GetDriverByName('MEM').Create(
                '', xcount, ycount, 1, gdal.GDT_Byte)
        target_ds.SetGeoTransform((xmin, pixelWidth, 0, ymax, 0,
                                   pixelHeight))
        # rasterize the zone polygon to the raster -> data is only read at
        # the location of the actual feature
        gdal.RasterizeLayer(target_ds, [1], layer, burn_values=[1])
        # the mask to be used for the calculation of the stats
        bandmask = target_ds.GetRasterBand(1)
        datamask = bandmask.ReadAsArray(0, 0, xcount,
                                        ycount).astype(float)
        # read the image raster as array
        meanValues = []
        # iterator variable for looping over the Sentinel-2 bands
        index = 1
        # in case the object is cloud covered or affected by cirrus -> skip
        skip_flag = False
        # iterate over the bands
        for jj in range(raster.RasterCount):
            banddataraster = raster.GetRasterBand(index)
            # read the image data at the specific extent covering the
            # actual feature
            dataraster = banddataraster.ReadAsArray(
                    xoff, yoff, xcount, ycount).astype(float)
            # mask the zone of the raster
            zoneraster = np.ma.masked_array(dataraster,
                                            np.logical_not(datamask))
            # apply a conversion factor of 0.01 to get the correct
            # reflectance values for ProSAIL
            if jj < raster.RasterCount - 1:
                mean = np.nanmean(zoneraster) * 0.01
                meanValues.append(mean)
            # treat the SCL band with the pre-class info differently
            else:
                # bincount requires a 1-dim array of non-negative ints ->
                # use only the unmasked pixels cast to integer
                counts = np.bincount(
                        zoneraster.compressed().astype(np.int64))
                # get the most frequent value
                argmax = np.argmax(counts)
                # in case the value is greater than 4 (vegetation) skip
                # the object
                if argmax > 4:
                    skip_flag = True
            # increment the index
            index += 1
        # endfor
        # in case the skip flag was set -> skip
        if skip_flag:
            logger.info('Object {} is not vegetated -> skipping!'.format(
                    f_id))
            continue
        # check that the results are not NaN -> if there are NaNs skip the
        # object as the ProSAIL model inversion cannot deal with missing
        # values
        if any(np.isnan(meanValues)):
            logger.warning('The object with ID {} contains NaNs '
                           '-> skipping!'.format(f_id))
            continue
        # insert the mean reflectance and the object geometry into the DB
        query = "INSERT INTO {0} (object_id, acquisition_date, landuse, "\
                "object_geom, b2, b3, b4, b5, b6, b7, b8a, b11, b12, "\
                "scene_id) VALUES ( "\
                "{1}, '{2}', {3}, ST_Multi(ST_GeometryFromText('{4}', "\
                "{5})), {6}, {7}, {8}, {9}, {10}, {11}, {12}, {13}, {14}, "\
                "'{15}') "\
                "ON CONFLICT (object_id, scene_id) DO NOTHING;".format(
                        table_name, f_id, acqui_date, luc, wkt, epsg,
                        np.round(meanValues[0], 4),
                        np.round(meanValues[1], 4),
                        np.round(meanValues[2], 4),
                        np.round(meanValues[3], 4),
                        np.round(meanValues[4], 4),
                        np.round(meanValues[5], 4),
                        np.round(meanValues[6], 4),
                        np.round(meanValues[7], 4),
                        np.round(meanValues[8], 4), scene_id)
        # catch errors for single objects accordingly and continue with
        # the next object to avoid interrupts of the whole workflow
        try:
            cursor.execute(query)
            conn.commit()
        except (DatabaseError, ProgrammingError):
            logger.error("Could not insert image object with ID {0} into "
                         "table '{1}'".format(f_id, table_name),
                         exc_info=True)
            conn.rollback()
            continue
    # endfor
    # close the GDAL-bindings to the files
    raster = None
    shpfile = None
    layer = None
    # close the database connection
    close_db_connection(conn, cursor)
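# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original source): aggregate object
# spectra from a local GeoTiff stack (e.g. the output of call_gdal_merge
# above). Paths, date, scene ID and table name are placeholders.
def _example_mean_reflectance():
    get_mean_refl('/data/objects.shp',
                  '/data/S2A_20180601_merged.tiff',
                  '2018-06-01',
                  'S2A_MSIL2A_20180601T101021',
                  'obia4rtm.s2_obj_spectra')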