def get_pixel_size(self, input_file, formula=None):
    log.info("get_pixel_size")
    log.info(input_file)
    log.info(formula)
    pixel_size = None
    # creating the cmd, e.g.
    # gdalinfo 'HDF4_EOS:EOS_GRID:"/home/vortex/Desktop/LAYERS/MODIS/033/MOD13Q1.A2014033.h23v09.005.2014050114129.hdf":MODIS_Grid_16DAY_250m_500m_VI:250m 16 days NDVI' | grep Pixel
    cmd = "gdalinfo " + input_file + " | grep Pixel"
    log.info(cmd)
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    output, error = process.communicate()
    log.info(output)
    log.warn(error)
    if "Pixel Size" in output:
        # parse the x value out of a line like: Pixel Size = (0.002086,-0.002086)
        pixel_size = output[output.find("(") + 1:output.find(",")]
        log.info(pixel_size)
        if formula is None:
            return pixel_size
        formula = formula.replace("{{PIXEL_SIZE}}", str(pixel_size))
        log.info(formula)
        # NOTE: eval() executes arbitrary code; the formula must come from a trusted source
        return eval(formula)
    return None
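# Usage sketch (hypothetical: `proc` is an instance of the class above; the
# subdataset string is the one quoted in the comment; the formula doubles the
# native pixel size before eval()).
# size = proc.get_pixel_size(
#     'HDF4_EOS:EOS_GRID:"/home/vortex/Desktop/LAYERS/MODIS/033/MOD13Q1.A2014033.h23v09.005.2014050114129.hdf":MODIS_Grid_16DAY_250m_500m_VI:250m 16 days NDVI',
#     formula="{{PIXEL_SIZE}} * 2")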
def gdal_merge(self, parameters, input_files, output_path):
    log.info("gdal_merge")
    output_files = []
    output_file = os.path.join(output_path, "gdal_merge.hdf")
    output_files.append(output_file)
    # creating the cmd
    cmd = "gdal_merge.py "
    for key in parameters.keys():
        cmd += " " + key + " " + str(parameters[key])
    for input_file in input_files:
        cmd += " " + input_file
    cmd += " -o " + output_file
    log.info(cmd)
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    output, error = process.communicate()
    log.info(output)
    log.warn(error)
    log.info(output_files)
    return output_files
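# Usage sketch (hypothetical `proc` instance; "-of" and "-n" are standard
# gdal_merge.py options, the input file names are illustrative).
# merged = proc.gdal_merge({"-of": "HDF4Image", "-n": "-3000"},
#                          ["MOD13Q1.h23v09.hdf", "MOD13Q1.h24v09.hdf"],
#                          "/tmp/merge")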
def gdalwarp(self, parameters, input_files, output_path):
    log.info("gdalwarp input_files")
    log.info(input_files)
    output_files = []
    output_file = os.path.join(output_path, "warp")
    output_files.append(output_file)
    cmd = "gdalwarp "
    for key in parameters["opt"].keys():
        cmd += " " + key + " " + str(parameters["opt"][key])
    for input_file in input_files:
        cmd += " " + input_file
    cmd += " " + output_file
    log.info(cmd)
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    output, error = process.communicate()
    log.info(output)
    log.warn(error)
    log.info(output_files)
    return output_files
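# Usage sketch (hypothetical `proc`; "-t_srs" and "-of" are standard gdalwarp
# options; note that the parameters dict is nested under "opt").
# warped = proc.gdalwarp({"opt": {"-t_srs": "EPSG:4326", "-of": "GTiff"}},
#                        ["/tmp/merge/gdal_merge.hdf"], "/tmp/warp")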
def publish_postgis_table(self, data, overwrite=False):
    """
    Publish a PostGIS table as a GeoServer layer.
    :param data: layer definition; must contain "name" (the table name in PostGIS)
                 and may contain "workspace", "datastore", "enabled", "defaultStyle"
    :param overwrite: overwrite the layer if it already exists
    :return:
    """
    # Equivalent REST call:
    # curl -v -u admin:geoserver -XPOST -H "Content-type: text/xml" \
    #   -d "<featureType><name>buildings</name></featureType>" \
    #   http://localhost:8080/geoserver/rest/workspaces/acme/datastores/nyc/featuretypes
    #data = data["featureType"]
    name = data["name"]
    log.info(name)
    workspace = self.get_default_workspace() if "workspace" not in data else data["workspace"]
    datastore = self.get_default_datastore() if "datastore" not in data else data["datastore"]
    if not overwrite:
        log.warn("TODO: shapefile")
        #if self.check_if_layer_exist(name, workspace):
        #    raise PGeoException(errors[520] + ": %s" % name)
    try:
        # TODO: this could be done with a single request
        # Add the layer to the datastore
        headers = get_headers("xml")
        xml = "<featureType><name>{0}</name></featureType>".format(unicode(name).lower())
        cs_url = url(self.service_url, ["workspaces", workspace, "datastores", datastore, "featuretypes"])
        self._publish_layer(cs_url, "POST", xml, headers)

        # Update the metadata of the layer
        headers = get_headers("json")
        json_data = deepcopy(data)
        del json_data["name"]
        # enable the layer by default
        if "enabled" not in json_data:
            json_data["enabled"] = True
        # JSON payload sent to GeoServer
        update_layer = {"featureType": json_data}
        cs_url = url(self.service_url, ["workspaces", workspace, "datastores", datastore, "featuretypes", name + ".json"])
        log.info(cs_url)
        self._publish_layer(cs_url, "PUT", json.dumps(update_layer), headers, 200)

        # TODO: check why the default style is not updated
        if "defaultStyle" in json_data and "name" in json_data["defaultStyle"]:
            log.info("changing default style")
            self.set_default_style(name, json_data["defaultStyle"]["name"])
    except PGeoException, e:
        log.error(e.get_message())
        raise PGeoException(e.get_message(), e.get_status_code())
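# Usage sketch (hypothetical `gs` publisher instance; the names mirror the curl
# example above, the title and style values are illustrative).
# gs.publish_postgis_table({
#     "name": "buildings",
#     "workspace": "acme",
#     "datastore": "nyc",
#     "title": "NYC buildings",
#     "defaultStyle": {"name": "polygon"}
# })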
def remove(filepath):
    """
    Remove a file.
    @type filepath: string
    @param filepath: path to the file
    """
    try:
        os.remove(filepath)
    except OSError:
        log.warn("file doesn't exist: " + str(filepath))
def _get_default_db(self, dtype, connect=True):
    try:
        if self.settings["stats"]:
            if self.settings["db"]:
                db_id = self.settings["stats"]["db"][dtype]
                db = self.settings["db"][db_id]
                if connect:
                    return DBStats(db)
                else:
                    return db
    except (KeyError, TypeError):
        log.warn("No db found")
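# Shape of the settings this helper reads (inferred from the lookups above;
# the "spatial" dtype and the connection values are illustrative).
# settings = {
#     "stats": {"db": {"spatial": "spatial_db"}},
#     "db": {"spatial_db": {"dbname": "pgeo", "host": "localhost"}}
# }
# db = stats._get_default_db("spatial", connect=False)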
def __init__(self, datasource):
    if DBConnection.con is None:
        try:
            log.info("---PostGIS connection initialization---")
            self.datasource = datasource
            if "schema" in self.datasource:
                self.schema = self.datasource["schema"]
            db_connect_string = self.get_connection_string(False)
            self.con = psycopg2.connect(db_connect_string)
            log.info("Database '%s' connection opened." % datasource["dbname"])
        except psycopg2.DatabaseError as db_error:
            log.warn("Error:\n{0}".format(db_error))
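# Usage sketch (only "dbname" and "schema" are read above; host/user/password
# are assumed to be consumed by get_connection_string and are illustrative).
# db = DBConnection({"dbname": "pgeo", "schema": "public", "host": "localhost",
#                    "user": "postgres", "password": "postgres"})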
def overviews_tif_file(output_file, parameters=None, overviews_levels=None):
    log.info("Create overviews")
    cmd = "gdaladdo "
    if parameters:
        for key in parameters.keys():
            cmd += " " + key + " " + str(parameters[key])
    cmd += " " + output_file
    if overviews_levels:
        cmd += " " + overviews_levels
    log.info(cmd)
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    output, error = process.communicate()
    log.info(output)
    log.warn(error)
    return output_file
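# Usage sketch ("-r average" and the power-of-two levels are standard gdaladdo
# arguments; the path is illustrative).
# overviews_tif_file("/tmp/warp/warp.tif", {"-r": "average"}, "2 4 8 16")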
def extract_band_files(self, input_files, output_path, ext=None):
    output_files = []
    ext = ext or ""
    for i, f in enumerate(input_files):
        output_file_path = os.path.join(output_path, str(i) + ext)
        cmd = "gdal_translate '" + f + "' " + output_file_path
        log.info(cmd)
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        output, error = process.communicate()
        # TODO: catch the error
        log.info(output)
        log.warn(error)
        output_files.append(output_file_path)
    return output_files
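# Usage sketch (hypothetical `proc`; the subdataset string follows the gdalinfo
# example shown in get_pixel_size above).
# bands = proc.extract_band_files(
#     ['HDF4_EOS:EOS_GRID:"/home/vortex/Desktop/LAYERS/MODIS/033/MOD13Q1.A2014033.h23v09.005.2014050114129.hdf":MODIS_Grid_16DAY_250m_500m_VI:250m 16 days NDVI'],
#     "/tmp/bands", ext=".tif")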
def reload_configuration_geoserver_slaves(self, force_master_reload=False):
    geoserver_cluster = self.config["geoserver_slaves"]
    if force_master_reload is True:
        geoserver_cluster.append(self.config["geoserver_master"])
    for geoserver in geoserver_cluster:
        cs_url = url(geoserver, ["reload?recurse=true"])
        headers, response = self.http.request(cs_url, "POST")
        log.info(headers)
        if headers.status == 200:
            log.info("Geoserver updated %s" % cs_url)
        else:
            # TODO: raise an error instead of returning False?
            log.warn("Reload failed for %s" % cs_url)
            return False
    return True
def publish_shapefile(self, file_path, metadata_def=None, overwrite=False, publish_on_geoserver=True, publish_metadata=True):
    """
    Publish a shapefile on GeoServer and/or the metadata catalog.
    @param file_path: path to the shapefile
    @param metadata_def: metadata definition to enrich and publish
    @param overwrite: overwrite the layer if it already exists
    @param publish_on_geoserver: publish the layer on GeoServer
    @param publish_metadata: publish the metadata
    @return:
    """
    try:
        # add additional layer info to the metadata, i.e. bbox and EPSG code
        if file_path is not None:
            add_metadata_from_vector(file_path, metadata_def)
        else:
            log.warn("Publishing an empty file: " + str(file_path))
        self._publish_shapefile(file_path,
                                metadata_def,
                                translate_from_metadata_to_geoserver(metadata_def, file_path),
                                overwrite,
                                publish_on_geoserver,
                                publish_metadata)
    except PGeoException, e:
        raise PGeoException(e.get_message(), e.get_status_code())
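# Usage sketch (hypothetical `pub` instance; the path and metadata are
# illustrative).
# pub.publish_shapefile("/tmp/buildings.shp", {"title": "Buildings"},
#                       overwrite=True)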
def gdaladdo(self, parameters, input_files, output_path=None):
    log.info(parameters)
    log.info(input_files)
    output_files = []
    cmd = "gdaladdo "
    for key in parameters["parameters"].keys():
        cmd += " " + key + " " + str(parameters["parameters"][key])
    # NOTE: gdaladdo takes a single file per invocation; with more than one
    # input file this command line is unlikely to work as intended
    for input_file in input_files:
        cmd += " " + input_file
        output_files.append(input_file)
    cmd += " " + parameters["overviews_levels"]
    log.info(cmd)
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    output, error = process.communicate()
    log.info(output)
    log.warn(error)
    return output_files
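# Usage sketch (hypothetical `proc`; the dict layout mirrors the keys read
# above: "parameters" for flags and "overviews_levels" for the levels string).
# proc.gdaladdo({"parameters": {"-r": "average"}, "overviews_levels": "2 4 8 16"},
#               ["/tmp/warp/warp.tif"])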
def zonal_stats(self, json_stats):
    '''
    :param json_stats: json with statistics definitions
    :return: json with response
    '''
    # TODO: a common zonalstats
    stats = None
    # Raster: if the raster is stored in the data dir, resolve its path
    if "uid" in json_stats["raster"]:
        json_stats["raster"]["path"] = self.get_raster_path(json_stats["raster"]["uid"])
    # Vector
    # TODO: make an ENUM somewhere (i.e. database, geojson, etc.)
    #log.info(json_stats["vector"]["type"])
    if json_stats["vector"]["type"] == "database":
        stats = self._zonal_stats_by_vector_database(json_stats)
    elif json_stats["vector"]["type"] == "geojson":
        log.warn("TODO: GeoJSON statistics")
    # Stats
    # TODO: save stats in case they are needed, or just return them
    return stats
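# Shape of the json_stats payload (only "raster"/"uid"/"path" and
# "vector"/"type" are read above; the uid value is illustrative).
# stats = obj.zonal_stats({
#     "raster": {"uid": "modis:ndvi"},
#     "vector": {"type": "database"}
# })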
def query(self, query):
    try:
        if self.check_query(query):
            cur = self.con.cursor()
            cur.execute(query)
            rows = cur.fetchall()
            return rows
        else:
            log.warn("Query contains invalid characters")
            raise PGeoException("Query contains invalid characters", status_code=404)
    except PGeoException, e:
        self.con.rollback()
        raise PGeoException(e.get_message(), e.get_status_code())
    except Exception, e:
        self.con.rollback()
        # TODO: re-raise instead of just logging?
        log.warn("Query error: " + str(e))

def __del__(self):
    self.close_connection()

def __exit__(self, exc_type, exc_value, traceback):
    # context-manager exit: release the connection
    self.close_connection()

def close_connection(self):
    if self.con is not None:
        self.con.close()
        log.info("Database '%s' connection closed." % self.datasource["dbname"])

def get_connection_string(self, add_pg=True):
    db_connection_string = ""
    if add_pg is True: