def load_areas(self, hostile_path, habitat_path, ignore_path):
    # load hostile
    if hostile_path:
        ds = gdal.OpenEx(hostile_path, gdal.OF_VECTOR | gdal.OF_READONLY)
        if not ds:
            raise Exception('could not open file for reading')
        layer = ds.GetLayer()
        self.hostile_areas.extend(p.GetGeometryRef().Clone() for p in layer)
        del ds, layer
    if habitat_path:
        ds = gdal.OpenEx(habitat_path, gdal.OF_VECTOR | gdal.OF_READONLY)
        if not ds:
            raise Exception('could not open file for reading')
        layer = ds.GetLayer()
        i = iter(layer)
        p = next(i, None)
        if (not p) or next(i, None):
            raise Exception('habitat file must have exactly one polygon')
        self.habitat_area = p.GetGeometryRef().Clone()
        del ds, layer, i, p
    if ignore_path:
        ds = gdal.OpenEx(ignore_path, gdal.OF_VECTOR | gdal.OF_READONLY)
        if not ds:
            raise Exception('could not open file for reading')
        layer = ds.GetLayer()
        self.ignore_areas.extend(p.GetGeometryRef().Clone() for p in layer)
        del ds, layer

def open(self, *args, **kwargs):
    """Opens the file."""
    filename = self.file
    prepare_method = 'prepare_{0}'.format(self.method_safe_filetype)
    if hasattr(self, prepare_method):
        # prepare hooks make extension-specific modifications to input parameters
        filename, args, kwargs = getattr(self, prepare_method)(filename, *args, **kwargs)
    open_options = kwargs.get('open_options', [])
    try:
        self.data = gdal.OpenEx(filename, open_options=open_options)
    except Exception:  # a bare except here would also swallow KeyboardInterrupt
        msg = 'gdal.OpenEx({}, {}) failed.'.format(filename, open_options)
        logger.debug(msg)
        raise NoDataSourceFound(msg)
    if self.data is None:
        msg = 'gdal.OpenEx({}, {}) returned None.'.format(filename, open_options)
        logger.debug(msg)
        raise NoDataSourceFound(msg)
    return self.data

def open(self, *args, **kwargs):
    """Opens the file."""
    filename = self.file
    prepare_method = 'prepare_{0}'.format(self.method_safe_filetype)
    if hasattr(self, prepare_method):
        # prepare hooks make extension-specific modifications to input parameters
        filename, args, kwargs = getattr(self, prepare_method)(filename, *args, **kwargs)
    open_options = kwargs.get('open_options', [])
    try:
        self.data = gdal.OpenEx(filename, open_options=open_options)
    except RuntimeError:
        raise NoDataSourceFound
    if self.data is None:
        raise NoDataSourceFound
    return self.data

def generic_import(self, filename, configs=None):
    if configs is None:
        configs = [{'index': 0}]
    path = test_file(filename)
    results = self.import_file(path, configs=configs)
    layer_results = []
    for result in results:
        if result[1].get('raster'):
            layer_path = result[0]
            layer_name = os.path.splitext(os.path.basename(layer_path))[0]
            layer = Layer.objects.get(name=layer_name)
            self.assertTrue(layer_path.endswith('.tif'))
            self.assertTrue(os.path.exists(layer_path))
            gdal_layer = gdal.OpenEx(layer_path)
            # assertTrue(x, msg) always passes when x is truthy; assertEqual
            # is the intended driver check
            self.assertEqual(gdal_layer.GetDriver().ShortName, 'GTiff')
            layer_results.append(layer)
        else:
            layer = Layer.objects.get(name=result[0])
            self.assertEqual(layer.srid, 'EPSG:4326')
            self.assertEqual(layer.store, self.datastore.name)
            self.assertEqual(layer.storeType, 'dataStore')
            if not path.endswith('zip'):
                self.assertGreaterEqual(layer.attributes.count(),
                                        DataSource(path)[0].num_fields)
            layer_results.append(layer)
    return layer_results[0]

def generic_import(self, file, configuration_options=[{'index': 0}]):
    f = file
    filename = os.path.join(os.path.dirname(__file__), '..',
                            'importer-test-files', f)
    res = self.import_file(filename,
                           configuration_options=configuration_options)
    layer_results = []
    for result in res:
        if result[1].get('raster'):
            layerfile = result[0]
            layername = os.path.splitext(os.path.basename(layerfile))[0]
            layer = Layer.objects.get(name=layername)
            self.assertTrue(layerfile.endswith('.tif'))
            self.assertTrue(os.path.exists(layerfile))
            gdal_layer = gdal.OpenEx(layerfile)
            # assertTrue(x, msg) always passes when x is truthy; assertEqual
            # is the intended driver check
            self.assertEqual(gdal_layer.GetDriver().ShortName, 'GTiff')
            layer_results.append(layer)
        else:
            layer = Layer.objects.get(name=result[0])
            self.assertEqual(layer.srid, 'EPSG:4326')
            self.assertEqual(layer.store, self.datastore.name)
            self.assertEqual(layer.storeType, 'dataStore')
            if not filename.endswith('zip'):
                self.assertGreaterEqual(layer.attributes.count(),
                                        DataSource(filename)[0].num_fields)
            layer_results.append(layer)
    return layer_results[0]

def compress_to(task_graph, base_raster_path, resample_method, target_path):
    gtiff_driver = gdal.GetDriverByName('GTiff')
    base_raster = gdal.OpenEx(base_raster_path, gdal.OF_RASTER)
    LOGGER.info('compress %s to %s' % (base_raster_path, target_path))
    compressed_raster = gtiff_driver.CreateCopy(
        target_path, base_raster,
        options=('TILED=YES', 'BIGTIFF=YES', 'COMPRESS=LZW',
                 'BLOCKXSIZE=256', 'BLOCKYSIZE=256'))
    min_dimension = min(
        pygeoprocessing.get_raster_info(target_path)['raster_size'])
    LOGGER.info(f"min dimension: {min_dimension}")
    overview_levels = []
    current_level = 2
    while True:
        if min_dimension // current_level == 0:
            break
        overview_levels.append(current_level)
        current_level *= 2
    LOGGER.info(f'level list: {overview_levels}')
    compressed_raster.BuildOverviews(
        resample_method, overview_levels,
        callback=_make_logger_callback(
            f'build overview for {os.path.basename(target_path)} '
            '%.2f%% complete'))

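# Worked example of the overview-level loop above: for a raster whose smallest
# dimension is 10000 pixels, the loop keeps doubling until integer division
# hits zero:
#   10000 // 2 = 5000, ..., 10000 // 8192 = 1, 10000 // 16384 = 0 -> stop
#   overview_levels == [2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192]
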
def ogr2ogr(dest, src, *args, **kwargs):
    srcDS = gdal.OpenEx(src)
    opts = gdal.VectorTranslateOptions(skipFailures=True, *args, **kwargs)
    ds = gdal.VectorTranslate(dest, srcDS=srcDS, options=opts)
    # Dataset isn't written until it is closed and dereferenced
    # https://gis.stackexchange.com/questions/255586/gdal-vectortranslate-returns-empty-object
    del ds

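# A minimal usage sketch for the ogr2ogr() wrapper above. The paths are
# placeholders, and format/dstSRS are keywords gdal.VectorTranslateOptions
# accepts and forwards to the translation.
ogr2ogr('roads.gpkg', 'roads.shp', format='GPKG', dstSRS='EPSG:4326')
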
def enmascarar_entrenamiento(vector_data_path, cols, rows, geo_transform,
                             projection, target_value=1):
    data_source = gdal.OpenEx(vector_data_path, gdal.OF_VECTOR)
    layer = data_source.GetLayer(0)
    driver = gdal.GetDriverByName('MEM')
    target_ds = driver.Create('', cols, rows, 1, gdal.GDT_UInt16)
    target_ds.SetGeoTransform(geo_transform)
    target_ds.SetProjection(projection)
    gdal.RasterizeLayer(target_ds, [1], layer, burn_values=[target_value])
    return target_ds

def create_mask_from_vector(vector_data_path, cols, rows, geo_transform,
                            projection, target_value=1):
    """Rasterize the given vector (wrapper for gdal.RasterizeLayer)."""
    data_source = gdal.OpenEx(vector_data_path, gdal.OF_VECTOR)
    layer = data_source.GetLayer(0)
    driver = gdal.GetDriverByName('MEM')  # in-memory dataset
    target_ds = driver.Create('', cols, rows, 1, gdal.GDT_UInt16)
    target_ds.SetGeoTransform(geo_transform)
    target_ds.SetProjection(projection)
    gdal.RasterizeLayer(target_ds, [1], layer, burn_values=[target_value])
    return target_ds

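# Hypothetical usage of create_mask_from_vector(): take cols/rows, the
# geotransform, and the projection from a reference raster so the mask aligns
# with it pixel-for-pixel. File names are placeholders.
from osgeo import gdal

ref = gdal.OpenEx('reference.tif', gdal.OF_RASTER)
mask_ds = create_mask_from_vector(
    'training_polygons.shp',
    cols=ref.RasterXSize,
    rows=ref.RasterYSize,
    geo_transform=ref.GetGeoTransform(),
    projection=ref.GetProjection())
mask = mask_ds.GetRasterBand(1).ReadAsArray()  # target_value inside polygons, 0 elsewhere
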
def calc_stats_and_overviews(destName, pyramid_levels):
    '''Calculate statistics and build overviews for tile'''
    ds = gdal.OpenEx(destName, 0)  # 0 = read-only (creates an external .ovr file)
    print('Building overviews and calculating stats for {}'.format(destName))
    ds.GetRasterBand(1).GetStatistics(0, 1)
    gdal.SetConfigOption('COMPRESS_OVERVIEW', 'PACKBITS')
    ds.BuildOverviews("NEAREST", pyramid_levels)
    del ds

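# Hypothetical call for calc_stats_and_overviews(); the tile path is a
# placeholder, and overview levels are conventionally powers of two.
calc_stats_and_overviews('n45w120_dem.tif', [2, 4, 8, 16, 32])
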
def format_SEB_VEC_values(path, snow_label, cloud_label, nodata_label):
    """Update the shapefile according to the LIS product specifications."""
    logging.info("Formatting snow/cloud shapefile")
    table = op.splitext(op.basename(path))[0]
    ds = gdal.OpenEx(path, gdal.OF_VECTOR | gdal.OF_UPDATE)
    ds.ExecuteSQL("ALTER TABLE " + table + " ADD COLUMN type varchar(15)")
    ds.ExecuteSQL("UPDATE " + table + " SET type='snow' WHERE DN=" +
                  snow_label, dialect="SQLITE")
    ds.ExecuteSQL("UPDATE " + table + " SET type='cloud' WHERE DN=" +
                  cloud_label, dialect="SQLITE")
    ds.ExecuteSQL("UPDATE " + table + " SET type='no data' WHERE DN=" +
                  nodata_label, dialect="SQLITE")

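# Hypothetical call for format_SEB_VEC_values(); the label values are
# illustrative, and they are passed as strings because they are concatenated
# directly into the SQL statements above.
format_SEB_VEC_values('SEB_VEC.shp', snow_label='100', cloud_label='205',
                      nodata_label='254')
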
def _check_osm(fn):
    """
    Check whether the file is an OpenStreetMap file.

    Args:
        fn: gdal vector file name

    Returns:
        True/False
    """
    a = gdal.OpenEx(fn)
    drv = a.GetDriver().LongName
    a = None
    return 'OpenStreetMap' in drv

def shp_to_csv():
    root = Path(__file__).parent
    shapefiles = [
        root / 'tmp' / each for each in os.listdir(root / 'tmp')
        if each.endswith('.shp')
    ]
    srcDS = gdal.OpenEx(str(shapefiles[0]))
    gdal.VectorTranslate(str(root / 'tmp' / 'nysdec_lands.csv'), srcDS,
                         format='CSV', dstSRS='EPSG:4326',
                         options=['-progress'],
                         layerCreationOptions=['GEOMETRY=AS_WKT'])

def generic_raster_import(self, filename, configs=None):
    if configs is None:
        configs = [{'index': 0}]
    path = test_file(filename)
    results = self.import_file(path, configs=configs)
    layer_path = results[0][0]
    layer_name = os.path.splitext(os.path.basename(layer_path))[0]
    layer = Layer.objects.get(name=layer_name)
    self.assertTrue(layer_path.endswith('.tif'))
    self.assertTrue(os.path.exists(layer_path))
    gdal_layer = gdal.OpenEx(layer_path)
    # assertTrue(x, msg) always passes when x is truthy; assertEqual is the
    # intended driver check
    self.assertEqual(gdal_layer.GetDriver().ShortName, 'GTiff')
    return layer

def open2mem(self, path):
    gdal.SetConfigOption('OSM_CONFIG_FILE', 'osmconf.ini')
    ds = gdal.OpenEx(path, gdal.OF_READONLY)  # allowed_drivers=['PBF']
    assert ds is not None
    layer = ds.GetLayer('multilinestrings')
    driver_mem = ogr.GetDriverByName('MEMORY')
    ds_mem = driver_mem.CreateDataSource('memData')
    driver_mem.Open('memData', 1)
    layer_mem = ds_mem.CopyLayer(ds.GetLayer('multilinestrings'),
                                 'multilinestrings', ['OVERWRITE=YES'])
    return ds_mem, layer_mem

def save_fiona_objects(self, table_name, objs, mode="w", crs=None):
    if mode not in ["w", "a"]:
        raise NotImplementedError("supported modes are ['w', 'a']")
    if mode == "w":
        if crs is None:
            crs = self.get_crs(table_name)
        fiona.remove(str(self._gpkg_path), layer=table_name)
    if not self._table_exists(table_name):
        self.create_table(table_name, crs)
    schema = self.get_schema(table_name)
    for obj in objs:
        if "id" in obj["properties"]:
            del obj["properties"]["id"]
    # Don't use fiona's writerecords because it doesn't keep fid.
    ds = gdal.OpenEx(str(self._gpkg_path), gdal.OF_UPDATE | gdal.OF_VECTOR)
    layer = ds.GetLayerByName(table_name)
    # Add id if not present
    for i, obj in enumerate(objs):
        if "id" not in obj:
            obj["id"] = i + 1
            # Get max id and increment in append mode
            if mode == "a":
                gdf = self.get_all_features_as_gdf(table_name)
                max_id = 0 if gdf.empty else max(gdf.id)
                obj["id"] += max_id
    for obj in objs:
        feature = ogr.Feature(layer.GetLayerDefn())
        feature.SetFID(obj["id"])
        if obj["geometry"]:
            geometry = ogr.CreateGeometryFromWkt(shape(obj["geometry"]).wkt)
            feature.SetGeometry(geometry)
        for prop in schema["properties"]:
            feature.SetField(prop, obj["properties"][prop])
        ret = layer.CreateFeature(feature)
        if ret != 0:
            raise RuntimeError("failed to create feature")
    self._reload_table(table_name)

def retrieve_spatial_info_with_gdal(self):
    # logger.debug("retrieving spatial info with GDAL")
    try:
        ds = gdal.OpenEx(self.path)
        if ds is None:
            logger.warning("unable to open the file")
            return False

        self.meta.crs = ds.GetProjection()
        if self.meta.crs is None:
            logger.warning("unable to get CRS")
            return False
        # logger.debug("crs: %s" % self.meta.crs)

        self.meta.gt = ds.GetGeoTransform()
        if self.meta.gt is None:
            logger.warning("unable to get geotransform")
            return False
        # logger.debug("gt: %s" % (self.meta.gt,))

        self.meta.x_res = self.meta.gt[1]
        self.meta.y_res = self.meta.gt[5]
        # logger.debug("res -> x: %s, y: %s" % (self._x_res, self._y_res))

        self.meta.x_min = self.meta.gt[0] + self.meta.x_res * 0.5
        self.meta.x_max = self.meta.gt[0] + (
            self.meta.x_res * (ds.RasterXSize - 1)) + self.meta.x_res * 0.5
        self.meta.y_min = self.meta.gt[3] + (
            self.meta.y_res * (ds.RasterYSize - 1)) + self.meta.y_res * 0.5
        self.meta.y_max = self.meta.gt[3] + self.meta.y_res * 0.5
        # logger.debug("x -> min: %s, max: %s" % (self.meta.x_min, self.meta.x_max))
        # logger.debug("y -> min: %s, max: %s" % (self.meta.y_min, self.meta.y_max))

        # TODO: check for possible issues
        self.meta.y_res = abs(self.meta.y_res)  # make y always positive

        self.meta.has_spatial_info = True
        # noinspection PyUnusedLocal
        ds = None
        return True

    except Exception as e:
        logger.warning("while using GDAL, %s" % e)
        return False

def generic_raster_import(self, file, configuration_options=[{'index': 0}]):
    f = file
    filename = os.path.join(os.path.dirname(__file__), '..',
                            'importer-test-files', f)
    res = self.import_file(filename,
                           configuration_options=configuration_options)
    layerfile = res[0][0]
    layername = os.path.splitext(os.path.basename(layerfile))[0]
    layer = Layer.objects.get(name=layername)
    self.assertTrue(layerfile.endswith('.tif'))
    self.assertTrue(os.path.exists(layerfile))
    gdal_layer = gdal.OpenEx(layerfile)
    # assertTrue(x, msg) always passes when x is truthy; assertEqual is the
    # intended driver check
    self.assertEqual(gdal_layer.GetDriver().ShortName, 'GTiff')
    return layer

def main():
    """Update the cache for list of input OPI."""
    tiles, epsg = get_capabilities('cache_test/Capabilities.xml')
    out_raster_srs = gdal.osr.SpatialReference()
    out_raster_srs.ImportFromEPSG(epsg)
    # user/password were redacted in the source; placeholder variables kept here
    conn_string = ("PG:host=" + host + " dbname=" + database +
                   " user=" + user + " password=" + password)
    db_graph = gdal.OpenEx(conn_string, gdal.OF_VECTOR)
    if db_graph is None:
        raise SystemExit("Connection to database failed")
    list_filename = glob.glob(sys.argv[1])
    print(list_filename)
    with open('cache_test/cache_mtd.json', 'r') as inputfile:
        mtd = json.load(inputfile)
    for filename in list_filename:
        # If the file already has a colour, reuse it
        cliche = filename.split(os.path.sep)[-1].split('.')[0]
        color = None
        for _r in mtd:
            for _v in mtd[_r]:
                for _b in mtd[_r][_v]:
                    if mtd[_r][_v][_b] == cliche:
                        color = [_r, _v, _b]
                        break
                if color:
                    break
            if color:
                break
        if color is None:
            print('new image')
            color = [randrange(255), randrange(255), randrange(255)]
            while (color[0] in mtd) and (color[1] in mtd[color[0]]) and (
                    color[2] in mtd[color[0]][color[1]]):
                color = [randrange(255), randrange(255), randrange(255)]
            if color[0] not in mtd:
                mtd[color[0]] = {}
            if color[1] not in mtd[color[0]]:
                mtd[color[0]][color[1]] = {}
            mtd[color[0]][color[1]][color[2]] = cliche
        process_image(tiles, db_graph, filename, color, out_raster_srs)
    with open('cache/cache_mtd.json', 'w') as outfile:
        json.dump(mtd, outfile)

def _initZoomLayer(self, zoom):
    west_edge, south_edge, east_edge, north_edge = self.extent
    first_tile = Tile(*deg2num(north_edge, west_edge, zoom), zoom)
    last_tile = Tile(*deg2num(south_edge, east_edge, zoom), zoom)
    first_tile_extent = first_tile.extent()
    last_tile_extent = last_tile.extent()
    zoom_extent = [
        first_tile_extent[0], last_tile_extent[1],
        last_tile_extent[2], first_tile_extent[3]
    ]
    bounds = ','.join(map(str, zoom_extent))
    self._execute_sqlite(
        "UPDATE metadata SET value='{}' WHERE name='bounds'".format(bounds))
    self._zoomDs = gdal.OpenEx(self.filename, 1,
                               open_options=['ZOOM_LEVEL=%s' % first_tile.z])
    self._first_tile = first_tile
    self._zoom = zoom

def netcdf_to_geotiff(base_raster_path, target_raster_path):
    '''
    base_raster_path should use the GDAL NetCDF subdataset syntax:
    NETCDF:"path":variable_name
    Converts ALL BANDS.
    '''
    gtiff_driver = gdal.GetDriverByName('GTiff')
    raster = gdal.OpenEx(base_raster_path, gdal.OF_RASTER)
    target_raster = gtiff_driver.CreateCopy(target_raster_path, raster)
    # I'm pretty sure this is useless, but didn't check:
    # target_band = target_raster.GetRasterBand(band_num)
    # target_band.XSize
    # target_band.FlushCache()
    # target_array = target_band.ReadAsArray()
    # target_band = None
    target_raster = None

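# Hypothetical call using the NETCDF:"path":variable subdataset syntax the
# docstring describes; file and variable names are placeholders.
netcdf_to_geotiff('NETCDF:"climate.nc":precip', 'precip.tif')
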
def wordfiler(img_path, shapefile_path, filename_field, pix_res):
    ds = gdal.OpenEx(shapefile_path, 0)
    lyr = ds.GetLayer()
    # print(feat_count)
    for root, dirs, files in os.walk(img_path):
        for filename in files:
            name, ext = os.path.splitext(filename)
            # print('name: ', name, ext)
            if ext == '.jpg':
                for feature in lyr:
                    # alpha = math.radians(feature.GetField('Angle'))
                    out_filename = feature.GetField(filename_field)
                    # print('outname before loop: ', out_filename)
                    if out_filename == name[:-9]:
                        feat_geom = feature.GetGeometryRef()  # feature geometry
                        # geometry of the geometry; without this you can't call GetPoint()
                        geom = feat_geom.GetGeometryRef(0)
                        pt1 = geom.GetPoint(0)
                        pt2 = geom.GetPoint(1)  # the first point is always needed
                        dx, dy = pt2[0] - pt1[0], pt2[1] - pt1[1]
                        alpha = math.atan2(dx, dy)  # rectangle main angle
                        par1 = float(pix_res) * math.cos(alpha)
                        par2 = float(pix_res) * math.sin(-alpha)
                        # corner_factor = pix_res * .5  # to bind the raster to the pixel corner, not its center
                        # print(out_filename, name)
                        out_filename = os.path.join(root, name)
                        with open(out_filename + '.wld', 'w') as out:
                            out.write(str(par1) + '\n')
                            out.write(str(par2) + '\n')
                            out.write(str(par2) + '\n')
                            out.write(str(-par1) + '\n')
                            out.write(str(pt2[0]) + '\n')
                            out.write(str(pt2[1]) + '\n')
                lyr.ResetReading()

def recode_from_river(watershed_id, river_x, river_y, watershed_tif_path,
                      dir_tif_path, water_tif_path, rivers_index):
    global water_value
    # Open the datasets
    watershed_ds = gdal.OpenEx(watershed_tif_path, 1)
    dir_ds = gdal.Open(dir_tif_path)
    water_ds = gdal.Open(water_tif_path)
    # Initialize the set of cells to be assigned
    update_cells = [[river_x, river_y]]
    # Update the cell values within the region
    # print(">>> update cell:", end='')
    while len(update_cells) > 0:
        # Pop the index of the next cell to update
        update_cell = update_cells.pop()
        # print(update_cell, end='')
        # Update the cell value
        cu.set_raster_int_value(watershed_ds, update_cell[0], update_cell[1],
                                watershed_id)
        # print('update: ', update_cell, '->', watershed_id)
        # Get the set of neighbouring cells
        neighbor_cells = cu.get_8_dir(update_cell[0], update_cell[1])
        # Search for upstream cells
        for neighbor_cell in neighbor_cells:
            n_x = neighbor_cell[0]
            n_y = neighbor_cell[1]
            # Check that the neighbour is within the data
            if cu.in_data(n_x, n_y, watershed_ds.RasterXSize,
                          watershed_ds.RasterYSize):
                # If it is neither a river segment nor a lake/reservoir
                # (i.e. it belongs to the sub-watershed)
                water_off = cu.off_transform(n_x, n_y, watershed_ds, water_ds)
                in_water = cu.is_water_cell(water_ds, water_off[0],
                                            water_off[1], water_value)
                if neighbor_cell not in rivers_index and not in_water:
                    dir_value = cu.get_raster_int_value(dir_ds, n_x, n_y)
                    to_point = cu.get_to_point(n_x, n_y, dir_value)
                    # If it is an upstream point
                    if to_point == update_cell:
                        # Queue it for update
                        update_cells.append(neighbor_cell)
    watershed_ds = None
    dir_ds = None

def processAlgorithm(self, parameters, context, feedback):
    """Import layer to the database"""
    ogrLayer, layerName = self.getOgrCompatibleSource(self.INPUT, parameters,
                                                      context, feedback)
    dbname = self.parameterAsString(parameters, self.DBNAME, context)
    name = self.parameterAsString(parameters, self.NAME, context)
    forceSinglePartGeometry = self.parameterAsBool(parameters,
                                                   self.SINGLEPARTGEOMETRY,
                                                   context)
    srcDs = gdal.OpenEx(ogrLayer)
    if not srcDs:
        raise QgsProcessingException("Failed to open '%s'" % ogrLayer)
    options = []
    if forceSinglePartGeometry:
        options.append('-explodecollections')
    if os.path.exists(dbname):
        options.append('-update')
    if feedback:
        feedback.setProgressText("Importing layer")

        def callback(pct, msg, data, **kwargs):
            if msg:
                feedback.setProgressText(msg)
            feedback.setProgress(100 * pct)
    else:
        callback = None
    ds = gdal.VectorTranslate(dbname, srcDS=srcDs, format='SQLite',
                              datasetCreationOptions=['SPATIALITE=y'],
                              layerName=name, options=options,
                              callback=callback)
    if ds:
        del ds
    else:
        raise QgsProcessingException("Failed to import '%s'" % ogrLayer)
    return {}

    barred_file_exts = ['jpg', 'csv', 'xls', 'lsx', 'ovr', 'aux', 'asc']
    for f in filenames:
        if f[-3:] not in barred_file_exts:
            clean_filenames.append(f)
    paths.append([dirpath, clean_filenames])
    return paths

paths = getFiles(path)
coordinates = []
for p in paths:
    for f in p[1]:
        fileName = p[0] + '/' + f
        dataset = gdal.OpenEx(fileName, gdal.OF_RASTER)
        print(fileName)
        if dataset is None:
            dataset = gdal.OpenEx(fileName, gdal.OF_VECTOR)
        if dataset is not None:
            # VECTOR
            lyr = dataset.GetLayer()
            numLayers = dataset.GetLayerCount()
            # print('numLayers', numLayers)
            for l in range(numLayers):
                # Obtain layer extent
                layer = dataset.GetLayer(l)
                spatialRef = layer.GetSpatialRef()
                extent = layer.GetExtent()

# Collect and process all DEM files using multiprocessing
all_dem_files, all_dem_hs_files = collect_files_mp(
    fileurls, num_processes, zf, multiDirectional, tile_pyramid_levels,
    options_dict, tar_dir=tar_dir, dem_dir=dem_dir)

destName = '{prefix}.vrt'.format(prefix=outname_prefix)
if options_dict['build_vrt_raster']:
    print("Building VRT {}".format(destName))
    gdal.BuildVRT(destName, all_dem_files)
if options_dict['build_vrt_overviews']:
    ds = gdal.OpenEx(destName, 0)  # 0 = read-only (creates an external .ovr file)
    print("Building pyramids for {}".format(destName))
    gdal.SetConfigOption('BIGTIFF', 'YES')
    gdal.SetConfigOption('BIGTIFF_OVERVIEW', 'YES')
    gdal.SetConfigOption('COMPRESS_OVERVIEW', 'PACKBITS')
    ds.GetRasterBand(1).GetStatistics(0, 1)
    ds.BuildOverviews("NEAREST", vrt_pyramid_levels)
    del ds

destName = '{prefix}_hs.vrt'.format(prefix=outname_prefix)
if options_dict['build_vrt_hillshade']:
    print("Building VRT {}".format(destName))
    gdal.BuildVRT(destName, all_dem_hs_files)
if options_dict['build_vrt_hillshade_overviews']:
    ds = gdal.OpenEx(destName, 0)  # 0 = read-only (creates an external .ovr file)

from scipy.stats import gaussian_kde
import skimage.morphology as morphology
# Python Imaging Library imports
from PIL import Image
from PIL import ImageDraw
import cv2

demFile = r"V:\Data\NGI\GEF DEM\3323d_2015_1001_GEF_DEM_SGM3_clip.tif"
demFile = r"V:/Data/NGI/GEF DEM/3323d_2015_1001_GEF_DEM_Photoscan_clip.tif"
filtDemFile = r"V:\Data\NGI\GEF DEM\3323d_2015_1001_GEF_DEM_Photoscan_clip_filt.tif"
plantHeightFile = r"V:\Data\NGI\GEF DEM\3323d_2015_1001_GEF_DEM_Photoscan_clip_hgt.tif"

demDs = gdal.OpenEx(demFile, gdal.OF_RASTER)
if demDs is None:
    print("Open failed.\n")
print('Driver: ', demDs.GetDriver().ShortName, '/',
      demDs.GetDriver().LongName)
print('Size is ', demDs.RasterXSize, 'x', demDs.RasterYSize,
      'x', demDs.RasterCount)
print('Projection is ', demDs.GetProjection())
geotransform = demDs.GetGeoTransform()
if geotransform is not None:
    print('Origin = (', geotransform[0], ',', geotransform[3], ')')
    print('Pixel Size = (', geotransform[1], ',', geotransform[5], ')')
pixelSize = geotransform[1]
dem = demDs.GetRasterBand(1).ReadAsArray()

def describe_fields(self):
    """
    Returns a dict of the layers with fields and field types.
    """
    opened_file = self.data
    description = []
    if not opened_file:
        opened_file = self.open()
    driver = opened_file.GetDriver().ShortName
    # Get vector layers: if the dataset contains vector layers,
    # GetLayerCount() will return > 0.
    for n in range(0, opened_file.GetLayerCount()):
        layer = opened_file.GetLayer(n)
        layer_name = layer.GetName()
        geometry_type = self.geometry_type(layer)
        layer_description = {
            'layer_name': layer_name,
            'feature_count': None,
            'fields': [],
            'index': n,
            'geom_type': geometry_type,
            'raster': False,
            'layer_type': 'vector',
            'driver': driver,
            'layer_definition': None
        }
        if driver != 'WFS':
            layer_description['feature_count'] = layer.GetFeatureCount()
        layer_definition = layer.GetLayerDefn()
        for i in range(layer_definition.GetFieldCount()):
            field_desc = {}
            field = layer_definition.GetFieldDefn(i)
            field_desc['name'] = field.GetName()
            # GetFieldTypeName expects a field-type code, not the loop index
            field_desc['type'] = field.GetFieldTypeName(field.GetType())
            layer_description['fields'].append(field_desc)
        description.append(layer_description)
    # GeoPackage files with tiles in them are mistakenly identified as rasters:
    # 4 rasters are counted, the red/green/blue/alpha values for an image
    # derived from tiles.
    if driver.lower() == 'gpkg' and opened_file.RasterCount == 4:
        conn = sqlite3.connect(self.file)
        cur = conn.cursor()
        cur.execute(
            'SELECT table_name FROM gpkg_tile_matrix_set ORDER BY table_name;')
        for i, row in enumerate(cur.fetchall()):
            layer_description = {
                'index': i,
                'layer_name': row[0],
                'path': self.file,
                'raster': False,
                'layer_type': 'tile',
                'driver': driver,
            }
            description.append(layer_description)
    # Get raster layers: if they exist, RasterCount returns the total band
    # count. Get the main layer first.
    elif opened_file.RasterCount > 0:
        layer_description = {
            'index': len(description),
            'layer_name': self.file,
            'path': self.file,
            'raster': True,
            'layer_type': 'raster',
            'driver': driver
        }
        description.append(layer_description)
        # Get sub-layers, if present
        raster_list = opened_file.GetSubDatasets()
        for m in range(0, len(raster_list)):
            layer = gdal.OpenEx(raster_list[m][0])
            layer_description = {
                'index': len(description),
                'subdataset_index': m,
                'path': raster_list[m][0],
                'layer_name': raster_list[m][0].split(':')[-1],
                'layer_type': 'raster',
                'raster': True,
                'driver': driver
            }
            description.append(layer_description)
    return description

print(xarr0.coords)
print('rows', rows)
print('cols', cols)
_coords = xarr0.coords
print('bandas xarr0', list(xarr0.data_vars))
lista = list(xarr0.data_vars)
geo_transform = (_coords["longitude"].values[0], 0.000269995, 0,
                 _coords["latitude"].values[0], 0, -0.000271302)
proj = xarr0.crs.crs_wkt
###
data_source = gdal.OpenEx(train_folder_path, gdal.OF_VECTOR)
layer = data_source.GetLayer(0)
print('layer')
print(type(layer))
print(layer)
lyr = data_source.GetLayer()
print('lyr')
print(type(lyr))
print(lyr)
pAttributo = 'K10'
pAttrAGB = 'cha_HD'
print('test 1')

tmp_dir = os.path.join(setsm_dir, tile, 'all_strips')
mkdir_p(tmp_dir)
list_tmp_dem = [
    os.path.join(tmp_dir, os.path.splitext(os.path.splitext(
        os.path.basename(seg_tar_gz))[0])[0] + '_dem.tif')
    for seg_tar_gz in seg_tar_gz_list
]
for seg_tar_gz in seg_tar_gz_list:
    print('Extracting dem file of segment ' +
          str(seg_tar_gz_list.index(seg_tar_gz) + 1) + ' out of ' +
          str(len(seg_tar_gz_list)))
    extract_file_from_tar_gz(
        seg_tar_gz,
        os.path.splitext(os.path.splitext(
            os.path.basename(seg_tar_gz))[0])[0] + '_dem.tif',
        list_tmp_dem[seg_tar_gz_list.index(seg_tar_gz)])

list_files = glob(os.path.join(setsm_dir, '**/*_dem.tif'), recursive=True)

fn_shp = '/data/icesat/travail_en_cours/romain/data/outlines/rgi60/regions/rgi60_merge.shp'
ds_shp = gdal.OpenEx(fn_shp, gdal.OF_VECTOR)
layer_name = os.path.splitext(os.path.basename(fn_shp))[0]
layer = ds_shp.GetLayer()
epsg_base = 4326

for gla in list_19:
    list_final = []
    list_cov = []
    list_date = []
    print('Working on glacier: ' + gla[1])
    gla_dir = os.path.join(out_dir, gla[1])
    mkdir_p(gla_dir)