def mvt(request):
    """Render a Mapbox Vector Tile (MVT) for the requested z/x/y tile.

    Query parameters read from ``request.GET``: ``z``, ``x``, ``y`` (tile
    address), ``resource`` (comma-separated resource ids), and optional
    ``extent`` (default 4096), ``simplification`` (default extent/512) and
    ``padding`` (default 0.05, i.e. 5% of the tile size on each side).

    Returns a ``Response`` with the tile bytes, or ``HTTPNoContent`` when
    the generated tile file does not exist.
    """
    z = int(request.GET["z"])
    x = int(request.GET["x"])
    y = int(request.GET["y"])
    extent = int(request.GET.get('extent', 4096))
    simplification = float(request.GET.get("simplification", extent / 512))

    # Lazy iterator of resource ids; empty items from trailing commas are
    # dropped by filter(None, ...).  Consumed exactly once in the loop below.
    resids = map(
        int,
        filter(None, request.GET["resource"].split(",")),
    )

    # web mercator
    merc = SRS.filter_by(id=3857).one()
    minx, miny, maxx, maxy = merc.tile_extent((z, x, y))

    # 5% padding by default
    padding = float(request.GET.get("padding", 0.05))

    # Grow the tile envelope by the padding fraction on every side so
    # features just outside the tile still render their edges.
    bbox = (
        minx - (maxx - minx) * padding,
        miny - (maxy - miny) * padding,
        maxx + (maxx - minx) * padding,
        maxy + (maxy - miny) * padding,
    )
    bbox = box(*bbox, srid=merc.id)

    # One MVT tile per zoom level; COMPRESS=NO because the transport layer
    # handles compression.
    options = [
        "FORMAT=DIRECTORY",
        "TILE_EXTENSION=pbf",
        "MINZOOM=%d" % z,
        "MAXZOOM=%d" % z,
        "EXTENT=%d" % extent,
        "COMPRESS=NO",
    ]
    ds = _ogr_ds(b"MVT", options)

    # In-memory (VSI) directory the MVT driver writes into.
    vsibuf = ds.GetName()

    for resid in resids:
        obj = Resource.filter_by(id=resid).one()
        request.resource_permission(PERM_READ, obj)

        query = obj.feature_query()
        query.intersects(bbox)
        query.geom()

        if IFeatureQueryClipByBox.providedBy(query):
            query.clip_by_box(bbox)

        if IFeatureQuerySimplify.providedBy(query):
            # Pixel size of this tile in layer SRS units, then scaled by the
            # user-provided simplification factor.
            tolerance = ((obj.srs.maxx - obj.srs.minx) / (1 << z)) / extent
            query.simplify(tolerance * simplification)

        _ogr_layer_from_features(obj, query(), name=b"ngw:%d" % obj.id, ds=ds)

    # flush changes
    ds = None

    filepath = os.path.join("%s" % vsibuf, "%d" % z, "%d" % x, "%d.pbf" % y)

    try:
        # NOTE(review): b"%s" % (filepath, ) relies on Python 2 string
        # semantics; under Python 3 formatting a str into bytes raises
        # TypeError -- confirm the intended interpreter version.
        f = gdal.VSIFOpenL(b"%s" % (filepath, ), b"rb")

        if f is not None:
            # SEEK_END = 2
            gdal.VSIFSeekL(f, 0, 2)
            size = gdal.VSIFTellL(f)

            # SEEK_SET = 0
            gdal.VSIFSeekL(f, 0, 0)
            content = gdal.VSIFReadL(1, size, f)
            gdal.VSIFCloseL(f)

            return Response(
                content,
                content_type=b"application/vnd.mapbox-vector-tile",
            )
        else:
            # No features intersected the tile: the driver wrote no file.
            return HTTPNoContent()
    finally:
        # Always drop the temporary VSI buffer, even on error.
        gdal.Unlink(b"%s" % (vsibuf, ))
def jpeg_19():
    """Test JPEG mask-band writing with LSB (default) and MSB bit order.

    For several raster sizes, builds a GTiff with a per-dataset mask split
    vertically at column ``iX``, copies it to JPEG twice (once per bit
    order), and checks that (a) the two outputs differ except for the
    width/height/iX combination where both encodings coincide, and (b) the
    mask read back from each JPEG equals the original TIFF mask.

    Returns 'success' or 'fail' (gdaltest old-style test protocol).
    """
    import struct
    for (width, height, iX) in [(32, 32, 12), (25, 25, 8), (24, 25, 8)]:
        src_ds = gdal.GetDriverByName('GTiff').Create(
            '/vsimem/jpeg_19.tif', width, height, 1)
        src_ds.CreateMaskBand(gdal.GMF_PER_DATASET)
        # Single-byte buffer (1x1) expanded over the region: columns
        # [0, iX) masked valid (255), [iX, width) masked invalid (0).
        src_ds.GetRasterBand(1).GetMaskBand().WriteRaster(
            0, 0, iX, height, struct.pack('B' * 1, 255), 1, 1)
        src_ds.GetRasterBand(1).GetMaskBand().WriteRaster(
            iX, 0, width - iX, height, struct.pack('B' * 1, 0), 1, 1)
        tiff_mask_data = src_ds.GetRasterBand(1).GetMaskBand().ReadRaster(
            0, 0, width, height)

        # Generate a JPEG file with a (default) LSB bit mask order
        out_ds = gdal.GetDriverByName('JPEG').CreateCopy(
            '/vsimem/jpeg_19.jpg', src_ds)
        out_ds = None

        # Generate a JPEG file with a MSB bit mask order
        gdal.SetConfigOption('JPEG_WRITE_MASK_BIT_ORDER', 'MSB')
        out_ds = gdal.GetDriverByName('JPEG').CreateCopy(
            '/vsimem/jpeg_19_msb.jpg', src_ds)
        del out_ds
        # Restore the default before any further driver use.
        gdal.SetConfigOption('JPEG_WRITE_MASK_BIT_ORDER', None)
        src_ds = None

        # Check that the file are indeed different
        statBuf = gdal.VSIStatL('/vsimem/jpeg_19.jpg')
        f = gdal.VSIFOpenL('/vsimem/jpeg_19.jpg', 'rb')
        data1 = gdal.VSIFReadL(1, statBuf.size, f)
        gdal.VSIFCloseL(f)
        statBuf = gdal.VSIStatL('/vsimem/jpeg_19_msb.jpg')
        f = gdal.VSIFOpenL('/vsimem/jpeg_19_msb.jpg', 'rb')
        data2 = gdal.VSIFReadL(1, statBuf.size, f)
        gdal.VSIFCloseL(f)
        # For (24, 25, 8) the LSB and MSB encodings are expected to be
        # byte-identical; for the other sizes they must differ.
        if (width, height, iX) == (24, 25, 8):
            if data1 != data2:
                gdaltest.post_reason('fail')
                return 'fail'
        else:
            if data1 == data2:
                gdaltest.post_reason('fail')
                return 'fail'

        # Check the file with the LSB bit mask order
        ds = gdal.Open('/vsimem/jpeg_19.jpg')
        jpg_mask_data = ds.GetRasterBand(1).GetMaskBand().ReadRaster(
            0, 0, width, height)
        ds = None
        if tiff_mask_data != jpg_mask_data:
            gdaltest.post_reason('fail')
            return 'fail'

        # Check the file with the MSB bit mask order
        ds = gdal.Open('/vsimem/jpeg_19_msb.jpg')
        jpg_mask_data = ds.GetRasterBand(1).GetMaskBand().ReadRaster(
            0, 0, width, height)
        ds = None
        if tiff_mask_data != jpg_mask_data:
            gdaltest.post_reason('fail')
            return 'fail'

        # Clean up per-iteration temporaries.
        gdal.GetDriverByName('GTiff').Delete('/vsimem/jpeg_19.tif')
        gdal.GetDriverByName('JPEG').Delete('/vsimem/jpeg_19.jpg')
        gdal.GetDriverByName('JPEG').Delete('/vsimem/jpeg_19_msb.jpg')

    return 'success'
def vsifile_generic(filename):
    """Exercise basic VSI*L semantics against *filename*.

    Checks, in order: write/flush on a fresh file, that truncation (grow
    then shrink) leaves the file position untouched, stat size and mtime,
    that a read-only handle rejects writes and truncation, and that append
    mode works on both an existing and a freshly-unlinked file.
    """
    opened_at = time.time()

    # The three-flag stat request is used twice below; build it once.
    stat_flags = (gdal.VSI_STAT_EXISTS_FLAG |
                  gdal.VSI_STAT_NATURE_FLAG |
                  gdal.VSI_STAT_SIZE_FLAG)

    # Create the file and write ten bytes.
    handle = gdal.VSIFOpenL(filename, 'wb+')
    assert handle is not None
    assert gdal.VSIFWriteL('0123456789', 1, 10, handle) == 10
    assert gdal.VSIFFlushL(handle) == 0

    # Growing then shrinking must not move the current position (10).
    assert gdal.VSIFTruncateL(handle, 20) == 0
    assert gdal.VSIFTellL(handle) == 10
    assert gdal.VSIFTruncateL(handle, 5) == 0
    assert gdal.VSIFTellL(handle) == 10

    # Seek to the shrunk end (offset 5) and extend by two bytes.
    assert gdal.VSIFSeekL(handle, 0, 2) == 0
    assert gdal.VSIFTellL(handle) == 5
    gdal.VSIFWriteL('XX', 1, 2, handle)
    gdal.VSIFCloseL(handle)

    info = gdal.VSIStatL(filename, stat_flags)
    assert info.size == 7
    assert abs(opened_at - info.mtime) <= 2

    # A read-only handle must refuse both writes and truncation.
    handle = gdal.VSIFOpenL(filename, 'rb')
    payload = gdal.VSIFReadL(1, 7, handle)
    assert gdal.VSIFWriteL('a', 1, 1, handle) == 0
    assert gdal.VSIFTruncateL(handle, 0) != 0
    gdal.VSIFCloseL(handle)
    assert payload.decode('ascii') == '01234XX'

    # Test append mode on existing file
    handle = gdal.VSIFOpenL(filename, 'ab')
    gdal.VSIFWriteL('XX', 1, 2, handle)
    gdal.VSIFCloseL(handle)

    info = gdal.VSIStatL(filename, stat_flags)
    assert info.size == 9

    assert gdal.Unlink(filename) == 0
    assert gdal.VSIStatL(filename, gdal.VSI_STAT_EXISTS_FLAG) is None

    # Test append mode on non existing file
    handle = gdal.VSIFOpenL(filename, 'ab')
    gdal.VSIFWriteL('XX', 1, 2, handle)
    gdal.VSIFCloseL(handle)

    info = gdal.VSIStatL(filename, stat_flags)
    assert info.size == 2

    assert gdal.Unlink(filename) == 0
def parse_jp2_box(xml_tree, out_f, src_jp2file):
    """Serialize one <JP2Box> XML element back into a binary JP2 box.

    ``xml_tree`` is a GDAL CXT node list for a 'JP2Box' element; ``out_f``
    receives the binary box (big-endian 4-byte length, 4-char box name,
    payload); ``src_jp2file`` is the original JP2 file, used to copy raw
    bytes when only data_offset/data_length attributes are present.

    Exactly one content representation is honoured, in priority order:
    BinaryContent, DecodedContent, TextContent, XMLContent, nested JP2Box,
    DecodedGeoTIFF, JP2KCodeStream, else raw copy from the source file.
    Returns True on success, False (after printing a message) on error.
    """
    if not (xml_tree[XML_TYPE_IDX] == gdal.CXT_Element and
            xml_tree[XML_VALUE_IDX] == 'JP2Box'):
        print('Not a JP2Box element')
        return False
    jp2box_name = get_attribute_val(xml_tree, 'name')
    if jp2box_name is None:
        print('Cannot find JP2Box.name attribute')
        return False
    # JP2 box types are always exactly four ASCII characters.
    if len(jp2box_name) != 4:
        print('Invalid JP2Box.name : %s' % jp2box_name)
        return False

    # Probe every supported content representation up front; only the
    # highest-priority one found is used.
    hex_binary_content = get_node_content(
        find_xml_node(xml_tree, 'BinaryContent', immediate_child=True))
    decoded_content = find_xml_node(xml_tree, 'DecodedContent',
                                    immediate_child=True)
    decoded_geotiff = find_xml_node(xml_tree, 'DecodedGeoTIFF',
                                    immediate_child=True)
    text_content = get_node_content(
        find_xml_node(xml_tree, 'TextContent', immediate_child=True))
    xml_content = find_xml_node(xml_tree, 'XMLContent', immediate_child=True)
    jp2box = find_xml_node(xml_tree, 'JP2Box', immediate_child=True)
    jp2codestream = find_xml_node(xml_tree, 'JP2KCodeStream',
                                  immediate_child=True)

    if hex_binary_content:
        if decoded_content or decoded_geotiff or text_content or xml_content or jp2box:
            print(
                'BinaryContent found, and one of DecodedContent/DecodedGeoTIFF/TextContent/XMLContent/JP2Box. The latter will be ignored'
            )
        if jp2box_name == 'uuid':
            # A uuid box carries a 16-byte UUID (32 hex chars) before payload.
            uuid = get_node_content(
                find_xml_node(xml_tree, 'UUID', immediate_child=True))
            if uuid is None:
                print('Cannot find JP2Box.UUID element')
                return False
        else:
            uuid = ''
        # Box length = 8-byte header + payload; hex strings are 2 chars/byte.
        out_f.write(
            struct.pack(
                '>I' * 1,
                8 + int(len(hex_binary_content) / 2) + int(len(uuid) / 2)))
        out_f.write(jp2box_name.encode('ascii'))
        write_hexstring_as_binary(uuid, out_f)
        write_hexstring_as_binary(hex_binary_content, out_f)
    elif decoded_content:
        if decoded_geotiff or text_content or xml_content or jp2box:
            print(
                'DecodedContent found, and one of DecodedGeoTIFF/TextContent/XMLContent/JP2Box. The latter will be ignored'
            )
        # Payload length unknown in advance: write a placeholder length,
        # serialize the fields, then seek back and patch the real length.
        pos = out_f.tell()
        out_f.write(struct.pack('>I' * 1, 0))
        out_f.write(jp2box_name.encode('ascii'))
        for child_idx in range(XML_FIRST_CHILD_IDX, len(decoded_content)):
            child = decoded_content[child_idx]
            if child[XML_TYPE_IDX] == gdal.CXT_Element and \
               child[XML_VALUE_IDX] == 'Field':
                if not parse_field(child, out_f, src_jp2file):
                    return False
        new_pos = out_f.tell()
        out_f.seek(pos, 0)
        out_f.write(struct.pack('>I' * 1, new_pos - pos))
        out_f.seek(new_pos, 0)
    elif text_content:
        if decoded_geotiff or xml_content or jp2box:
            print(
                'TextContent found, and one of DecodedGeoTIFF/XMLContent/JP2Box. The latter will be ignored'
            )
        out_f.write(struct.pack('>I' * 1, 8 + len(text_content)))
        out_f.write(jp2box_name.encode('ascii'))
        out_f.write(text_content.encode('latin1'))
    elif xml_content:
        if decoded_geotiff or jp2box:
            print(
                'XMLContent found, and one of DecodedGeoTIFF/JP2Box. The latter will be ignored'
            )
        serialized_xml_content = gdal.SerializeXMLTree(
            xml_content[XML_FIRST_CHILD_IDX])
        out_f.write(struct.pack('>I' * 1, 8 + len(serialized_xml_content)))
        out_f.write(jp2box_name.encode('ascii'))
        out_f.write(serialized_xml_content.encode('latin1'))
    elif jp2box:
        if decoded_geotiff:
            print(
                'JP2Box found, and one of DecodedGeoTIFF. The latter will be ignored'
            )
        # Superbox: recurse into child JP2Box elements, then patch length.
        pos = out_f.tell()
        out_f.write(struct.pack('>I' * 1, 0))
        out_f.write(jp2box_name.encode('ascii'))
        for child_idx in range(XML_FIRST_CHILD_IDX, len(xml_tree)):
            child = xml_tree[child_idx]
            if child[XML_TYPE_IDX] == gdal.CXT_Element and \
               child[XML_VALUE_IDX] == 'JP2Box':
                if not parse_jp2_box(child, out_f, src_jp2file):
                    return False
        new_pos = out_f.tell()
        out_f.seek(pos, 0)
        out_f.write(struct.pack('>I' * 1, new_pos - pos))
        out_f.seek(new_pos, 0)
    elif decoded_geotiff:
        # Re-encode the embedded VRT as a GeoTIFF blob (GeoJP2 uuid box).
        serialized_xml_content = gdal.SerializeXMLTree(
            decoded_geotiff[XML_FIRST_CHILD_IDX])
        vrt_ds = gdal.Open(serialized_xml_content)
        if vrt_ds is None:
            print('Cannot decode VRTDataset. Outputing empty content')
            binary_content = ''
        else:
            out_ds = gdal.GetDriverByName('GTiff').CreateCopy(
                '/vsimem/out.tif', vrt_ds)
            del out_ds
            # NOTE(review): reads at most 10000 bytes of the GeoTIFF --
            # presumably the degenerate GeoJP2 TIFF is always smaller; verify.
            tif_f = gdal.VSIFOpenL('/vsimem/out.tif', 'rb')
            binary_content = gdal.VSIFReadL(1, 10000, tif_f)
            gdal.VSIFCloseL(tif_f)
        uuid = get_node_content(
            find_xml_node(xml_tree, 'UUID', immediate_child=True))
        if uuid is None:
            # Default to the GeoJP2 (GeoTIFF-in-JP2) UUID.
            uuid = 'B14BF8BD083D4B43A5AE8CD7D5A6CE03'
        out_f.write(
            struct.pack('>I' * 1,
                        8 + len(binary_content) + int(len(uuid) / 2)))
        out_f.write(jp2box_name.encode('ascii'))
        write_hexstring_as_binary(uuid, out_f)
        out_f.write(binary_content)
    elif jp2codestream:
        # Embedded codestream: placeholder length, serialize, patch length.
        pos = out_f.tell()
        out_f.write(struct.pack('>I' * 1, 0))
        out_f.write(jp2box_name.encode('ascii'))
        if not parse_jp2codestream(None, jp2codestream, out_f, src_jp2file):
            return False
        new_pos = out_f.tell()
        out_f.seek(pos, 0)
        out_f.write(struct.pack('>I' * 1, new_pos - pos))
        out_f.seek(new_pos, 0)
    else:
        # No inline content: copy the raw payload from the source JP2 file.
        data_offset = get_attribute_val(xml_tree, 'data_offset')
        if data_offset is None:
            print('Cannot find JP2Box.data_offset attribute')
            return False
        data_offset = int(data_offset)
        data_length = get_attribute_val(xml_tree, 'data_length')
        if data_length is None:
            print('Cannot find JP2Box.data_length attribute')
            return False
        data_length = int(data_length)
        src_jp2file.seek(data_offset, 0)
        data = src_jp2file.read(data_length)
        out_f.write(struct.pack('>I' * 1, 8 + data_length))
        out_f.write(jp2box_name.encode('ascii'))
        out_f.write(data)

    return True
def process(argv, progress=None, progress_arg=None):
    """Merge several vector datasets into one (ogrmerge work horse).

    Parses the ogrmerge-style command line in ``argv``, writes an OGR VRT
    describing the merged layers (a union layer in -single mode, one VRT
    layer per source layer otherwise) and, unless the output format is VRT
    itself, runs gdal.VectorTranslate on that temporary VRT to produce the
    destination dataset.

    Returns 0 on success, 1 on error, or the result of Usage() on bad
    arguments.
    """
    if not argv:
        return Usage()

    # ---- Option state, filled by the argument loop below.
    dst_filename = None
    output_format = None
    src_datasets = []
    overwrite_ds = False
    overwrite_layer = False
    update = False
    append = False
    single_layer = False
    layer_name_template = None
    skip_failures = False
    src_geom_types = []
    field_strategy = None
    src_layer_field_name = None
    src_layer_field_content = None
    a_srs = None
    s_srs = None
    t_srs = None
    dsco = []
    lco = []

    i = 0
    while i < len(argv):
        arg = argv[i]
        if (arg == '-f' or arg == '-of') and i + 1 < len(argv):
            i = i + 1
            output_format = argv[i]
        elif arg == '-o' and i + 1 < len(argv):
            i = i + 1
            dst_filename = argv[i]
        elif arg == '-progress':
            progress = ogr.TermProgress_nocb
            progress_arg = None
        elif arg == '-q' or arg == '-quiet':
            pass
        elif arg[0:5] == '-skip':
            skip_failures = True
        elif arg == '-update':
            update = True
        elif arg == '-overwrite_ds':
            overwrite_ds = True
        elif arg == '-overwrite_layer':
            # Overwriting a layer implies opening the dataset for update.
            overwrite_layer = True
            update = True
        elif arg == '-append':
            append = True
            update = True
        elif arg == '-single':
            single_layer = True
        elif arg == '-a_srs' and i + 1 < len(argv):
            i = i + 1
            a_srs = argv[i]
        elif arg == '-s_srs' and i + 1 < len(argv):
            i = i + 1
            s_srs = argv[i]
        elif arg == '-t_srs' and i + 1 < len(argv):
            i = i + 1
            t_srs = argv[i]
        elif arg == '-nln' and i + 1 < len(argv):
            i = i + 1
            layer_name_template = argv[i]
        elif arg == '-field_strategy' and i + 1 < len(argv):
            i = i + 1
            field_strategy = argv[i]
        elif arg == '-src_layer_field_name' and i + 1 < len(argv):
            i = i + 1
            src_layer_field_name = argv[i]
        elif arg == '-src_layer_field_content' and i + 1 < len(argv):
            i = i + 1
            src_layer_field_content = argv[i]
        elif arg == '-dsco' and i + 1 < len(argv):
            i = i + 1
            dsco.append(argv[i])
        elif arg == '-lco' and i + 1 < len(argv):
            i = i + 1
            lco.append(argv[i])
        elif arg == '-src_geom_type' and i + 1 < len(argv):
            i = i + 1
            src_geom_type_names = argv[i].split(',')
            for src_geom_type_name in src_geom_type_names:
                src_geom_type = _GetGeomType(src_geom_type_name)
                if src_geom_type is None:
                    print('ERROR: Unrecognized geometry type: %s' %
                          src_geom_type_name)
                    return 1
                src_geom_types.append(src_geom_type)
        elif arg[0] == '-':
            print('ERROR: Unrecognized argument : %s' % arg)
            return Usage()
        else:
            # Positional argument: a source dataset, possibly a glob.
            if '*' in arg:
                if sys.version_info < (3, 0, 0):
                    # Python 2 glob returns bytes; decode to unicode.
                    src_datasets += [fn.decode(sys.getfilesystemencoding())
                                     for fn in glob.glob(arg)]
                else:
                    src_datasets += glob.glob(arg)
            else:
                src_datasets.append(arg)
        i = i + 1

    if dst_filename is None:
        print('Missing -o')
        return 1

    if update:
        if output_format is not None:
            print('ERROR: -f incompatible with -update')
            return 1
        if dsco:
            print('ERROR: -dsco incompatible with -update')
            return 1
        output_format = ''
    else:
        if output_format is None:
            output_format = GetOutputDriverFor(dst_filename)

    if src_layer_field_content is None:
        src_layer_field_content = '{AUTO_NAME}'
    elif src_layer_field_name is None:
        src_layer_field_name = 'source_ds_lyr'

    # A single .shp file can only hold one layer; multi-layer shapefile
    # output must target a directory.
    if not single_layer and output_format == 'ESRI Shapefile' and \
       dst_filename.lower().endswith('.shp'):
        print('ERROR: Non-single layer mode incompatible with non-directory '
              'shapefile output')
        return 1

    if not src_datasets:
        print('ERROR: No source datasets')
        return 1

    if layer_name_template is None:
        if single_layer:
            layer_name_template = 'merged'
        else:
            layer_name_template = '{AUTO_NAME}'

    vrt_filename = None
    if not EQUAL(output_format, 'VRT'):
        # Real output format: the VRT is only an intermediate, kept in
        # /vsimem, and translated into dst_filename at the end.
        dst_ds = gdal.OpenEx(dst_filename, gdal.OF_VECTOR | gdal.OF_UPDATE)
        if dst_ds is not None:
            if not update and not overwrite_ds:
                print('ERROR: Destination dataset already exists, ' +
                      'but -update nor -overwrite_ds are specified')
                return 1
            if overwrite_ds:
                drv = dst_ds.GetDriver()
                dst_ds = None
                if drv.GetDescription() == 'OGR_VRT':
                    # We don't want to destroy the sources of the VRT
                    gdal.Unlink(dst_filename)
                else:
                    drv.Delete(dst_filename)
        elif update:
            print('ERROR: Destination dataset does not exist')
            return 1
        if dst_ds is None:
            drv = gdal.GetDriverByName(output_format)
            if drv is None:
                print('ERROR: Invalid driver: %s' % output_format)
                return 1
            dst_ds = drv.Create(
                dst_filename, 0, 0, 0, gdal.GDT_Unknown, dsco)
            if dst_ds is None:
                return 1

        vrt_filename = '/vsimem/_ogrmerge_.vrt'
    else:
        # VRT output: the VRT file *is* the destination.
        if gdal.VSIStatL(dst_filename) and not overwrite_ds:
            print('ERROR: Destination dataset already exists, ' +
                  'but -overwrite_ds are specified')
            return 1
        vrt_filename = dst_filename

    f = gdal.VSIFOpenL(vrt_filename, 'wb')
    if f is None:
        print('ERROR: Cannot create %s' % vrt_filename)
        return 1

    writer = XMLWriter(f)
    writer.open_element('OGRVRTDataSource')

    if single_layer:
        # All source layers are merged into one OGRVRTUnionLayer.
        ogr_vrt_union_layer_written = False

        for src_ds_idx, src_dsname in enumerate(src_datasets):
            src_ds = ogr.Open(src_dsname)
            if src_ds is None:
                print('ERROR: Cannot open %s' % src_dsname)
                if skip_failures:
                    continue
                gdal.VSIFCloseL(f)
                gdal.Unlink(vrt_filename)
                return 1
            for src_lyr_idx, src_lyr in enumerate(src_ds):
                if src_geom_types:
                    gt = ogr.GT_Flatten(src_lyr.GetGeomType())
                    if gt not in src_geom_types:
                        continue
                # The union layer element is opened lazily, on the first
                # source layer that passes the geometry filter.
                if not ogr_vrt_union_layer_written:
                    ogr_vrt_union_layer_written = True
                    writer.open_element('OGRVRTUnionLayer',
                                        attrs={'name': layer_name_template})
                    if src_layer_field_name is not None:
                        writer.write_element_value('SourceLayerFieldName',
                                                   src_layer_field_name)
                    if field_strategy is not None:
                        writer.write_element_value('FieldStrategy',
                                                   field_strategy)

                layer_name = src_layer_field_content

                src_lyr_name = src_lyr.GetName()
                try:
                    # Python 2: layer names are bytes; decode them.
                    src_lyr_name = src_lyr_name.decode('utf-8')
                except AttributeError:
                    pass

                # Basename of the source file without its extension, or
                # None when the source is not a plain file.
                basename = None
                if os.path.exists(src_dsname):
                    basename = os.path.basename(src_dsname)
                    if '.' in basename:
                        basename = '.'.join(basename.split(".")[0:-1])

                # Expand the {AUTO_NAME} and other placeholders.
                if basename == src_lyr_name:
                    layer_name = layer_name.replace('{AUTO_NAME}', basename)
                elif basename is None:
                    layer_name = layer_name.replace(
                        '{AUTO_NAME}',
                        'Dataset%d_%s' % (src_ds_idx, src_lyr_name))
                else:
                    layer_name = layer_name.replace(
                        '{AUTO_NAME}', basename + '_' + src_lyr_name)

                if basename is not None:
                    layer_name = layer_name.replace('{DS_BASENAME}', basename)
                else:
                    layer_name = layer_name.replace('{DS_BASENAME}',
                                                    src_dsname)
                layer_name = layer_name.replace('{DS_NAME}',
                                                '%s' % src_dsname)
                layer_name = layer_name.replace('{DS_INDEX}',
                                                '%d' % src_ds_idx)
                layer_name = layer_name.replace('{LAYER_NAME}',
                                                src_lyr_name)
                layer_name = layer_name.replace('{LAYER_INDEX}',
                                                '%d' % src_lyr_idx)

                if t_srs is not None:
                    writer.open_element('OGRVRTWarpedLayer')

                writer.open_element('OGRVRTLayer',
                                    attrs={'name': layer_name})
                attrs = {}
                # Use relative paths only when the VRT itself lives in the
                # current directory, so the relative reference stays valid.
                if EQUAL(output_format, 'VRT') and \
                   os.path.exists(src_dsname) and \
                   not os.path.isabs(src_dsname) and \
                   '/' not in vrt_filename and \
                   '\\' not in vrt_filename:
                    attrs['relativeToVRT'] = '1'
                if single_layer:
                    attrs['shared'] = '1'
                writer.write_element_value('SrcDataSource', src_dsname,
                                           attrs=attrs)
                writer.write_element_value('SrcLayer', src_lyr.GetName())

                if a_srs is not None:
                    writer.write_element_value('LayerSRS', a_srs)
                writer.close_element('OGRVRTLayer')
                if t_srs is not None:
                    if s_srs is not None:
                        writer.write_element_value('SrcSRS', s_srs)
                    writer.write_element_value('TargetSRS', t_srs)
                    writer.close_element('OGRVRTWarpedLayer')

        if ogr_vrt_union_layer_written:
            writer.close_element('OGRVRTUnionLayer')
    else:
        # One OGRVRTLayer per source layer.
        for src_ds_idx, src_dsname in enumerate(src_datasets):
            src_ds = ogr.Open(src_dsname)
            if src_ds is None:
                print('ERROR: Cannot open %s' % src_dsname)
                if skip_failures:
                    continue
                gdal.VSIFCloseL(f)
                gdal.Unlink(vrt_filename)
                return 1
            for src_lyr_idx, src_lyr in enumerate(src_ds):
                if src_geom_types:
                    gt = ogr.GT_Flatten(src_lyr.GetGeomType())
                    if gt not in src_geom_types:
                        continue

                src_lyr_name = src_lyr.GetName()
                try:
                    src_lyr_name = src_lyr_name.decode('utf-8')
                except AttributeError:
                    pass
                layer_name = layer_name_template
                basename = None
                if os.path.exists(src_dsname):
                    basename = os.path.basename(src_dsname)
                    if '.' in basename:
                        basename = '.'.join(basename.split(".")[0:-1])
                if basename == src_lyr_name:
                    layer_name = layer_name.replace('{AUTO_NAME}', basename)
                elif basename is None:
                    layer_name = layer_name.replace(
                        '{AUTO_NAME}',
                        'Dataset%d_%s' % (src_ds_idx, src_lyr_name))
                else:
                    layer_name = layer_name.replace(
                        '{AUTO_NAME}', basename + '_' + src_lyr_name)
                if basename is not None:
                    layer_name = layer_name.replace('{DS_BASENAME}', basename)
                elif '{DS_BASENAME}' in layer_name:
                    # {DS_BASENAME} cannot be resolved for a non-file source;
                    # fall back to an index-based name only with -skip.
                    if skip_failures:
                        if '{DS_INDEX}' not in layer_name:
                            layer_name = layer_name.replace(
                                '{DS_BASENAME}', 'Dataset%d' % src_ds_idx)
                    else:
                        print('ERROR: Layer name template %s '
                              'includes {DS_BASENAME} '
                              'but %s is not a file' %
                              (layer_name_template, src_dsname))
                        gdal.VSIFCloseL(f)
                        gdal.Unlink(vrt_filename)
                        return 1
                layer_name = layer_name.replace('{DS_NAME}',
                                                '%s' % src_dsname)
                layer_name = layer_name.replace('{DS_INDEX}',
                                                '%d' % src_ds_idx)
                layer_name = layer_name.replace('{LAYER_NAME}',
                                                src_lyr_name)
                layer_name = layer_name.replace('{LAYER_INDEX}',
                                                '%d' % src_lyr_idx)

                if t_srs is not None:
                    writer.open_element('OGRVRTWarpedLayer')

                writer.open_element('OGRVRTLayer',
                                    attrs={'name': layer_name})
                attrs = {}
                if EQUAL(output_format, 'VRT') and \
                   os.path.exists(src_dsname) and \
                   not os.path.isabs(src_dsname) and \
                   '/' not in vrt_filename and \
                   '\\' not in vrt_filename:
                    attrs['relativeToVRT'] = '1'
                if single_layer:
                    attrs['shared'] = '1'
                writer.write_element_value('SrcDataSource', src_dsname,
                                           attrs=attrs)
                writer.write_element_value('SrcLayer', src_lyr_name)

                if a_srs is not None:
                    writer.write_element_value('LayerSRS', a_srs)
                writer.close_element('OGRVRTLayer')
                if t_srs is not None:
                    if s_srs is not None:
                        writer.write_element_value('SrcSRS', s_srs)
                    writer.write_element_value('TargetSRS', t_srs)
                    writer.close_element('OGRVRTWarpedLayer')

    writer.close_element('OGRVRTDataSource')

    gdal.VSIFCloseL(f)

    ret = 0
    if not EQUAL(output_format, 'VRT'):
        accessMode = None
        if append:
            accessMode = 'append'
        elif overwrite_layer:
            accessMode = 'overwrite'
        # Translate the temporary VRT into the real destination dataset.
        ret = gdal.VectorTranslate(dst_ds, vrt_filename,
                                   accessMode=accessMode,
                                   layerCreationOptions=lco,
                                   skipFailures=skip_failures,
                                   callback=progress,
                                   callback_data=progress_arg)
        # VectorTranslate returns 1 on success; normalize to 0/1 exit code.
        if ret == 1:
            ret = 0
        else:
            ret = 1
        gdal.Unlink(vrt_filename)

    return ret
def gdal_pansharpen(argv):
    """Build a pansharpened dataset from a panchromatic band and spectral bands.

    Command-line driver: the first positional argument is the panchromatic
    dataset, intermediate positional arguments are spectral datasets
    (optionally suffixed with ',band=N'), and the last one is the output
    file.  A VRTPansharpenedDataset XML description is built and either
    written out directly (-of VRT) or materialized with CreateCopy.

    Returns 0 on success, 1 on error, -1 if argv could not be processed,
    or Usage() on bad arguments.
    """
    argv = gdal.GeneralCmdLineProcessor(argv)
    if argv is None:
        return -1

    pan_name = None
    last_name = None
    spectral_ds = []
    spectral_bands = []
    out_name = None
    bands = []
    weights = []
    # NOTE(review): 'format' shadows the builtin; kept for byte-identity.
    format = 'GTiff'
    creation_options = []
    callback = gdal.TermProgress
    resampling = None
    spat_adjust = None
    verbose_vrt = False
    num_threads = None
    bitdepth = None
    nodata = None

    i = 1
    argc = len(argv)
    while i < argc:
        if (argv[i] == '-of' or argv[i] == '-f') and i < len(argv) - 1:
            format = argv[i + 1]
            i = i + 1
        elif argv[i] == '-r' and i < len(argv) - 1:
            resampling = argv[i + 1]
            i = i + 1
        elif argv[i] == '-spat_adjust' and i < len(argv) - 1:
            spat_adjust = argv[i + 1]
            i = i + 1
        elif argv[i] == '-b' and i < len(argv) - 1:
            bands.append(int(argv[i + 1]))
            i = i + 1
        elif argv[i] == '-w' and i < len(argv) - 1:
            weights.append(float(argv[i + 1]))
            i = i + 1
        elif argv[i] == '-co' and i < len(argv) - 1:
            creation_options.append(argv[i + 1])
            i = i + 1
        elif argv[i] == '-threads' and i < len(argv) - 1:
            num_threads = argv[i + 1]
            i = i + 1
        elif argv[i] == '-bitdepth' and i < len(argv) - 1:
            bitdepth = argv[i + 1]
            i = i + 1
        elif argv[i] == '-nodata' and i < len(argv) - 1:
            nodata = argv[i + 1]
            i = i + 1
        elif argv[i] == '-q':
            callback = None
        elif argv[i] == '-verbose_vrt':
            verbose_vrt = True
        elif argv[i][0] == '-':
            sys.stderr.write('Unrecognized option : %s\n' % argv[i])
            return Usage()
        elif pan_name is None:
            # First positional argument: the panchromatic dataset.
            pan_name = argv[i]
            pan_ds = gdal.Open(pan_name)
            if pan_ds is None:
                return 1
        else:
            # Each subsequent positional argument is handled one step late:
            # the previous one (last_name) is a spectral input, the current
            # one may turn out to be the output name after the loop.
            if last_name is not None:
                pos = last_name.find(',band=')
                if pos > 0:
                    # 'file,band=N' selects a single band of the dataset.
                    spectral_name = last_name[0:pos]
                    ds = gdal.Open(spectral_name)
                    if ds is None:
                        return 1
                    band_num = int(last_name[pos + len(',band='):])
                    band = ds.GetRasterBand(band_num)
                    spectral_ds.append(ds)
                    spectral_bands.append(band)
                else:
                    # Plain dataset name: use every band.
                    spectral_name = last_name
                    ds = gdal.Open(spectral_name)
                    if ds is None:
                        return 1
                    for j in range(ds.RasterCount):
                        spectral_ds.append(ds)
                        spectral_bands.append(ds.GetRasterBand(j + 1))
            last_name = argv[i]

        i = i + 1

    if pan_name is None or len(spectral_bands) == 0:
        return Usage()
    out_name = last_name

    if len(bands) == 0:
        bands = [j + 1 for j in range(len(spectral_bands))]
    else:
        for i in range(len(bands)):
            if bands[i] < 0 or bands[i] > len(spectral_bands):
                print('Invalid band number in -b: %d' % bands[i])
                return 1

    if len(weights) != 0 and len(weights) != len(spectral_bands):
        print('There must be as many -w values specified as input spectral bands')
        return 1

    vrt_xml = """<VRTDataset subClass="VRTPansharpenedDataset">\n"""
    # Only emit explicit output bands when -b reorders/subsets the inputs.
    if bands != [j + 1 for j in range(len(spectral_bands))]:
        for i in range(len(bands)):
            band = spectral_bands[bands[i] - 1]
            datatype = gdal.GetDataTypeName(band.DataType)
            colorname = gdal.GetColorInterpretationName(
                band.GetColorInterpretation())
            vrt_xml += """    <VRTRasterBand dataType="%s" band="%d" subClass="VRTPansharpenedRasterBand">
        <ColorInterp>%s</ColorInterp>
    </VRTRasterBand>\n""" % (datatype, i + 1, colorname)

    vrt_xml += """  <PansharpeningOptions>\n"""

    if len(weights) != 0:
        vrt_xml += """      <AlgorithmOptions>\n"""
        vrt_xml += """        <Weights>"""
        for i in range(len(weights)):
            if i > 0:
                vrt_xml += ","
            vrt_xml += "%.16g" % weights[i]
        vrt_xml += "</Weights>\n"
        vrt_xml += """      </AlgorithmOptions>\n"""

    if resampling is not None:
        vrt_xml += '      <Resampling>%s</Resampling>\n' % resampling

    if num_threads is not None:
        vrt_xml += '      <NumThreads>%s</NumThreads>\n' % num_threads

    if bitdepth is not None:
        vrt_xml += '      <BitDepth>%s</BitDepth>\n' % bitdepth

    if nodata is not None:
        vrt_xml += '      <NoData>%s</NoData>\n' % nodata

    if spat_adjust is not None:
        vrt_xml += '      <SpatialExtentAdjustment>%s</SpatialExtentAdjustment>\n' % spat_adjust

    # For a VRT output, reference inputs relative to the output location.
    pan_relative = '0'
    if format.upper() == 'VRT':
        if not os.path.isabs(pan_name):
            pan_relative = '1'
            pan_name = os.path.relpath(pan_name, os.path.dirname(out_name))

    vrt_xml += """    <PanchroBand>
      <SourceFilename relativeToVRT="%s">%s</SourceFilename>
      <SourceBand>1</SourceBand>
    </PanchroBand>\n""" % (pan_relative, pan_name)

    for i in range(len(spectral_bands)):
        # dstBand attribute maps this spectral band to an output band when
        # -b selected it; otherwise it only feeds the pansharpening.
        dstband = ''
        for j in range(len(bands)):
            if i + 1 == bands[j]:
                dstband = ' dstBand="%d"' % (j + 1)
                break

        ms_relative = '0'
        ms_name = spectral_ds[i].GetDescription()
        if format.upper() == 'VRT':
            if not os.path.isabs(ms_name):
                ms_relative = '1'
                ms_name = os.path.relpath(ms_name, os.path.dirname(out_name))

        vrt_xml += """    <SpectralBand%s>
      <SourceFilename relativeToVRT="%s">%s</SourceFilename>
      <SourceBand>%d</SourceBand>
    </SpectralBand>\n""" % (dstband, ms_relative, ms_name,
                            spectral_bands[i].GetBand())

    vrt_xml += """  </PansharpeningOptions>\n"""
    vrt_xml += """</VRTDataset>\n"""

    if format.upper() == 'VRT':
        # Write the VRT text directly; reopening validates it.
        f = gdal.VSIFOpenL(out_name, 'wb')
        if f is None:
            print('Cannot create %s' % out_name)
            return 1
        gdal.VSIFWriteL(vrt_xml, 1, len(vrt_xml), f)
        gdal.VSIFCloseL(f)
        if verbose_vrt:
            # Touch the metadata so the full expanded VRT gets rewritten.
            vrt_ds = gdal.Open(out_name, gdal.GA_Update)
            vrt_ds.SetMetadata(vrt_ds.GetMetadata())
        else:
            vrt_ds = gdal.Open(out_name)
        if vrt_ds is None:
            return 1

        return 0

    # Non-VRT output: open the in-memory XML and materialize it.
    vrt_ds = gdal.Open(vrt_xml)
    out_ds = gdal.GetDriverByName(format).CreateCopy(
        out_name, vrt_ds, 0, creation_options, callback=callback)
    if out_ds is None:
        return 1
    return 0
def test_ecrgtoc_4():
    """Check ECRG TOC handling of multiple discs and multiple scales.

    Writes a synthetic TOC.xml with two discs (one of them carrying two
    scales), verifies the top-level dataset (no raster bands, expected
    geotransform/SRS/file list, six SUBDATASETS entries), and checks that
    each valid ECRG_TOC_ENTRY subdataset opens while a name lacking the
    scale component is rejected.
    """
    toc_xml = """<Table_of_Contents>
  <file_header file_status="new">
    <file_name>TOC.xml</file_name>
  </file_header>
  <product product_title="ProductTitle">
    <disc id="DiscId">
      <frame_list number_of_frames="2">
        <scale size="1:500 K">
          <frame name="000000009s0013.lf2">
            <frame_path>clfc\\2</frame_path>
            <frame_version>001</frame_version>
            <frame_chart_type>lf</frame_chart_type>
            <frame_zone>2</frame_zone>
          </frame>
        </scale>
        <scale size="1:1000 K">
          <frame name="000000009s0013.lf2">
            <frame_path>clfc\\2</frame_path>
            <frame_version>001</frame_version>
            <frame_chart_type>lf</frame_chart_type>
            <frame_zone>2</frame_zone>
          </frame>
        </scale>
      </frame_list>
    </disc>
    <disc id="DiscId2">
      <frame_list number_of_frames="1">
        <scale size="1:500 K">
          <frame name="000000009t0013.lf2">
            <frame_path>clfc\\2</frame_path>
            <frame_version>001</frame_version>
            <frame_chart_type>lf</frame_chart_type>
            <frame_zone>2</frame_zone>
          </frame>
        </scale>
      </frame_list>
    </disc>
  </product>
  <extension_list>
    <extension code="LF">
      <chart_code>LF</chart_code>
      <chart_type>1:500 K (LFC Day)</chart_type>
      <chart_scale>1:500 K</chart_scale>
      <chart_description>LFC Day</chart_description>
    </extension>
  </extension_list>
</Table_of_Contents>"""

    handle = gdal.VSIFOpenL('/vsimem/TOC.xml', 'wb')
    gdal.VSIFWriteL(toc_xml, 1, len(toc_xml), handle)
    gdal.VSIFCloseL(handle)

    ds = gdal.Open('/vsimem/TOC.xml')
    assert ds is not None
    assert ds.RasterCount == 0, 'bad raster count'

    expected_gt = (-85.43147208121826, 0.00059486040609137061, 0.0,
                   37.241379310344833, 0.0, -0.00044985604606525913)
    for got, expected in zip(ds.GetGeoTransform(), expected_gt):
        assert abs(got - expected) <= 1e-10, \
            'did not get expected geotransform'

    assert 'WGS 84' in ds.GetProjectionRef(), 'did not get expected SRS'

    filelist = ds.GetFileList()
    assert len(filelist) == 4, 'did not get expected filelist'

    subdatasets = ds.GetMetadata('SUBDATASETS')
    if len(subdatasets) != 6:
        print(filelist)
        pytest.fail('did not get expected subdatasets')

    ds = None

    # Every disc/scale combination present in the TOC must open.
    for entry in (
            'ECRG_TOC_ENTRY:ProductTitle:DiscId:1_500_K:/vsimem/TOC.xml',
            'ECRG_TOC_ENTRY:ProductTitle:DiscId:1_1000_K:/vsimem/TOC.xml',
            'ECRG_TOC_ENTRY:ProductTitle:DiscId2:1_500_K:/vsimem/TOC.xml'):
        ds = gdal.Open(entry)
        assert ds is not None, 'did not get subdataset'
        ds = None

    # A subdataset name without the scale component must be rejected.
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    ds = gdal.Open('ECRG_TOC_ENTRY:ProductTitle:DiscId:/vsimem/TOC.xml')
    gdal.PopErrorHandler()
    assert ds is None, 'should not have got subdataset'

    gdal.Unlink('/vsimem/TOC.xml')
def vsicrypt_4():
    """Fuzz /vsicrypt against a plain /vsimem file with random writes/reads.

    For 1000 RNG seeds, performs an identical sequence of seeks, writes and
    interleaved reads on an encrypted file and an unencrypted reference
    file, and checks that their observable content always matches.

    Returns 'success', 'fail' or 'skip' (gdaltest old-style protocol).
    The exact order of random.* calls defines the test vectors and must
    not be changed.
    """
    if not gdaltest.has_vsicrypt:
        return 'skip'
    test_file = '/vsicrypt/key=DONT_USE_IN_PROD,sector_size=32,file=/vsimem/file_enc.bin'
    ref_file = '/vsimem/file.bin'
    for seed in range(1000):
        # Start each seed from empty files.
        gdal.Unlink(test_file)
        gdal.Unlink(ref_file)
        test_f = gdal.VSIFOpenL(test_file, 'wb+')
        ref_f = gdal.VSIFOpenL(ref_file, 'wb+')
        import random
        random.seed(seed)
        for i in range(20):
            random_offset = random.randint(0, 1000)
            gdal.VSIFSeekL(test_f, random_offset, 0)
            gdal.VSIFSeekL(ref_f, random_offset, 0)
            random_size = random.randint(1, 80)
            # Printable payload: characters in the '(' .. '1' range.
            random_content = ''.join([
                chr(40 + int(10 * random.random()))
                for i in range(random_size)
            ])
            gdal.VSIFWriteL(random_content, 1, random_size, test_f)
            gdal.VSIFWriteL(random_content, 1, random_size, ref_f)
            # Half the time, also read back a random window and compare.
            if random.randint(0, 1) == 0:
                random_offset = random.randint(0, 1500)
                gdal.VSIFSeekL(test_f, random_offset, 0)
                gdal.VSIFSeekL(ref_f, random_offset, 0)
                random_size = random.randint(1, 80)
                test_content = gdal.VSIFReadL(1, random_size, test_f)
                ref_content = gdal.VSIFReadL(1, random_size, ref_f)
                if test_content != ref_content:
                    print(seed)
                    print('Test content (%d):' % len(test_content))
                    print(test_content)
                    print('')
                    print('Ref content (%d):' % len(ref_content))
                    print(ref_content)
                    return 'fail'
        # Final full-content comparison for this seed.
        gdal.VSIFSeekL(test_f, 0, 0)
        gdal.VSIFSeekL(ref_f, 0, 0)
        test_content = gdal.VSIFReadL(1, 100000, test_f)
        ref_content = gdal.VSIFReadL(1, 100000, ref_f)
        if test_content != ref_content:
            print(seed)
            print('Test content (%d):' % len(test_content))
            print(test_content)
            print('')
            print('Ref content (%d):' % len(ref_content))
            print(ref_content)
            return 'fail'

    # NOTE(review): test_f/ref_f are never explicitly closed before the
    # final unlink -- presumably relying on VSI unlink semantics; confirm.
    gdal.Unlink(test_file)
    gdal.Unlink(ref_file)

    return 'success'
def vsicrypt_2():
    """Test /vsicrypt error handling.

    Exercises invalid open parameters, truncated and corrupted headers,
    inconsistent initialization vectors, wrong keys (with and without the
    embedded key check), unsupported algorithms and invalid sector sizes.
    Returns 'skip', 'fail' or 'success' (old gdaltest convention).
    """
    if not gdaltest.has_vsicrypt:
        return 'skip'

    # Missing key
    with gdaltest.error_handler():
        fp = gdal.VSIFOpenL('/vsicrypt//vsimem/file.bin', 'wb+')
    if fp is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Invalid file (non-existing underlying path), each open mode
    with gdaltest.error_handler():
        fp = gdal.VSIFOpenL(
            '/vsicrypt/key=DONT_USE_IN_PROD,file=/not_existing/not_existing',
            'wb')
    if fp is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Invalid file
    with gdaltest.error_handler():
        fp = gdal.VSIFOpenL(
            '/vsicrypt/key=DONT_USE_IN_PROD,file=/not_existing/not_existing',
            'rb')
    if fp is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Invalid file
    with gdaltest.error_handler():
        fp = gdal.VSIFOpenL(
            '/vsicrypt/key=DONT_USE_IN_PROD,file=/not_existing/not_existing',
            'ab')
    if fp is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Invalid access mode
    with gdaltest.error_handler():
        fp = gdal.VSIFOpenL(
            '/vsicrypt/key=DONT_USE_IN_PROD,file=/not_existing/not_existing',
            'foo')
    if fp is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Key too short
    with gdaltest.error_handler():
        fp = gdal.VSIFOpenL('/vsicrypt/key=a,file=/vsimem/file.bin', 'wb+')
    if fp is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Invalid signature (not a vsicrypt file)
    gdal.FileFromMemBuffer('/vsimem/file.bin', 'foo')
    with gdaltest.error_handler():
        fp = gdal.VSIFOpenL(
            '/vsicrypt/key=DONT_USE_IN_PROD,file=/vsimem/file.bin', 'rb')
    if fp is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Generate empty file to capture a valid 46-byte header
    fp = gdal.VSIFOpenL('/vsicrypt/key=DONT_USE_IN_PROD,file=/vsimem/file.bin',
                        'wb')
    gdal.VSIFCloseL(fp)
    fp = gdal.VSIFOpenL('/vsicrypt/key=DONT_USE_IN_PROD,file=/vsimem/file.bin',
                        'rb')
    if fp is None:
        gdaltest.post_reason('fail')
        return 'fail'
    gdal.VSIFCloseL(fp)

    # Read the raw (unencrypted-layer) header bytes
    fp = gdal.VSIFOpenL('/vsimem/file.bin', 'rb')
    header = gdal.VSIFReadL(1, 1000, fp)
    gdal.VSIFCloseL(fp)
    if len(header) != 46:
        gdaltest.post_reason('fail')
        print(len(header))
        return 'fail'

    # Test shortening header: every truncation length must be rejected
    for i in range(46):
        fp = gdal.VSIFOpenL('/vsimem/file.bin', 'wb')
        gdal.VSIFWriteL(header, 1, 46 - 1 - i, fp)
        gdal.VSIFCloseL(fp)
        with gdaltest.error_handler():
            fp = gdal.VSIFOpenL(
                '/vsicrypt/key=DONT_USE_IN_PROD,file=/vsimem/file.bin', 'rb')
        if fp is not None:
            gdaltest.post_reason('fail')
            return 'fail'

    # Test corrupting all bytes of header; open may legitimately still
    # succeed for some corruptions, so only make sure we don't crash/leak.
    for i in range(46):
        for val in (0, 127, 255):
            fp = gdal.VSIFOpenL('/vsimem/file.bin', 'wb')
            try:
                # Python 3: bytes are required for the splice below
                new_byte = chr(val).encode('latin1')
            except:
                # Python 2 fallback
                new_byte = chr(val)
            header_new = header[0:i] + new_byte + header[i + 1:]
            gdal.VSIFWriteL(header_new, 1, 46, fp)
            gdal.VSIFCloseL(fp)
            with gdaltest.error_handler():
                fp = gdal.VSIFOpenL(
                    '/vsicrypt/key=DONT_USE_IN_PROD,file='
                    '/vsimem/file.bin', 'rb')
            if fp is not None:
                gdal.VSIFCloseL(fp)

    # Invalid (too short) explicit IV through config option
    gdal.SetConfigOption('VSICRYPT_IV', 'TOO_SHORT')
    with gdaltest.error_handler():
        fp = gdal.VSIFOpenL(
            '/vsicrypt/key=DONT_USE_IN_PROD,file='
            '/vsimem/file.bin', 'wb')
    gdal.SetConfigOption('VSICRYPT_IV', None)
    if fp is not None:
        gdal.VSIFCloseL(fp)

    # Inconsistent initial vector: header declares an 8-byte IV.
    header = struct.pack(
        'B' * 38,
        86, 83, 73, 67, 82, 89, 80, 84,  # signature
        38, 0,  # header size
        1,  # major
        0,  # minor
        0, 2,  # sector size
        0,  # alg
        0,  # mode
        8,  # size of IV (should be 16)
        32, 13, 169, 71, 154, 208, 22, 32,  # IV
        0, 0,  # size of free text
        0,  # size of key check
        0, 0, 0, 0, 0, 0, 0, 0,  # size of unencrypted file
        0, 0  # size of extra content
    )
    fp = gdal.VSIFOpenL('/vsimem/file.bin', 'wb')
    gdal.VSIFWriteL(header, 1, len(header), fp)
    gdal.VSIFCloseL(fp)
    with gdaltest.error_handler():
        fp = gdal.VSIFOpenL(
            '/vsicrypt/key=DONT_USE_IN_PROD,file=/vsimem/file.bin', 'rb')
    if fp is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Inconsistent initial vector with key check.
    header = struct.pack(
        'B' * 39,
        86, 83, 73, 67, 82, 89, 80, 84,  # signature
        39, 0,  # header size
        1,  # major
        0,  # minor
        0, 2,  # sector size
        0,  # alg
        0,  # mode
        8,  # size of IV (should be 16)
        32, 13, 169, 71, 154, 208, 22, 32,  # IV
        0, 0,  # size of free text
        1,  # size of key check
        0,  # key check
        0, 0, 0, 0, 0, 0, 0, 0,  # size of unencrypted file
        0, 0  # size of extra content
    )
    fp = gdal.VSIFOpenL('/vsimem/file.bin', 'wb')
    gdal.VSIFWriteL(header, 1, len(header), fp)
    gdal.VSIFCloseL(fp)
    with gdaltest.error_handler():
        fp = gdal.VSIFOpenL(
            '/vsicrypt/key=DONT_USE_IN_PROD,file=/vsimem/file.bin', 'rb')
    if fp is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Test reading with wrong key: without a key check the open succeeds
    # but the decrypted content must be garbage.
    fp = gdal.VSIFOpenL('/vsicrypt/key=DONT_USE_IN_PROD,file=/vsimem/file.bin',
                        'wb')
    gdal.VSIFWriteL('hello', 1, 5, fp)
    gdal.VSIFCloseL(fp)
    fp = gdal.VSIFOpenL('/vsicrypt/key=dont_use_in_prod,file=/vsimem/file.bin',
                        'rb')
    content = gdal.VSIFReadL(1, 5, fp).decode('latin1')
    gdal.VSIFCloseL(fp)
    if content == 'hello':
        gdaltest.post_reason('fail')
        return 'fail'

    with gdaltest.error_handler():
        fp = gdal.VSIFOpenL('/vsicrypt/key=short_key,file=/vsimem/file.bin',
                            'ab')
    if fp is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Test reading with wrong key with add_key_check: open must now fail.
    fp = gdal.VSIFOpenL(
        '/vsicrypt/key=DONT_USE_IN_PROD,add_key_check=yes,file=/vsimem/file.bin',
        'wb')
    gdal.VSIFWriteL('hello', 1, 5, fp)
    gdal.VSIFCloseL(fp)
    with gdaltest.error_handler():
        fp = gdal.VSIFOpenL(
            '/vsicrypt/key=dont_use_in_prod,file=/vsimem/file.bin', 'rb')
    if fp is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    with gdaltest.error_handler():
        fp = gdal.VSIFOpenL('/vsicrypt/key=short_key,file=/vsimem/file.bin',
                            'ab')
    if fp is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    with gdaltest.error_handler():
        fp = gdal.VSIFOpenL(
            '/vsicrypt/key=dont_use_in_prod,file=/vsimem/file.bin', 'ab')
    if fp is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Test creating with potentially not built-in alg:
    with gdaltest.error_handler():
        fp = gdal.VSIFOpenL(
            '/vsicrypt/alg=blowfish,key=DONT_USE_IN_PROD,file=/vsimem/file.bin',
            'wb')
    if fp is not None:
        gdal.VSIFCloseL(fp)

    # Invalid sector_size
    with gdaltest.error_handler():
        fp = gdal.VSIFOpenL(
            '/vsicrypt/key=DONT_USE_IN_PROD,sector_size=1,file=/vsimem/file.bin',
            'wb')
    if fp is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Sector size (16) should be at least twice larger than the block size (16) in CBC_CTS
    with gdaltest.error_handler():
        fp = gdal.VSIFOpenL(
            '/vsicrypt/key=DONT_USE_IN_PROD,sector_size=16,mode=CBC_CTS,file=/vsimem/file.bin',
            'wb')
    if fp is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    gdal.Unlink('/vsimem/file.bin')

    return 'success'
def test_vrtmultidim_serialize():
    """Test serialization of a multidimensional VRT dataset.

    Opens a hand-written VRT in update mode, adds a root-group attribute,
    closes it, and checks that the re-serialized XML is byte-identical to
    the expected canonical output (dimensions sorted, default Source slabs
    made explicit, offsets/counts added to value elements).

    NOTE(review): the multi-line XML literals below were reconstructed with
    GDAL's 2-space serializer indentation after whitespace loss in
    transcription — verify the expected string against the driver output.
    """
    tmpfile = '/vsimem/test.vrt'
    gdal.FileFromMemBuffer(
        tmpfile, """<VRTDataset>
  <Group name="/">
    <Dimension name="Y" size="4" indexingVariable="Y"/>
    <Dimension name="X" size="3"/>
    <Array name="Y">
      <DataType>Float64</DataType>
      <DimensionRef ref="Y"/>
      <RegularlySpacedValues start="0.5" increment="10.5"/>
    </Array>
    <Array name="ar">
      <DataType>Float64</DataType>
      <DimensionRef ref="Y"/>
      <Dimension name="myX" size="3"/>
      <Source>
        <SourceFilename>data/byte.tif</SourceFilename>
        <SourceBand>1</SourceBand>
        <SourceSlab offset="1,1" count="2,2" step="2,1"/>
        <DestSlab offset="2,1"/>
      </Source>
      <Source>
        <SourceFilename>foo</SourceFilename>
        <SourceArray>the_array</SourceArray>
        <SourceTranspose>1,0</SourceTranspose>
        <SourceView>[...]</SourceView>
      </Source>
      <ConstantValue>15</ConstantValue>
      <InlineValues>0 1 2 3 4 5 6 7 8 9 10 11</InlineValues>
      <Attribute name="bar">
        <DataType>Int32</DataType>
        <Value>1</Value>
      </Attribute>
    </Array>
    <Array name="ar_string_no_dim">
      <DataType>String</DataType>
      <InlineValuesWithValueElement>
        <Value>foo</Value>
      </InlineValuesWithValueElement>
    </Array>
    <Array name="ar_string_with_dim">
      <DataType>String</DataType>
      <DimensionRef ref="X"/>
      <InlineValuesWithValueElement>
        <Value>foo</Value>
        <Value>bar</Value>
        <Value>baz</Value>
      </InlineValuesWithValueElement>
    </Array>
    <Group name="subgroup">
      <Dimension name="Y" size="5"/>
      <Array name="ar">
        <DataType>Float64</DataType>
        <DimensionRef ref="Y"/>
        <DimensionRef ref="/Y"/>
      </Array>
    </Group>
  </Group>
</VRTDataset>""")
    # Open in update mode and add an attribute: this marks the dataset
    # dirty so that closing re-serializes the whole tree.
    ds = gdal.OpenEx(tmpfile, gdal.OF_MULTIDIM_RASTER | gdal.OF_UPDATE)
    rg = ds.GetRootGroup()
    ds = None
    attr = rg.CreateAttribute('foo', [], gdal.ExtendedDataType.CreateString())
    attr.Write('bar')
    attr = None
    rg = None
    # Read back the re-serialized file
    f = gdal.VSIFOpenL(tmpfile, 'rb')
    got_data = gdal.VSIFReadL(1, 10000, f).decode('ascii')
    gdal.VSIFCloseL(f)
    #print(got_data)
    assert got_data == """<VRTDataset>
  <Group name="/">
    <Dimension name="X" size="3" />
    <Dimension name="Y" size="4" indexingVariable="Y" />
    <Attribute name="foo">
      <DataType>String</DataType>
      <Value>bar</Value>
    </Attribute>
    <Array name="Y">
      <DataType>Float64</DataType>
      <DimensionRef ref="Y" />
      <RegularlySpacedValues start="0.5" increment="10.5" />
    </Array>
    <Array name="ar">
      <DataType>Float64</DataType>
      <DimensionRef ref="Y" />
      <Dimension name="myX" size="3" />
      <Source>
        <SourceFilename>data/byte.tif</SourceFilename>
        <SourceBand>1</SourceBand>
        <SourceSlab offset="1,1" count="2,2" step="2,1" />
        <DestSlab offset="2,1" />
      </Source>
      <Source>
        <SourceFilename>foo</SourceFilename>
        <SourceArray>the_array</SourceArray>
        <SourceTranspose>1,0</SourceTranspose>
        <SourceView>[...]</SourceView>
        <SourceSlab offset="0,0" count="0,0" step="1,1" />
        <DestSlab offset="0,0" />
      </Source>
      <ConstantValue offset="0,0" count="4,3">15</ConstantValue>
      <InlineValues offset="0,0" count="4,3">0 1 2 3 4 5 6 7 8 9 10 11</InlineValues>
      <Attribute name="bar">
        <DataType>Int32</DataType>
        <Value>1</Value>
      </Attribute>
    </Array>
    <Array name="ar_string_no_dim">
      <DataType>String</DataType>
      <InlineValuesWithValueElement>
        <Value>foo</Value>
      </InlineValuesWithValueElement>
    </Array>
    <Array name="ar_string_with_dim">
      <DataType>String</DataType>
      <DimensionRef ref="X" />
      <InlineValuesWithValueElement offset="0" count="3">
        <Value>foo</Value>
        <Value>bar</Value>
        <Value>baz</Value>
      </InlineValuesWithValueElement>
    </Array>
    <Group name="subgroup">
      <Dimension name="Y" size="5" />
      <Array name="ar">
        <DataType>Float64</DataType>
        <DimensionRef ref="Y" />
        <DimensionRef ref="/Y" />
      </Array>
    </Group>
  </Group>
</VRTDataset>
"""
    _validate(got_data)

    gdal.Unlink(tmpfile)
def vsicrypt_3():
    """Test /vsicrypt creation options and key generation.

    Round-trips 'hello' through every supported sector size, algorithm and
    mode option, tolerates optionally-missing algorithms, then tests
    key=GENERATE_IT (retrieving the generated key through the
    VSICRYPT_KEY_B64 config option) plus Stat/Rename/ReadDir passthrough.
    Returns 'skip', 'fail' or 'success' (old gdaltest convention).
    """
    if not gdaltest.has_vsicrypt:
        return 'skip'

    # Options that must always be accepted ('invalid' values are expected
    # to emit an error but still fall back to a working default).
    for options in [
            'sector_size=16', 'alg=AES', 'alg=DES_EDE2', 'alg=DES_EDE3',
            'alg=SKIPJACK', 'alg=invalid', 'mode=CBC', 'mode=CFB', 'mode=OFB',
            'mode=CTR', 'mode=CBC_CTS', 'mode=invalid',
            'freetext=my_free_text', 'add_key_check=yes'
    ]:
        gdal.Unlink('/vsimem/file.bin')
        if options == 'alg=invalid' or options == 'mode=invalid':
            with gdaltest.error_handler():
                fp = gdal.VSIFOpenL(
                    '/vsicrypt/key=DONT_USE_IN_PRODDONT_USE_IN_PROD,%s,file=/vsimem/file.bin'
                    % options, 'wb')
        else:
            fp = gdal.VSIFOpenL(
                '/vsicrypt/key=DONT_USE_IN_PRODDONT_USE_IN_PROD,%s,file=/vsimem/file.bin'
                % options, 'wb')
        if fp is None:
            gdaltest.post_reason('fail')
            print(options)
            return 'fail'
        gdal.VSIFWriteL('hello', 1, 5, fp)
        gdal.VSIFCloseL(fp)

        # Re-open without options: they are recorded in the header.
        fp = gdal.VSIFOpenL(
            '/vsicrypt/key=DONT_USE_IN_PRODDONT_USE_IN_PROD,file=/vsimem/file.bin',
            'r')
        content = gdal.VSIFReadL(1, 5, fp).decode('latin1')
        gdal.VSIFCloseL(fp)
        if content != 'hello':
            gdaltest.post_reason('fail')
            print(options)
            return 'fail'

    # Some of those algs might be missing from the CryptoPP build, so only
    # check the round-trip when the creation actually succeeded.
    for options in [
            'alg=Blowfish', 'alg=Camellia', 'alg=CAST256', 'alg=MARS',
            'alg=IDEA', 'alg=RC5', 'alg=RC6', 'alg=Serpent', 'alg=SHACAL2',
            'alg=Twofish', 'alg=XTEA'
    ]:
        gdal.Unlink('/vsimem/file.bin')
        with gdaltest.error_handler():
            fp = gdal.VSIFOpenL(
                '/vsicrypt/key=DONT_USE_IN_PROD,%s,file=/vsimem/file.bin' %
                options, 'wb')
        if fp is not None:
            gdal.VSIFWriteL('hello', 1, 5, fp)
            gdal.VSIFCloseL(fp)
            fp = gdal.VSIFOpenL(
                '/vsicrypt/key=DONT_USE_IN_PROD,file=/vsimem/file.bin', 'rb')
            content = gdal.VSIFReadL(1, 5, fp).decode('latin1')
            gdal.VSIFCloseL(fp)
            if content != 'hello':
                gdaltest.post_reason('fail')
                print(options)
                return 'fail'

    # Test key generation
    # Do NOT set VSICRYPT_CRYPTO_RANDOM=NO in production. This is just to speed up tests !
    gdal.SetConfigOption("VSICRYPT_CRYPTO_RANDOM", "NO")
    fp = gdal.VSIFOpenL(
        '/vsicrypt/key=GENERATE_IT,add_key_check=yes,file=/vsimem/file.bin',
        'wb')
    gdal.SetConfigOption("VSICRYPT_CRYPTO_RANDOM", None)

    # Get the generated random key
    key_b64 = gdal.GetConfigOption('VSICRYPT_KEY_B64')
    if key_b64 is None:
        gdaltest.post_reason('fail')
        return 'fail'

    gdal.VSIFWriteL('hello', 1, 5, fp)
    gdal.VSIFCloseL(fp)

    fp = gdal.VSIFOpenL('/vsicrypt//vsimem/file.bin', 'rb')
    content = gdal.VSIFReadL(1, 5, fp).decode('latin1')
    gdal.VSIFCloseL(fp)
    if content != 'hello':
        gdaltest.post_reason('fail')
        # Fixed: used to print the stale 'options' loop variable left over
        # from the loops above; print the mismatching content instead.
        print(content)
        return 'fail'

    gdal.SetConfigOption('VSICRYPT_KEY_B64', None)

    # Explicit key_b64= in the connection string must also work.
    fp = gdal.VSIFOpenL('/vsicrypt/key_b64=%s,file=/vsimem/file.bin' % key_b64,
                        'rb')
    content = gdal.VSIFReadL(1, 5, fp).decode('latin1')
    gdal.VSIFCloseL(fp)
    if content != 'hello':
        gdaltest.post_reason('fail')
        # Fixed: same stale print(options) as above.
        print(content)
        return 'fail'

    # Stat without a key must fail.
    with gdaltest.error_handler():
        statRes = gdal.VSIStatL('/vsicrypt//vsimem/file.bin')
    if statRes is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Rename within /vsicrypt, then out of it; both are plain file moves.
    ret = gdal.Rename('/vsicrypt//vsimem/file.bin',
                      '/vsicrypt//vsimem/subdir_crypt/file.bin')
    if ret != 0:
        gdaltest.post_reason('fail')
        print(ret)
        return 'fail'

    ret = gdal.Rename('/vsicrypt//vsimem/subdir_crypt/file.bin',
                      '/vsimem/subdir_crypt/file2.bin')
    if ret != 0:
        gdaltest.post_reason('fail')
        print(ret)
        return 'fail'

    dir_content = gdal.ReadDir('/vsicrypt//vsimem/subdir_crypt')
    if dir_content != ['file2.bin']:
        gdaltest.post_reason('fail')
        print(dir_content)
        return 'fail'

    gdal.Unlink('/vsimem/subdir_crypt/file2.bin')

    return 'success'
def test_vrtmultidim_createmultidimensional():
    """Test CreateMultiDimensional() on the VRT driver.

    Creates dimensions, attributes, arrays and a subgroup, checks that
    duplicate/invalid creations are rejected, and verifies the serialized
    VRT on FlushCache().

    NOTE(review): the expected XML literal was reconstructed with GDAL's
    2-space serializer indentation after whitespace loss in transcription.
    """
    tmpfile = '/vsimem/test.vrt'
    ds = gdal.GetDriverByName('VRT').CreateMultiDimensional(tmpfile)
    rg = ds.GetRootGroup()

    # A dimension belonging to another dataset must be refused below.
    ds_other = gdal.GetDriverByName('VRT').CreateMultiDimensional('')
    dim_other = ds_other.GetRootGroup().CreateDimension('dim', '', '', 4)

    dim = rg.CreateDimension('dim', '', '', 3)
    assert dim
    with gdaltest.error_handler():
        # Empty name and duplicate name are both invalid.
        assert not rg.CreateDimension('', '', '', 1)
        assert not rg.CreateDimension('dim', '', '', 1)

    assert rg.CreateAttribute('attr', [1],
                              gdal.ExtendedDataType.CreateString())
    with gdaltest.error_handler():
        assert not rg.CreateAttribute('', [1],
                                      gdal.ExtendedDataType.CreateString())
        # Multi-dimensional attributes are not supported.
        assert not rg.CreateAttribute('attr_2dim', [1, 2],
                                      gdal.ExtendedDataType.CreateString())
        assert not rg.CreateAttribute('attr', [1],
                                      gdal.ExtendedDataType.CreateString())
        # Oversized attribute must be refused.
        assert not rg.CreateAttribute('attr_too_big', [4000 * 1000 * 1000],
                                      gdal.ExtendedDataType.CreateString())

    ar = rg.CreateMDArray('ar', [dim],
                          gdal.ExtendedDataType.Create(gdal.GDT_Float32))
    assert ar[0]
    with gdaltest.error_handler():
        assert not rg.CreateMDArray(
            '', [dim], gdal.ExtendedDataType.Create(gdal.GDT_Float32))
        assert not rg.CreateMDArray(
            'ar', [dim], gdal.ExtendedDataType.Create(gdal.GDT_Float32))
        # Dimension from a foreign dataset: invalid.
        assert not rg.CreateMDArray(
            'ar2', [dim_other], gdal.ExtendedDataType.Create(gdal.GDT_Float32))

    assert ar.CreateAttribute('attr', [1],
                              gdal.ExtendedDataType.CreateString())
    with gdaltest.error_handler():
        assert not ar.CreateAttribute('', [1],
                                      gdal.ExtendedDataType.CreateString())
        assert not ar.CreateAttribute('attr', [1],
                                      gdal.ExtendedDataType.CreateString())

    subg = rg.CreateGroup('subgroup')
    assert subg
    with gdaltest.error_handler():
        assert not rg.CreateGroup('subgroup')
        assert not rg.CreateGroup('')

    # Serialize to disk and compare against the canonical output.
    ds.FlushCache()

    f = gdal.VSIFOpenL(tmpfile, 'rb')
    got_data = gdal.VSIFReadL(1, 10000, f).decode('ascii')
    gdal.VSIFCloseL(f)
    #print(got_data)
    assert got_data == """<VRTDataset>
  <Group name="/">
    <Dimension name="dim" size="3" />
    <Attribute name="attr">
      <DataType>String</DataType>
    </Attribute>
    <Array name="ar">
      <DataType>Float32</DataType>
      <DimensionRef ref="dim" />
      <Attribute name="attr">
        <DataType>String</DataType>
      </Attribute>
    </Array>
    <Group name="subgroup" />
  </Group>
</VRTDataset>
"""
    _validate(got_data)

    gdal.Unlink(tmpfile)
def test_ogr_mapml_creation_options():
    """Test MapML driver layer creation options.

    Writes a single point feature with every EXTENT_* / HEAD creation
    option set, then checks the generated MapML document verbatim.

    NOTE(review): the expected XML literal was reconstructed with 2-space
    indentation after whitespace loss in transcription — verify against
    actual driver output.
    """

    # Write a MapML file
    options = [
        "HEAD=<title>My title</title>",
        "EXTENT_UNITS=OSMTILE",
        "EXTENT_ACTION=action",
        "EXTENT_XMIN=-123456789",
        "EXTENT_YMIN=-234567890",
        "EXTENT_XMAX=123456789",
        "EXTENT_YMAX=234567890",
        "EXTENT_XMIN_MIN=0",
        "EXTENT_XMIN_MAX=1",
        "EXTENT_YMIN_MIN=2",
        "EXTENT_YMIN_MAX=3",
        "EXTENT_XMAX_MIN=4",
        "EXTENT_XMAX_MAX=5",
        "EXTENT_YMAX_MIN=6",
        "EXTENT_YMAX_MAX=7",
        "EXTENT_ZOOM=18",
        "EXTENT_ZOOM_MIN=15",
        "EXTENT_ZOOM_MAX=20",
        "EXTENT_EXTRA=<foo/>",
    ]
    filename = '/vsimem/out.mapml'
    ds = ogr.GetDriverByName('MapML').CreateDataSource(filename,
                                                       options=options)
    lyr = ds.CreateLayer('lyr')
    f = ogr.Feature(lyr.GetLayerDefn())
    # Longitude -180 is reprojected to Web Mercator in the output.
    f.SetGeometry(ogr.CreateGeometryFromWkt('POINT (-180 0)'))
    lyr.CreateFeature(f)
    ds = None

    f = gdal.VSIFOpenL(filename, "rb")
    xml = gdal.VSIFReadL(1, 10000, f).decode('ascii')
    gdal.VSIFCloseL(f)

    assert xml == """<mapml>
  <head>
    <title>My title</title>
  </head>
  <body>
    <extent action="action" units="OSMTILE">
      <input name="xmin" type="location" units="pcrs" axis="x" position="top-left" value="-123456789" min="0" max="1" />
      <input name="ymin" type="location" units="pcrs" axis="y" position="bottom-right" value="-234567890" min="2" max="3" />
      <input name="xmax" type="location" units="pcrs" axis="x" position="bottom-right" value="123456789" min="4" max="5" />
      <input name="ymax" type="location" units="pcrs" axis="y" position="top-left" value="234567890" min="6" max="7" />
      <input name="projection" type="hidden" value="OSMTILE" />
      <input name="zoom" type="zoom" value="18" min="15" max="20" />
      <foo />
    </extent>
    <feature id="lyr.1" class="lyr">
      <geometry>
        <point>
          <coordinates>-20037508.34 0.00</coordinates>
        </point>
      </geometry>
    </feature>
  </body>
</mapml>
"""

    gdal.Unlink(filename)
def vsicurl_test_redirect():
    """Test /vsicurl handling of S3-style signed-URL redirects.

    Simulates a server that 302-redirects to a time-limited signed URL,
    including clock skew between server and client, expiration of the
    first signed URL mid-read, and re-redirection to a fresh signed URL.
    Returns 'skip', 'fail' or 'success' (old gdaltest convention).
    """
    if gdaltest.is_travis_branch('trusty'):
        print('Skipped on trusty branch, but should be investigated')
        return 'skip'

    if gdaltest.webserver_port == 0:
        return 'skip'

    gdal.VSICurlClearCache()

    handler = webserver.SequentialHandler()
    handler.add('GET', '/test_redirect/', 404)
    # Simulate a big time difference between server and local machine
    current_time = 1500

    def method(request):
        # HEAD on the canonical URL: redirect to the signed URL.
        response = 'HTTP/1.1 302\r\n'
        response += 'Server: foo\r\n'
        response += 'Date: ' + time.strftime(
            "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(current_time)) + '\r\n'
        response += 'Location: %s\r\n' % (
            'http://localhost:%d/foo.s3.amazonaws.com/test_redirected/test.bin?Signature=foo&Expires=%d'
            % (gdaltest.webserver_port, current_time + 30))
        response += '\r\n'
        request.wfile.write(response.encode('ascii'))

    handler.add('HEAD', '/test_redirect/test.bin', custom_method=method)
    # HEAD on the signed URL itself is rejected (S3 behaviour).
    handler.add(
        'HEAD',
        '/foo.s3.amazonaws.com/test_redirected/test.bin?Signature=foo&Expires=%d'
        % (current_time + 30), 403, {'Server': 'foo'}, '')

    def method(request):
        # GET on the signed URL: serve ranges, simulate expiry on the
        # second range, and answer the no-Range probe with full headers.
        if 'Range' in request.headers:
            if request.headers['Range'] == 'bytes=0-16383':
                request.protocol_version = 'HTTP/1.1'
                request.send_response(200)
                request.send_header('Content-type', 'text/plain')
                request.send_header('Content-Range', 'bytes 0-16383/1000000')
                request.send_header('Content-Length', 16384)
                request.send_header('Connection', 'close')
                request.end_headers()
                request.wfile.write(('x' * 16384).encode('ascii'))
            elif request.headers['Range'] == 'bytes=16384-49151':
                # Test expiration of the signed URL
                request.protocol_version = 'HTTP/1.1'
                request.send_response(403)
                request.send_header('Content-Length', 0)
                request.end_headers()
            else:
                request.send_response(404)
                request.send_header('Content-Length', 0)
                request.end_headers()
        else:
            # After a failed attempt on a HEAD, the client should go there
            response = 'HTTP/1.1 200\r\n'
            response += 'Server: foo\r\n'
            response += 'Date: ' + time.strftime(
                "%a, %d %b %Y %H:%M:%S GMT",
                time.gmtime(current_time)) + '\r\n'
            response += 'Content-type: text/plain\r\n'
            response += 'Content-Length: 1000000\r\n'
            response += 'Connection: close\r\n'
            response += '\r\n'
            request.wfile.write(response.encode('ascii'))

    handler.add(
        'GET',
        '/foo.s3.amazonaws.com/test_redirected/test.bin?Signature=foo&Expires=%d'
        % (current_time + 30),
        custom_method=method)

    with webserver.install_http_handler(handler):
        f = gdal.VSIFOpenL(
            '/vsicurl/http://localhost:%d/test_redirect/test.bin' %
            gdaltest.webserver_port, 'rb')
        if f is None:
            gdaltest.post_reason('fail')
            return 'fail'
        # File size must have been learned from the redirected GET.
        gdal.VSIFSeekL(f, 0, 2)
        if gdal.VSIFTellL(f) != 1000000:
            gdaltest.post_reason('fail')
            print(gdal.VSIFTellL(f))
            gdal.VSIFCloseL(f)
            return 'fail'
        gdal.VSIFSeekL(f, 0, 0)

    handler = webserver.SequentialHandler()
    handler.add(
        'GET',
        '/foo.s3.amazonaws.com/test_redirected/test.bin?Signature=foo&Expires=%d'
        % (current_time + 30),
        custom_method=method)
    handler.add(
        'GET',
        '/foo.s3.amazonaws.com/test_redirected/test.bin?Signature=foo&Expires=%d'
        % (current_time + 30),
        custom_method=method)

    current_time = int(time.time())

    def method(request):
        # We should go there after expiration of the first signed URL
        if 'Range' in request.headers and \
                request.headers['Range'] == 'bytes=16384-49151':
            request.protocol_version = 'HTTP/1.1'
            request.send_response(302)
            # Return a new signed URL
            request.send_header(
                'Location',
                'http://localhost:%d/foo.s3.amazonaws.com/test_redirected2/test.bin?Signature=foo&Expires=%d'
                % (request.server.port, current_time + 30))
            request.send_header('Content-Length', 16384)
            request.end_headers()
            request.wfile.write(('x' * 16384).encode('ascii'))

    handler.add('GET', '/test_redirect/test.bin', custom_method=method)

    def method(request):
        # Second signed URL
        if 'Range' in request.headers and \
                request.headers['Range'] == 'bytes=16384-49151':
            request.protocol_version = 'HTTP/1.1'
            request.send_response(200)
            request.send_header('Content-type', 'text/plain')
            request.send_header('Content-Range', 'bytes 16384-16384/1000000')
            request.send_header('Content-Length', 1)
            request.end_headers()
            request.wfile.write('y'.encode('ascii'))

    handler.add(
        'GET',
        '/foo.s3.amazonaws.com/test_redirected2/test.bin?Signature=foo&Expires=%d'
        % (current_time + 30),
        custom_method=method)

    with webserver.install_http_handler(handler):
        # First read stays inside the first 16 KB block.
        content = gdal.VSIFReadL(1, 16383, f).decode('ascii')
        if len(content) != 16383 or content[0] != 'x':
            gdaltest.post_reason('fail')
            print(content)
            gdal.VSIFCloseL(f)
            return 'fail'
        # Second read crosses into the next block, triggering expiry +
        # re-redirection to the second signed URL.
        content = gdal.VSIFReadL(1, 2, f).decode('ascii')
        if content != 'xy':
            gdaltest.post_reason('fail')
            print(content)
            gdal.VSIFCloseL(f)
            return 'fail'

    gdal.VSIFCloseL(f)

    return 'success'
def vsifile_generic(filename):
    """Exercise the basic VSI*L primitives on *filename*.

    Covers write, truncate (grow and shrink), seek/tell, stat (size and
    mtime), read-only enforcement, append mode on existing and missing
    files, and unlink.  Works on any VSI filesystem.  Returns 'success'
    or 'fail' (old gdaltest convention).
    """
    t0 = time.time()

    # --- Create the file; write, truncate and seek on the handle ---
    fh = gdal.VSIFOpenL(filename, 'wb+')
    if fh is None:
        gdaltest.post_reason('failure')
        return 'fail'
    if gdal.VSIFWriteL('0123456789', 1, 10, fh) != 10:
        gdaltest.post_reason('failure')
        return 'fail'
    # Growing then shrinking truncation; neither moves the file pointer.
    if gdal.VSIFTruncateL(fh, 20) != 0:
        gdaltest.post_reason('failure')
        return 'fail'
    if gdal.VSIFTellL(fh) != 10:
        gdaltest.post_reason('failure')
        return 'fail'
    if gdal.VSIFTruncateL(fh, 5) != 0:
        gdaltest.post_reason('failure')
        return 'fail'
    if gdal.VSIFTellL(fh) != 10:
        gdaltest.post_reason('failure')
        return 'fail'
    # Seek to end must land on the truncated size.
    if gdal.VSIFSeekL(fh, 0, 2) != 0:
        gdaltest.post_reason('failure')
        return 'fail'
    if gdal.VSIFTellL(fh) != 5:
        gdaltest.post_reason('failure')
        return 'fail'
    gdal.VSIFWriteL('XX', 1, 2, fh)
    gdal.VSIFCloseL(fh)

    # --- Stat: size must be 5 + 2, mtime close to the creation time ---
    st = gdal.VSIStatL(
        filename, gdal.VSI_STAT_EXISTS_FLAG | gdal.VSI_STAT_NATURE_FLAG
        | gdal.VSI_STAT_SIZE_FLAG)
    if st.size != 7:
        gdaltest.post_reason('failure')
        print(st.size)
        return 'fail'
    if abs(t0 - st.mtime) > 2:
        gdaltest.post_reason('failure')
        print(st.mtime)
        return 'fail'

    # --- Read-only handle: writes and truncation must be refused ---
    fh = gdal.VSIFOpenL(filename, 'rb')
    head = gdal.VSIFReadL(1, 7, fh)
    if gdal.VSIFWriteL('a', 1, 1, fh) != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    if gdal.VSIFTruncateL(fh, 0) == 0:
        gdaltest.post_reason('fail')
        return 'fail'
    gdal.VSIFCloseL(fh)
    if head.decode('ascii') != '01234XX':
        gdaltest.post_reason('failure')
        print(head.decode('ascii'))
        return 'fail'

    # --- Append mode on an existing file ---
    fh = gdal.VSIFOpenL(filename, 'ab')
    gdal.VSIFWriteL('XX', 1, 2, fh)
    gdal.VSIFCloseL(fh)
    st = gdal.VSIStatL(
        filename, gdal.VSI_STAT_EXISTS_FLAG | gdal.VSI_STAT_NATURE_FLAG
        | gdal.VSI_STAT_SIZE_FLAG)
    if st.size != 9:
        gdaltest.post_reason('failure')
        print(st.size)
        return 'fail'

    # --- Unlink must succeed, after which the file no longer stats ---
    if gdal.Unlink(filename) != 0:
        gdaltest.post_reason('failure')
        return 'fail'
    st = gdal.VSIStatL(filename, gdal.VSI_STAT_EXISTS_FLAG)
    if st is not None:
        gdaltest.post_reason('failure')
        return 'fail'

    # --- Append mode on a non-existing file creates it ---
    fh = gdal.VSIFOpenL(filename, 'ab')
    gdal.VSIFWriteL('XX', 1, 2, fh)
    gdal.VSIFCloseL(fh)
    st = gdal.VSIStatL(
        filename, gdal.VSI_STAT_EXISTS_FLAG | gdal.VSI_STAT_NATURE_FLAG
        | gdal.VSI_STAT_SIZE_FLAG)
    if st.size != 2:
        gdaltest.post_reason('failure')
        print(st.size)
        return 'fail'
    if gdal.Unlink(filename) != 0:
        gdaltest.post_reason('failure')
        return 'fail'

    return 'success'
def vsicrypt_6():
    """Test installing the /vsicrypt key through the C-level
    VSISetCryptKey() entry point (reached via ctypes), including a valid
    key, a too-short key, and key erasure.

    Returns 'skip', 'fail' or 'success' (old gdaltest convention).
    """
    try:
        import ctypes
    except:
        return 'skip'

    import testnonboundtoswig
    testnonboundtoswig.testnonboundtoswig_init()

    if testnonboundtoswig.gdal_handle is None:
        return 'skip'

    # Declare the C prototype: void VSISetCryptKey(const char*, int)
    testnonboundtoswig.gdal_handle.VSISetCryptKey.argtypes = [
        ctypes.c_char_p, ctypes.c_int
    ]
    testnonboundtoswig.gdal_handle.VSISetCryptKey.restype = None

    # Set a valid key
    testnonboundtoswig.gdal_handle.VSISetCryptKey(
        'DONT_USE_IN_PROD'.encode('ASCII'), 16)

    if not gdaltest.has_vsicrypt:
        return 'skip'

    # Round-trip using the globally installed key (no key= in the path).
    fp = gdal.VSIFOpenL('/vsicrypt/add_key_check=yes,file=/vsimem/file.bin',
                        'wb+')
    if fp is None:
        gdaltest.post_reason('fail')
        return 'fail'
    gdal.VSIFWriteL('hello', 1, 5, fp)
    gdal.VSIFCloseL(fp)

    fp = gdal.VSIFOpenL('/vsicrypt//vsimem/file.bin', 'rb')
    content = gdal.VSIFReadL(1, 5, fp).decode('latin1')
    gdal.VSIFCloseL(fp)
    if content != 'hello':
        gdaltest.post_reason('fail')
        return 'fail'

    # Rewrite and re-read, still with the global key.
    fp = gdal.VSIFOpenL('/vsicrypt//vsimem/file.bin', 'wb+')
    if fp is None:
        gdaltest.post_reason('fail')
        return 'fail'
    gdal.VSIFWriteL('hello', 1, 5, fp)
    gdal.VSIFCloseL(fp)

    fp = gdal.VSIFOpenL('/vsicrypt//vsimem/file.bin', 'rb')
    content = gdal.VSIFReadL(1, 5, fp).decode('latin1')
    gdal.VSIFCloseL(fp)
    if content != 'hello':
        gdaltest.post_reason('fail')
        return 'fail'

    # Set a too short key: all opens must now fail.
    testnonboundtoswig.gdal_handle.VSISetCryptKey('bbc'.encode('ASCII'), 3)
    with gdaltest.error_handler():
        fp = gdal.VSIFOpenL('/vsicrypt//vsimem/file.bin', 'rb')
    if fp is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    with gdaltest.error_handler():
        fp = gdal.VSIFOpenL('/vsicrypt//vsimem/file.bin', 'wb+')
    if fp is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Erase key: opening without any key must fail.
    testnonboundtoswig.gdal_handle.VSISetCryptKey(None, 0)
    with gdaltest.error_handler():
        fp = gdal.VSIFOpenL('/vsicrypt//vsimem/file.bin', 'wb+')
    if fp is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    gdal.Unlink('/vsimem/file.bin')

    return 'success'
def vsiswift_extra_1():
    """Extra /vsiswift tests run against a real Swift endpoint.

    Requires the SWIFT_RESOURCE config option.  When it names a bare
    bucket, exercises directory listing, Mkdir/Rmdir semantics and
    read/write/unlink of a small object; when it names an object inside a
    bucket, only checks that the object is readable through /vsiswift/ and
    /vsiswift_streaming/.  Returns 'skip', 'fail' or 'success'.
    """
    if not gdaltest.built_against_curl():
        return 'skip'

    swift_resource = gdal.GetConfigOption('SWIFT_RESOURCE')
    if swift_resource is None:
        print('Missing SWIFT_RESOURCE for running gdaltest_list_extra')
        return 'skip'

    # No '/' means SWIFT_RESOURCE designates a bucket, not an object.
    if swift_resource.find('/') < 0:
        path = '/vsiswift/' + swift_resource
        statres = gdal.VSIStatL(path)
        if statres is None or not stat.S_ISDIR(statres.mode):
            gdaltest.post_reason('fail')
            print('%s is not a valid bucket' % path)
            return 'fail'

        readdir = gdal.ReadDir(path)
        if readdir is None:
            gdaltest.post_reason('fail')
            print('ReadDir() should not return empty list')
            return 'fail'
        # Every listed entry (except the '.' placeholder) must stat.
        for filename in readdir:
            if filename != '.':
                subpath = path + '/' + filename
                if gdal.VSIStatL(subpath) is None:
                    gdaltest.post_reason('fail')
                    print('Stat(%s) should not return an error' % subpath)
                    return 'fail'

        unique_id = 'vsiswift_test'
        subpath = path + '/' + unique_id
        ret = gdal.Mkdir(subpath, 0)
        if ret < 0:
            gdaltest.post_reason('fail')
            print('Mkdir(%s) should not return an error' % subpath)
            return 'fail'

        readdir = gdal.ReadDir(path)
        if unique_id not in readdir:
            gdaltest.post_reason('fail')
            print('ReadDir(%s) should contain %s' % (path, unique_id))
            print(readdir)
            return 'fail'

        # Creating the same directory twice must fail.
        ret = gdal.Mkdir(subpath, 0)
        if ret == 0:
            gdaltest.post_reason('fail')
            print('Mkdir(%s) repeated should return an error' % subpath)
            return 'fail'

        ret = gdal.Rmdir(subpath)
        if ret < 0:
            gdaltest.post_reason('fail')
            print('Rmdir(%s) should not return an error' % subpath)
            return 'fail'

        readdir = gdal.ReadDir(path)
        if unique_id in readdir:
            gdaltest.post_reason('fail')
            print('ReadDir(%s) should not contain %s' % (path, unique_id))
            print(readdir)
            return 'fail'

        # Removing a directory twice must fail.
        ret = gdal.Rmdir(subpath)
        if ret == 0:
            gdaltest.post_reason('fail')
            print('Rmdir(%s) repeated should return an error' % subpath)
            return 'fail'

        ret = gdal.Mkdir(subpath, 0)
        if ret < 0:
            gdaltest.post_reason('fail')
            print('Mkdir(%s) should not return an error' % subpath)
            return 'fail'

        f = gdal.VSIFOpenL(subpath + '/test.txt', 'wb')
        if f is None:
            gdaltest.post_reason('fail')
            return 'fail'
        gdal.VSIFWriteL('hello', 1, 5, f)
        gdal.VSIFCloseL(f)

        # Directory with content must not be removable.
        ret = gdal.Rmdir(subpath)
        if ret == 0:
            gdaltest.post_reason('fail')
            print('Rmdir(%s) on non empty directory should return an error' %
                  subpath)
            return 'fail'

        f = gdal.VSIFOpenL(subpath + '/test.txt', 'rb')
        if f is None:
            gdaltest.post_reason('fail')
            return 'fail'
        data = gdal.VSIFReadL(1, 5, f).decode('utf-8')
        if data != 'hello':
            gdaltest.post_reason('fail')
            print(data)
            return 'fail'
        gdal.VSIFCloseL(f)

        ret = gdal.Unlink(subpath + '/test.txt')
        if ret < 0:
            gdaltest.post_reason('fail')
            print('Unlink(%s) should not return an error' %
                  (subpath + '/test.txt'))
            return 'fail'

        ret = gdal.Rmdir(subpath)
        if ret < 0:
            gdaltest.post_reason('fail')
            print('Rmdir(%s) should not return an error' % subpath)
            return 'fail'

        return 'success'

    # SWIFT_RESOURCE names an object: it must be readable.
    f = open_for_read('/vsiswift/' + swift_resource)
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    ret = gdal.VSIFReadL(1, 1, f)
    gdal.VSIFCloseL(f)

    if len(ret) != 1:
        gdaltest.post_reason('fail')
        print(ret)
        return 'fail'

    # Same with /vsiswift_streaming/
    f = open_for_read('/vsiswift_streaming/' + swift_resource)
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    ret = gdal.VSIFReadL(1, 1, f)
    gdal.VSIFCloseL(f)

    if len(ret) != 1:
        gdaltest.post_reason('fail')
        print(ret)
        return 'fail'

    # Invalid resource
    gdal.ErrorReset()
    f = open_for_read('/vsiswift_streaming/' + swift_resource +
                      '/invalid_resource.baz')
    if f is not None:
        gdaltest.post_reason('fail')
        print(gdal.VSIGetLastErrorMsg())
        return 'fail'

    return 'success'
def vsicurl_streaming_1():
    """Test /vsicurl_streaming/ against a remote USGS DEM file.

    Checks sequential reads, re-reads after rewind, forward seeks,
    seek-to-end (size discovery), read-after-EOF, and reproducibility of a
    re-read at the same offset.  Returns 'skip', 'fail' or 'success'.

    Fix: the expected 50-byte USGS DEM 'A'-record prefix had its runs of
    spaces collapsed to single spaces, making both comparisons against a
    50-byte read always fail; the padded literal is restored.
    """
    try:
        drv = gdal.GetDriverByName('HTTP')
    except:
        drv = None
    if drv is None:
        return 'skip'

    fp = gdal.VSIFOpenL(
        '/vsicurl_streaming/http://download.osgeo.org/gdal/data/usgsdem/cded/114p01_0100_deme.dem',
        'rb')
    if fp is None:
        # Distinguish "network unavailable" (skip) from a real failure.
        if gdaltest.gdalurlopen(
                'http://download.osgeo.org/gdal/data/usgsdem/cded/114p01_0100_deme.dem'
        ) is None:
            print('cannot open URL')
            return 'skip'
        gdaltest.post_reason('fail')
        return 'fail'

    if gdal.VSIFTellL(fp) != 0:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'
    data = gdal.VSIFReadL(1, 50, fp)
    # 50-byte DEM 'A' record prefix: 30 spaces of padding, the quadrangle
    # name, then the start of 'Base Ma[p]'. Leading/internal spaces are
    # significant (restored after being collapsed in transcription).
    if data.decode(
            'ascii') != '                              114p01DEMe   Base Ma':
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'
    if gdal.VSIFTellL(fp) != 50:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'

    # Rewind and re-read: streaming handle must replay buffered data.
    gdal.VSIFSeekL(fp, 0, 0)
    if gdal.VSIFTellL(fp) != 0:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'
    data = gdal.VSIFReadL(1, 50, fp)
    if data.decode(
            'ascii') != '                              114p01DEMe   Base Ma':
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'
    if gdal.VSIFTellL(fp) != 50:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'

    time.sleep(0.5)
    # Forward seek within the stream.
    gdal.VSIFSeekL(fp, 2001, 0)
    data_2001 = gdal.VSIFReadL(1, 20, fp)
    if data_2001.decode('ascii') != '7-32767-32767-32767-':
        print(data_2001)
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'
    if gdal.VSIFTellL(fp) != 2001 + 20:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'

    # Seek-to-end must report the full remote size.
    gdal.VSIFSeekL(fp, 0, 2)
    if gdal.VSIFTellL(fp) != 9839616:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'

    # Reading at EOF must return nothing.
    nRet = len(gdal.VSIFReadL(1, 10, fp))
    if nRet != 0:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'

    # Backward seek after EOF: data must match the earlier read.
    gdal.VSIFSeekL(fp, 2001, 0)
    data_2001_2 = gdal.VSIFReadL(1, 20, fp)
    if gdal.VSIFTellL(fp) != 2001 + 20:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'
    if data_2001 != data_2001_2:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'

    # Long forward seek past the buffered window.
    gdal.VSIFSeekL(fp, 1024 * 1024 + 100, 0)
    data = gdal.VSIFReadL(1, 20, fp)
    if data.decode('ascii') != '67-32767-32767-32767':
        print(data)
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'
    if gdal.VSIFTellL(fp) != 1024 * 1024 + 100 + 20:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'

    gdal.VSIFCloseL(fp)

    return 'success'
def validate(ds, check_tiled=True, full_check=False):
    """Check if a file is a (Geo)TIFF with cloud optimized compatible structure.

    Args:
      ds: GDAL Dataset for the file to inspect.
      check_tiled: Set to False to ignore missing tiling.
      full_check: Set to True to check tile/strip leader/trailer bytes.
                  Might be slow on remote files

    Returns:
      A tuple, whose first element is an array of error messages
      (empty if there is no error), and the second element, a
      dictionary with the structure of the GeoTIFF file.

    Raises:
      ValidateCloudOptimizedGeoTIFFException: Unable to open the file or the
        file is not a Tiff.
    """
    # IFD_OFFSET / BLOCK_OFFSET metadata items require GDAL >= 2.2.
    if int(gdal.VersionInfo('VERSION_NUM')) < 2020000:
        raise ValidateCloudOptimizedGeoTIFFException(
            'GDAL 2.2 or above required')

    # Python 2/3 portable way of getting the text type.
    unicode_type = type(''.encode('utf-8').decode('utf-8'))
    if isinstance(ds, (str, unicode_type)):
        # Accept a filename as well as an open Dataset; suppress GDAL's own
        # error output while opening so we can raise our exception instead.
        gdal.PushErrorHandler()
        ds = gdal.Open(ds)
        gdal.PopErrorHandler()
        if ds is None:
            raise ValidateCloudOptimizedGeoTIFFException(
                'Invalid file : %s' % gdal.GetLastErrorMsg())
        if ds.GetDriver().ShortName != 'GTiff':
            raise ValidateCloudOptimizedGeoTIFFException(
                'The file is not a GeoTIFF')

    details = {}
    errors = []
    warnings = []
    filename = ds.GetDescription()
    main_band = ds.GetRasterBand(1)
    ovr_count = main_band.GetOverviewCount()
    filelist = ds.GetFileList()
    # COG requires overviews to live inside the file itself.
    if filelist is not None and filename + '.ovr' in filelist:
        errors += [
            'Overviews found in external .ovr file. They should be internal'
        ]

    if main_band.XSize > 512 or main_band.YSize > 512:
        if check_tiled:
            block_size = main_band.GetBlockSize()
            # A block as wide as the image and wider than 1024 px is a strip,
            # not a tile.
            if block_size[0] == main_band.XSize and block_size[0] > 1024:
                errors += [
                    'The file is greater than 512xH or Wx512, but is not tiled'
                ]

        if ovr_count == 0:
            warnings += [
                'The file is greater than 512xH or Wx512, it is recommended '
                'to include internal overviews'
            ]

    ifd_offset = int(main_band.GetMetadataItem('IFD_OFFSET', 'TIFF'))
    ifd_offsets = [ifd_offset]

    block_order_row_major = False
    block_leader_size_as_uint4 = False
    block_trailer_last_4_bytes_repeated = False
    mask_interleaved_with_imagery = False

    # 8 (classic TIFF) / 16 (BigTIFF) are the positions right after the file
    # header; anything else may mean GDAL structural metadata precedes the IFD.
    if ifd_offset not in (8, 16):

        # Check if there is GDAL hidden structural metadata
        f = gdal.VSIFOpenL(filename, 'rb')
        if not f:
            raise ValidateCloudOptimizedGeoTIFFException("Cannot open file")
        signature = struct.unpack('B' * 4, gdal.VSIFReadL(4, 1, f))
        bigtiff = signature in ((0x49, 0x49, 0x2B, 0x00),
                                (0x4D, 0x4D, 0x00, 0x2B))
        if bigtiff:
            expected_ifd_pos = 16
        else:
            expected_ifd_pos = 8
        gdal.VSIFSeekL(f, expected_ifd_pos, 0)
        pattern = "GDAL_STRUCTURAL_METADATA_SIZE=%06d bytes\n" % 0
        got = gdal.VSIFReadL(len(pattern), 1, f).decode('LATIN1')
        if len(got) == len(pattern) and got.startswith(
                'GDAL_STRUCTURAL_METADATA_SIZE='):
            size = int(got[len('GDAL_STRUCTURAL_METADATA_SIZE='):][0:6])
            extra_md = gdal.VSIFReadL(size, 1, f).decode('LATIN1')
            block_order_row_major = 'BLOCK_ORDER=ROW_MAJOR' in extra_md
            block_leader_size_as_uint4 = 'BLOCK_LEADER=SIZE_AS_UINT4' in extra_md
            block_trailer_last_4_bytes_repeated = 'BLOCK_TRAILER=LAST_4_BYTES_REPEATED' in extra_md
            mask_interleaved_with_imagery = 'MASK_INTERLEAVED_WITH_IMAGERY=YES' in extra_md
            if 'KNOWN_INCOMPATIBLE_EDITION=YES' in extra_md:
                errors += [
                    "KNOWN_INCOMPATIBLE_EDITION=YES is declared in the file"
                ]
            expected_ifd_pos += len(pattern) + size
            expected_ifd_pos += expected_ifd_pos % 2  # IFD offset starts on a 2-byte boundary
        gdal.VSIFCloseL(f)

        if expected_ifd_pos != ifd_offsets[0]:
            errors += [
                'The offset of the main IFD should be %d. It is %d instead' %
                (expected_ifd_pos, ifd_offsets[0])
            ]

    details['ifd_offsets'] = {}
    details['ifd_offsets']['main'] = ifd_offset

    for i in range(ovr_count):
        # Check that overviews are by descending sizes
        ovr_band = ds.GetRasterBand(1).GetOverview(i)
        if i == 0:
            if (ovr_band.XSize > main_band.XSize or
                    ovr_band.YSize > main_band.YSize):
                errors += [
                    'First overview has larger dimension than main band'
                ]
        else:
            prev_ovr_band = ds.GetRasterBand(1).GetOverview(i - 1)
            if (ovr_band.XSize > prev_ovr_band.XSize or
                    ovr_band.YSize > prev_ovr_band.YSize):
                errors += [
                    'Overview of index %d has larger dimension than '
                    'overview of index %d' % (i, i - 1)
                ]

        if check_tiled:
            block_size = ovr_band.GetBlockSize()
            # Same strip-vs-tile heuristic as for the main band.
            if block_size[0] == ovr_band.XSize and block_size[0] > 1024:
                errors += ['Overview of index %d is not tiled' % i]

        # Check that the IFD of descending overviews are sorted by increasing
        # offsets
        ifd_offset = int(ovr_band.GetMetadataItem('IFD_OFFSET', 'TIFF'))
        ifd_offsets.append(ifd_offset)
        details['ifd_offsets']['overview_%d' % i] = ifd_offset
        if ifd_offsets[-1] < ifd_offsets[-2]:
            if i == 0:
                errors += [
                    'The offset of the IFD for overview of index %d is %d, '
                    'whereas it should be greater than the one of the main '
                    'image, which is at byte %d' %
                    (i, ifd_offsets[-1], ifd_offsets[-2])
                ]
            else:
                errors += [
                    'The offset of the IFD for overview of index %d is %d, '
                    'whereas it should be greater than the one of index %d, '
                    'which is at byte %d' %
                    (i, ifd_offsets[-1], i - 1, ifd_offsets[-2])
                ]

    # Check that the imagery starts by the smallest overview and ends with
    # the main resolution dataset

    def get_block_offset(band):
        # Offset of the first allocated block of the band (0 if none).
        blockxsize, blockysize = band.GetBlockSize()
        for y in range(int((band.YSize + blockysize - 1) / blockysize)):
            for x in range(int((band.XSize + blockxsize - 1) / blockxsize)):
                block_offset = band.GetMetadataItem(
                    'BLOCK_OFFSET_%d_%d' % (x, y), 'TIFF')
                if block_offset:
                    return int(block_offset)
        return 0

    block_offset = get_block_offset(main_band)
    data_offsets = [block_offset]
    details['data_offsets'] = {}
    details['data_offsets']['main'] = block_offset
    for i in range(ovr_count):
        ovr_band = ds.GetRasterBand(1).GetOverview(i)
        block_offset = get_block_offset(ovr_band)
        data_offsets.append(block_offset)
        details['data_offsets']['overview_%d' % i] = block_offset

    # Imagery of the smallest overview must follow all IFDs.
    if data_offsets[-1] != 0 and data_offsets[-1] < ifd_offsets[-1]:
        if ovr_count > 0:
            errors += [
                'The offset of the first block of the smallest overview '
                'should be after its IFD'
            ]
        else:
            errors += [
                'The offset of the first block of the image should '
                'be after its IFD'
            ]
    # Walk overviews from smallest to largest: each must precede the next.
    for i in range(len(data_offsets) - 2, 0, -1):
        if data_offsets[i] != 0 and data_offsets[i] < data_offsets[i + 1]:
            errors += [
                'The offset of the first block of overview of index %d should '
                'be after the one of the overview of index %d' % (i - 1, i)
            ]
    if len(data_offsets) >= 2 and data_offsets[0] != 0 and data_offsets[
            0] < data_offsets[1]:
        errors += [
            'The offset of the first block of the main resolution image '
            'should be after the one of the overview of index %d' %
            (ovr_count - 1)
        ]

    # Optional (possibly slow) per-block check of leader/trailer bytes.
    if full_check and (block_order_row_major or block_leader_size_as_uint4 or
                       block_trailer_last_4_bytes_repeated or
                       mask_interleaved_with_imagery):
        f = gdal.VSIFOpenL(filename, 'rb')
        if not f:
            raise ValidateCloudOptimizedGeoTIFFException("Cannot open file")

        full_check_band(f, 'Main resolution image', main_band, errors,
                        block_order_row_major, block_leader_size_as_uint4,
                        block_trailer_last_4_bytes_repeated,
                        mask_interleaved_with_imagery)
        # Only check an internal mask (an external .msk would be listed).
        if main_band.GetMaskFlags() == gdal.GMF_PER_DATASET and \
                (filename + '.msk') not in ds.GetFileList():
            full_check_band(f, 'Mask band of main resolution image',
                            main_band.GetMaskBand(), errors,
                            block_order_row_major, block_leader_size_as_uint4,
                            block_trailer_last_4_bytes_repeated, False)
        for i in range(ovr_count):
            ovr_band = ds.GetRasterBand(1).GetOverview(i)
            full_check_band(f, 'Overview %d' % i, ovr_band, errors,
                            block_order_row_major, block_leader_size_as_uint4,
                            block_trailer_last_4_bytes_repeated,
                            mask_interleaved_with_imagery)
            if ovr_band.GetMaskFlags() == gdal.GMF_PER_DATASET and \
                    (filename + '.msk') not in ds.GetFileList():
                full_check_band(f, 'Mask band of overview %d' % i,
                                ovr_band.GetMaskBand(), errors,
                                block_order_row_major,
                                block_leader_size_as_uint4,
                                block_trailer_last_4_bytes_repeated, False)
        gdal.VSIFCloseL(f)

    return warnings, errors, details
def vrt_read_10():
    """Test that histograms computed on a VRT are serialized back to the .vrt.

    Covers the explicit GetHistogram() case, then the single-source
    optimization path and the multi-source general path, each with both
    GetDefaultHistogram() and GetHistogram().  Returns 'success'/'fail'.
    """
    src_ds = gdal.Open('data/byte.tif')
    mem_ds = gdal.GetDriverByName('GTiff').CreateCopy(
        '/vsimem/vrt_read_10.tif', src_ds)
    vrt_ds = gdal.GetDriverByName('VRT').CreateCopy('/vsimem/vrt_read_10.vrt',
                                                    mem_ds)

    vrt_hist = vrt_ds.GetRasterBand(1).GetHistogram()
    mem_hist = mem_ds.GetRasterBand(1).GetHistogram()

    # Closing the VRT dataset should flush the histogram into the XML file.
    mem_ds = None
    vrt_ds = None

    f = gdal.VSIFOpenL('/vsimem/vrt_read_10.vrt', 'rb')
    content = gdal.VSIFReadL(1, 10000, f).decode('ascii')
    gdal.VSIFCloseL(f)

    # VRT histogram must match the one of the underlying GTiff source.
    if vrt_hist != mem_hist:
        gdaltest.post_reason('fail')
        print(vrt_hist)
        print(mem_hist)
        return 'fail'

    if content.find('<Histograms>') < 0:
        gdaltest.post_reason('fail')
        print(content)
        return 'fail'

    # Single source optimization
    for i in range(2):
        gdal.FileFromMemBuffer(
            '/vsimem/vrt_read_10.vrt',
            """<VRTDataset rasterXSize="20" rasterYSize="20">
  <VRTRasterBand dataType="Byte" band="1">
    <SimpleSource>
      <SourceFilename relativeToVRT="1">vrt_read_10.tif</SourceFilename>
    </SimpleSource>
  </VRTRasterBand>
</VRTDataset>""")

        ds = gdal.Open('/vsimem/vrt_read_10.vrt')
        # Exercise both histogram entry points.
        if i == 0:
            ds.GetRasterBand(1).GetDefaultHistogram()
        else:
            ds.GetRasterBand(1).GetHistogram()
        ds = None

        f = gdal.VSIFOpenL('/vsimem/vrt_read_10.vrt', 'rb')
        content = gdal.VSIFReadL(1, 10000, f).decode('ascii')
        gdal.VSIFCloseL(f)

        if content.find('<Histograms>') < 0:
            gdaltest.post_reason('fail')
            print(content)
            return 'fail'

    # Two sources general case
    for i in range(2):
        gdal.FileFromMemBuffer(
            '/vsimem/vrt_read_10.vrt',
            """<VRTDataset rasterXSize="20" rasterYSize="20">
  <VRTRasterBand dataType="Byte" band="1">
    <SimpleSource>
      <SourceFilename relativeToVRT="1">vrt_read_10.tif</SourceFilename>
    </SimpleSource>
    <SimpleSource>
      <SourceFilename relativeToVRT="1">vrt_read_10.tif</SourceFilename>
    </SimpleSource>
  </VRTRasterBand>
</VRTDataset>""")

        ds = gdal.Open('/vsimem/vrt_read_10.vrt')
        if i == 0:
            ds.GetRasterBand(1).GetDefaultHistogram()
        else:
            ds.GetRasterBand(1).GetHistogram()
        ds = None

        f = gdal.VSIFOpenL('/vsimem/vrt_read_10.vrt', 'rb')
        content = gdal.VSIFReadL(1, 10000, f).decode('ascii')
        gdal.VSIFCloseL(f)

        if content.find('<Histograms>') < 0:
            gdaltest.post_reason('fail')
            print(content)
            return 'fail'

    # Cleanup of the in-memory fixtures.
    gdal.GetDriverByName('GTiff').Delete('/vsimem/vrt_read_10.tif')
    gdal.GetDriverByName('VRT').Delete('/vsimem/vrt_read_10.vrt')

    return 'success'
def test_ecrgtoc_1():
    """Test the ECRG TOC driver on a synthetic TOC.xml with two frames.

    Builds the TOC.xml and the two NITF frame files in /vsimem, then checks
    geotransform, SRS, file list and checksum of the mosaicked result.
    """
    toc_xml = """<Table_of_Contents>
  <file_header file_status="new">
    <file_name>TOC.xml</file_name>
  </file_header>
  <product product_title="ProductTitle">
    <disc id="DiscId">
      <frame_list number_of_frames="2">
        <scale size="1:500 K">
          <frame name="000000009s0013.lf2">
            <frame_path>clfc\\2</frame_path>
            <frame_version>001</frame_version>
            <frame_chart_type>lf</frame_chart_type>
            <frame_zone>2</frame_zone>
          </frame>
          <frame name="000000009t0013.lf2">
            <frame_path>clfc\\2</frame_path>
            <frame_version>001</frame_version>
            <frame_chart_type>lf</frame_chart_type>
            <frame_zone>2</frame_zone>
          </frame>
        </scale>
      </frame_list>
    </disc>
  </product>
  <extension_list>
    <extension code="LF">
      <chart_code>LF</chart_code>
      <chart_type>1:500 K (LFC Day)</chart_type>
      <chart_scale>1:500 K</chart_scale>
      <chart_description>LFC Day</chart_description>
    </extension>
  </extension_list>
</Table_of_Contents>"""

    f = gdal.VSIFOpenL('/vsimem/TOC.xml', 'wb')
    gdal.VSIFWriteL(toc_xml, 1, len(toc_xml), f)
    gdal.VSIFCloseL(f)

    ds = gdal.Open('/vsimem/TOC.xml')
    assert ds is not None

    expected_gt = [
        -85.43147208121826, 0.00059486040609137061, 0.0, 33.166986564299428,
        0.0, -0.00044985604606525913
    ]
    gt = ds.GetGeoTransform()
    for i in range(6):
        # Bug fix: the original only printed a reason on mismatch and kept
        # going, so a wrong geotransform silently passed the test.
        assert abs(gt[i] - expected_gt[i]) <= 1e-10, \
            'did not get expected geotransform: %s' % str(gt)

    wkt = ds.GetProjectionRef()
    assert wkt.find('WGS 84') != -1, 'did not get expected SRS'

    filelist = ds.GetFileList()
    assert len(filelist) == 3, 'did not get expected filelist'

    # First frame: western half, filled with 255.
    ds2 = gdal.GetDriverByName('NITF').Create(
        '/vsimem/clfc/2/000000009s0013.lf2', 2304, 2304, 3,
        options=[
            'ICORDS=G',
            'TRE=GEOLOB=000605184000800256-85.43147208122+33.16698656430'
        ])
    ds2.SetGeoTransform([
        -85.43147208122, 0.00059486040609137061, 0.0, 33.16698656430, 0.0,
        -0.00044985604606525913
    ])
    ds2.SetProjection(wkt)
    ds2.GetRasterBand(1).Fill(255)
    ds2 = None

    # Second frame: eastern half, left at the default fill.
    ds2 = gdal.GetDriverByName('NITF').Create(
        '/vsimem/clfc/2/000000009t0013.lf2', 2304, 2304, 3,
        options=[
            'ICORDS=G',
            'TRE=GEOLOB=000605184000800256-84.06091370558+33.16698656430'
        ])
    ds2.SetGeoTransform([
        -84.06091370558, 0.00059486040609137061, 0.0, 33.16698656430, 0.0,
        -0.00044985604606525913
    ])
    ds2.SetProjection(wkt)
    ds2 = None

    cs = ds.GetRasterBand(1).Checksum()
    ds = None

    assert cs == 5966, 'bad checksum'
def vsiaz_fake_write():
    """Test /vsiaz/ write paths against a fake local Azure Blob webserver.

    Scenarios: BlockBlob creation, illegal read/seek on a write handle,
    server-side PUT failure, overwrite of an incompatible blob type, and
    AppendBlob creation/failures (chunked via VSIAZ_CHUNK_SIZE_BYTES).
    Returns 'skip'/'success'/'fail'.
    """
    if gdaltest.webserver_port == 0:
        return 'skip'

    gdal.VSICurlClearCache()

    # Test creation of BlockBob
    f = gdal.VSIFOpenL('/vsiaz/test_copy/file.bin', 'wb')
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'

    handler = webserver.SequentialHandler()

    def method(request):
        # Validate the exact headers the VSIAZ driver is expected to send
        # for a single-shot BlockBlob upload.
        h = request.headers
        if 'Authorization' not in h or \
           h['Authorization'] != 'SharedKey myaccount:AigkrY7q66WCrx3JRKBte56k7kxV2cxB/ZyGNubxk5I=' or \
           'Expect' not in h or h['Expect'] != '100-continue' or \
           'Content-Length' not in h or h['Content-Length'] != '40000' or \
           'x-ms-date' not in h or h['x-ms-date'] != 'my_timestamp' or \
           'x-ms-blob-type' not in h or h['x-ms-blob-type'] != 'BlockBlob':
            sys.stderr.write('Bad headers: %s\n' % str(h))
            request.send_response(403)
            return

        request.protocol_version = 'HTTP/1.1'
        # Driver uses Expect: 100-continue, so acknowledge before the body.
        request.wfile.write('HTTP/1.1 100 Continue\r\n\r\n'.encode('ascii'))
        content = request.rfile.read(40000).decode('ascii')
        if len(content) != 40000:
            sys.stderr.write('Bad headers: %s\n' % str(request.headers))
            request.send_response(403)
            request.send_header('Content-Length', 0)
            request.end_headers()
            return
        request.send_response(201)
        request.send_header('Content-Length', 0)
        request.end_headers()

    handler.add('PUT', '/azure/blob/myaccount/test_copy/file.bin',
                custom_method=method)
    with webserver.install_http_handler(handler):
        # Two writes that must be coalesced into one 40000-byte PUT at close.
        ret = gdal.VSIFWriteL('x' * 35000, 1, 35000, f)
        ret += gdal.VSIFWriteL('x' * 5000, 1, 5000, f)
        if ret != 40000:
            gdaltest.post_reason('fail')
            print(ret)
            gdal.VSIFCloseL(f)
            return 'fail'
        gdal.VSIFCloseL(f)

    # Simulate illegal read
    f = gdal.VSIFOpenL('/vsiaz/test_copy/file.bin', 'wb')
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    with gdaltest.error_handler():
        # Reading a write-only handle must fail (empty result).
        ret = gdal.VSIFReadL(1, 1, f)
    if ret:
        gdaltest.post_reason('fail')
        print(ret)
        return 'fail'
    gdal.VSIFCloseL(f)

    # Simulate illegal seek
    f = gdal.VSIFOpenL('/vsiaz/test_copy/file.bin', 'wb')
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    with gdaltest.error_handler():
        # Seeking to a non-current position on a write handle must fail.
        ret = gdal.VSIFSeekL(f, 1, 0)
    if ret == 0:
        gdaltest.post_reason('fail')
        return 'fail'
    gdal.VSIFCloseL(f)

    # Simulate failure when putting BlockBob
    f = gdal.VSIFOpenL('/vsiaz/test_copy/file.bin', 'wb')
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'

    handler = webserver.SequentialHandler()

    def method(request):
        request.protocol_version = 'HTTP/1.1'
        request.send_response(403)
        request.send_header('Content-Length', 0)
        request.end_headers()

    handler.add('PUT', '/azure/blob/myaccount/test_copy/file.bin',
                custom_method=method)

    # No-op seeks/tells on the current position are allowed on write handles.
    if gdal.VSIFSeekL(f, 0, 0) != 0:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(f)
        return 'fail'

    gdal.VSIFWriteL('x' * 35000, 1, 35000, f)

    if gdal.VSIFTellL(f) != 35000:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(f)
        return 'fail'

    if gdal.VSIFSeekL(f, 35000, 0) != 0:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(f)
        return 'fail'

    if gdal.VSIFSeekL(f, 0, 1) != 0:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(f)
        return 'fail'
    if gdal.VSIFSeekL(f, 0, 2) != 0:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(f)
        return 'fail'

    if gdal.VSIFEofL(f) != 0:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(f)
        return 'fail'

    with webserver.install_http_handler(handler):
        with gdaltest.error_handler():
            # The upload happens at close; a 403 must surface as an error.
            ret = gdal.VSIFCloseL(f)
        if ret == 0:
            gdaltest.post_reason('fail')
            print(ret)
            gdal.VSIFCloseL(f)
            return 'fail'

    # Simulate creation of BlockBob over an existing blob of incompatible type
    f = gdal.VSIFOpenL('/vsiaz/test_copy/file.bin', 'wb')
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'

    handler = webserver.SequentialHandler()
    # 409 Conflict -> driver deletes the old blob and retries the PUT.
    handler.add('PUT', '/azure/blob/myaccount/test_copy/file.bin', 409)
    handler.add('DELETE', '/azure/blob/myaccount/test_copy/file.bin', 202)
    handler.add('PUT', '/azure/blob/myaccount/test_copy/file.bin', 201)
    with webserver.install_http_handler(handler):
        gdal.VSIFCloseL(f)

    # Test creation of AppendBlob
    # A tiny chunk size forces the AppendBlob code path (16 bytes -> 10 + 6).
    gdal.SetConfigOption('VSIAZ_CHUNK_SIZE_BYTES', '10')
    f = gdal.VSIFOpenL('/vsiaz/test_copy/file.bin', 'wb')
    gdal.SetConfigOption('VSIAZ_CHUNK_SIZE_BYTES', None)
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'

    handler = webserver.SequentialHandler()

    def method(request):
        # Initial zero-length PUT that creates the AppendBlob.
        h = request.headers
        if 'Authorization' not in h or \
           h['Authorization'] != 'SharedKey myaccount:KimVui3ptY9D5ftLlsI7CNOgK36CNAEzsXqcuHskdEY=' or \
           'Content-Length' not in h or h['Content-Length'] != '0' or \
           'x-ms-date' not in h or h['x-ms-date'] != 'my_timestamp' or \
           'x-ms-blob-type' not in h or h['x-ms-blob-type'] != 'AppendBlob':
            sys.stderr.write('Bad headers: %s\n' % str(h))
            request.send_response(403)
            return
        request.protocol_version = 'HTTP/1.1'
        request.send_response(201)
        request.send_header('Content-Length', 0)
        request.end_headers()

    handler.add('PUT', '/azure/blob/myaccount/test_copy/file.bin',
                custom_method=method)

    def method(request):
        # First appendblock: full 10-byte chunk.
        h = request.headers
        if 'Content-Length' not in h or h['Content-Length'] != '10' or \
           'x-ms-date' not in h or h['x-ms-date'] != 'my_timestamp' or \
           'x-ms-blob-type' not in h or h['x-ms-blob-type'] != 'AppendBlob':
            sys.stderr.write('Bad headers: %s\n' % str(h))
            request.send_response(403)
            return
        request.protocol_version = 'HTTP/1.1'
        content = request.rfile.read(10).decode('ascii')
        if content != '0123456789':
            sys.stderr.write('Bad headers: %s\n' % str(request.headers))
            request.send_response(403)
            request.send_header('Content-Length', 0)
            request.end_headers()
            return
        request.send_response(201)
        request.send_header('Content-Length', 0)
        request.end_headers()

    handler.add('PUT',
                '/azure/blob/myaccount/test_copy/file.bin?comp=appendblock',
                custom_method=method)

    def method(request):
        # Second appendblock: remaining 6 bytes.
        h = request.headers
        if 'Content-Length' not in h or h['Content-Length'] != '6' or \
           'x-ms-date' not in h or h['x-ms-date'] != 'my_timestamp' or \
           'x-ms-blob-type' not in h or h['x-ms-blob-type'] != 'AppendBlob':
            sys.stderr.write('Bad headers: %s\n' % str(h))
            request.send_response(403)
            return
        request.protocol_version = 'HTTP/1.1'
        content = request.rfile.read(6).decode('ascii')
        if content != 'abcdef':
            sys.stderr.write('Bad headers: %s\n' % str(request.headers))
            request.send_response(403)
            request.send_header('Content-Length', 0)
            request.end_headers()
            return
        request.send_response(201)
        request.send_header('Content-Length', 0)
        request.end_headers()

    handler.add('PUT',
                '/azure/blob/myaccount/test_copy/file.bin?comp=appendblock',
                custom_method=method)

    with webserver.install_http_handler(handler):
        ret = gdal.VSIFWriteL('0123456789abcdef', 1, 16, f)
        if ret != 16:
            gdaltest.post_reason('fail')
            print(ret)
            gdal.VSIFCloseL(f)
            return 'fail'
        gdal.VSIFCloseL(f)

    # Test failed creation of AppendBlob
    gdal.SetConfigOption('VSIAZ_CHUNK_SIZE_BYTES', '10')
    f = gdal.VSIFOpenL('/vsiaz/test_copy/file.bin', 'wb')
    gdal.SetConfigOption('VSIAZ_CHUNK_SIZE_BYTES', None)
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'

    handler = webserver.SequentialHandler()

    def method(request):
        request.protocol_version = 'HTTP/1.1'
        request.send_response(403)
        request.send_header('Content-Length', 0)
        request.end_headers()

    handler.add('PUT', '/azure/blob/myaccount/test_copy/file.bin',
                custom_method=method)

    with webserver.install_http_handler(handler):
        with gdaltest.error_handler():
            ret = gdal.VSIFWriteL('0123456789abcdef', 1, 16, f)
        if ret != 0:
            gdaltest.post_reason('fail')
            print(ret)
            gdal.VSIFCloseL(f)
            return 'fail'
        gdal.VSIFCloseL(f)

    # Test failed writing of a block of an AppendBlob
    gdal.SetConfigOption('VSIAZ_CHUNK_SIZE_BYTES', '10')
    f = gdal.VSIFOpenL('/vsiaz/test_copy/file.bin', 'wb')
    gdal.SetConfigOption('VSIAZ_CHUNK_SIZE_BYTES', None)
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'

    handler = webserver.SequentialHandler()
    # Creation succeeds, but appending the first block fails.
    handler.add('PUT', '/azure/blob/myaccount/test_copy/file.bin', 201)
    handler.add('PUT',
                '/azure/blob/myaccount/test_copy/file.bin?comp=appendblock',
                403)
    with webserver.install_http_handler(handler):
        with gdaltest.error_handler():
            ret = gdal.VSIFWriteL('0123456789abcdef', 1, 16, f)
        if ret != 0:
            gdaltest.post_reason('fail')
            print(ret)
            gdal.VSIFCloseL(f)
            return 'fail'
        gdal.VSIFCloseL(f)

    return 'success'
def __init__(self, filename, access):
    """Open *filename* through GDAL's VSI virtual file system.

    filename: a plain path or any /vsi*/ URL understood by GDAL.
    access: fopen()-style mode string, e.g. 'rb' or 'wb'.
    """
    # VSIFOpenL returns None on failure; callers must check self.f.
    self.f = gdal.VSIFOpenL(filename, access)
def vsiaz_extra_1():
    """Extra /vsiaz/ tests against a real Azure resource (AZ_RESOURCE).

    When AZ_RESOURCE is a bare container name, exercises directory listing,
    Mkdir/Rmdir/Unlink and file round-trips.  When it points at an object,
    exercises reads via /vsiaz/, /vsiaz_streaming/ and a signed URL.
    Returns 'skip'/'success'/'fail'.
    """
    if not gdaltest.built_against_curl():
        return 'skip'

    az_resource = gdal.GetConfigOption('AZ_RESOURCE')
    if az_resource is None:
        print('Missing AZ_RESOURCE for running gdaltest_list_extra')
        return 'skip'

    # No '/' in AZ_RESOURCE -> it names a container: test directory semantics.
    if az_resource.find('/') < 0:
        path = '/vsiaz/' + az_resource
        statres = gdal.VSIStatL(path)
        if statres is None or not stat.S_ISDIR(statres.mode):
            gdaltest.post_reason('fail')
            print('%s is not a valid bucket' % path)
            return 'fail'

        readdir = gdal.ReadDir(path)
        if readdir is None:
            gdaltest.post_reason('fail')
            print('ReadDir() should not return empty list')
            return 'fail'
        # Every listed entry must be stat'able.
        for filename in readdir:
            if filename != '.':
                subpath = path + '/' + filename
                if gdal.VSIStatL(subpath) is None:
                    gdaltest.post_reason('fail')
                    print('Stat(%s) should not return an error' % subpath)
                    return 'fail'

        unique_id = 'vsiaz_test'
        subpath = path + '/' + unique_id
        ret = gdal.Mkdir(subpath, 0)
        if ret < 0:
            gdaltest.post_reason('fail')
            print('Mkdir(%s) should not return an error' % subpath)
            return 'fail'

        readdir = gdal.ReadDir(path)
        if unique_id not in readdir:
            gdaltest.post_reason('fail')
            print('ReadDir(%s) should contain %s' % (path, unique_id))
            print(readdir)
            return 'fail'

        # Creating the same directory twice must fail.
        ret = gdal.Mkdir(subpath, 0)
        if ret == 0:
            gdaltest.post_reason('fail')
            print('Mkdir(%s) repeated should return an error' % subpath)
            return 'fail'

        ret = gdal.Rmdir(subpath)
        if ret < 0:
            gdaltest.post_reason('fail')
            print('Rmdir(%s) should not return an error' % subpath)
            return 'fail'

        readdir = gdal.ReadDir(path)
        if unique_id in readdir:
            gdaltest.post_reason('fail')
            print('ReadDir(%s) should not contain %s' % (path, unique_id))
            print(readdir)
            return 'fail'

        # Removing an already-removed directory must fail.
        ret = gdal.Rmdir(subpath)
        if ret == 0:
            gdaltest.post_reason('fail')
            print('Rmdir(%s) repeated should return an error' % subpath)
            return 'fail'

        ret = gdal.Mkdir(subpath, 0)
        if ret < 0:
            gdaltest.post_reason('fail')
            print('Mkdir(%s) should not return an error' % subpath)
            return 'fail'

        # File round-trip inside the new directory.
        f = gdal.VSIFOpenL(subpath + '/test.txt', 'wb')
        if f is None:
            gdaltest.post_reason('fail')
            return 'fail'
        gdal.VSIFWriteL('hello', 1, 5, f)
        gdal.VSIFCloseL(f)

        # Removing a non-empty directory must fail.
        ret = gdal.Rmdir(subpath)
        if ret == 0:
            gdaltest.post_reason('fail')
            print('Rmdir(%s) on non empty directory should return an error' %
                  subpath)
            return 'fail'

        f = gdal.VSIFOpenL(subpath + '/test.txt', 'rb')
        if f is None:
            gdaltest.post_reason('fail')
            return 'fail'
        data = gdal.VSIFReadL(1, 5, f).decode('utf-8')
        if data != 'hello':
            gdaltest.post_reason('fail')
            print(data)
            return 'fail'
        gdal.VSIFCloseL(f)

        ret = gdal.Unlink(subpath + '/test.txt')
        if ret < 0:
            gdaltest.post_reason('fail')
            print('Unlink(%s) should not return an error' %
                  (subpath + '/test.txt'))
            return 'fail'

        ret = gdal.Rmdir(subpath)
        if ret < 0:
            gdaltest.post_reason('fail')
            print('Rmdir(%s) should not return an error' % subpath)
            return 'fail'

        return 'success'

    # AZ_RESOURCE points at an object: test the read paths.
    f = open_for_read('/vsiaz/' + az_resource)
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    ret = gdal.VSIFReadL(1, 1, f)
    gdal.VSIFCloseL(f)
    if len(ret) != 1:
        gdaltest.post_reason('fail')
        print(ret)
        return 'fail'

    # Same with /vsiaz_streaming/
    f = open_for_read('/vsiaz_streaming/' + az_resource)
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    ret = gdal.VSIFReadL(1, 1, f)
    gdal.VSIFCloseL(f)

    if len(ret) != 1:
        gdaltest.post_reason('fail')
        print(ret)
        return 'fail'

    if False:  # pylint: disable=using-constant-test
        # we actually try to read at read() time and bSetError = false
        # Invalid bucket : "The specified bucket does not exist"
        gdal.ErrorReset()
        f = open_for_read('/vsiaz/not_existing_bucket/foo')
        with gdaltest.error_handler():
            gdal.VSIFReadL(1, 1, f)
        gdal.VSIFCloseL(f)
        if gdal.VSIGetLastErrorMsg() == '':
            gdaltest.post_reason('fail')
            print(gdal.VSIGetLastErrorMsg())
            return 'fail'

    # Invalid resource
    gdal.ErrorReset()
    f = open_for_read('/vsiaz_streaming/' + az_resource +
                      '/invalid_resource.baz')
    if f is not None:
        gdaltest.post_reason('fail')
        print(gdal.VSIGetLastErrorMsg())
        return 'fail'

    # Test GetSignedURL()
    signed_url = gdal.GetSignedURL('/vsiaz/' + az_resource)
    f = open_for_read('/vsicurl_streaming/' + signed_url)
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    ret = gdal.VSIFReadL(1, 1, f)
    gdal.VSIFCloseL(f)

    if len(ret) != 1:
        gdaltest.post_reason('fail')
        print(ret)
        return 'fail'

    return 'success'
def test_eedai_4():
    """Test EEDAI raster reads served from pre-seeded /vsimem responses.

    Fakes the Earth Engine asset description and the :getPixels POST replies
    (keyed by the exact POSTFIELDS string), then checks band-level and
    dataset-level ReadRaster at full and sub-sampled resolution.
    """
    if gdaltest.eedai_drv is None:
        pytest.skip()

    # Fake asset metadata: three byte bands on the same UTM 10N grid.
    gdal.FileFromMemBuffer(
        '/vsimem/ee/projects/earthengine-public/assets/image',
        json.dumps({
            'type': 'IMAGE',
            'bands': [{
                "id": "B1",
                "dataType": {
                    "precision": "INT",
                    "range": {
                        "max": 255
                    }
                },
                "grid": {
                    "crsCode": "EPSG:32610",
                    "affineTransform": {
                        "translateX": 499980,
                        "translateY": 4200000,
                        "scaleX": 60,
                        "scaleY": -60
                    },
                    "dimensions": {
                        "width": 1830,
                        "height": 1831
                    }
                }
            }, {
                "id": "B2",
                "dataType": {
                    "precision": "INT",
                    "range": {
                        "max": 255
                    }
                },
                "grid": {
                    "crsCode": "EPSG:32610",
                    "affineTransform": {
                        "translateX": 499980,
                        "translateY": 4200000,
                        "scaleX": 60,
                        "scaleY": -60
                    },
                    "dimensions": {
                        "width": 1830,
                        "height": 1831
                    }
                }
            }, {
                "id": "B3",
                "dataType": {
                    "precision": "INT",
                    "range": {
                        "max": 255
                    }
                },
                "grid": {
                    "crsCode": "EPSG:32610",
                    "affineTransform": {
                        "translateX": 499980,
                        "translateY": 4200000,
                        "scaleX": 60,
                        "scaleY": -60
                    },
                    "dimensions": {
                        "width": 1830,
                        "height": 1831
                    }
                }
            }]
        }))

    gdal.SetConfigOption('EEDA_BEARER', 'mybearer')
    gdal.SetConfigOption('EEDA_URL', '/vsimem/ee/')
    ds = gdal.Open('EEDAI:image')
    gdal.SetConfigOption('EEDA_URL', None)

    # Build the PNG payload that the fake :getPixels endpoint will return:
    # a 256x256 RGB image with constant values 127/128/129.
    mem_ds = gdal.GetDriverByName('MEM').Create('', 256, 256, 3)
    mem_ds.GetRasterBand(1).Fill(127)
    mem_ds.GetRasterBand(2).Fill(128)
    mem_ds.GetRasterBand(3).Fill(129)
    gdal.GetDriverByName('PNG').CreateCopy('/vsimem/out.png', mem_ds)
    f = gdal.VSIFOpenL('/vsimem/out.png', 'rb')
    png_data = gdal.VSIFReadL(1, 1000000, f)
    gdal.VSIFCloseL(f)
    gdal.Unlink('/vsimem/out.png')

    # The key must match the driver's POSTFIELDS byte-for-byte.
    gdal.FileFromMemBuffer(
        '/vsimem/ee/projects/earthengine-public/assets/image:getPixels&CUSTOMREQUEST=POST&POSTFIELDS={ "fileFormat": "PNG", "bandIds": [ "B1", "B2", "B3" ], "grid": { "affineTransform": { "translateX": 499980.0, "translateY": 4200000.0, "scaleX": 60.0, "scaleY": -60.0, "shearX": 0.0, "shearY": 0.0 }, "dimensions": { "width": 256, "height": 256 } } }',
        png_data)
    got_data = ds.GetRasterBand(1).ReadRaster(0, 0, 1, 1)
    got_data = struct.unpack('B', got_data)[0]
    assert got_data == 127

    # Same with dataset RasterIO
    got_data = ds.ReadRaster(0, 0, 1, 1)
    got_data = struct.unpack('B' * 3, got_data)
    assert got_data == (127, 128, 129)

    # Same after flushing cache
    ds.FlushCache()
    got_data = ds.ReadRaster(0, 0, 1, 1)
    got_data = struct.unpack('B' * 3, got_data)
    assert got_data == (127, 128, 129)

    # Sub-sampled query
    gdal.FileFromMemBuffer(
        '/vsimem/ee/projects/earthengine-public/assets/image:getPixels&CUSTOMREQUEST=POST&POSTFIELDS={ "fileFormat": "PNG", "bandIds": [ "B1", "B2", "B3" ], "grid": { "affineTransform": { "translateX": 499980.0, "translateY": 4200000.0, "scaleX": 120.0, "scaleY": -120.06557377049181, "shearX": 0.0, "shearY": 0.0 }, "dimensions": { "width": 256, "height": 256 } } }',
        png_data)
    got_data = ds.GetRasterBand(1).ReadRaster(0, 0, 2, 2,
                                              buf_xsize=1,
                                              buf_ysize=1)
    got_data = struct.unpack('B', got_data)[0]
    assert got_data == 127

    # Same after flushing cache with dataset RasterIO
    ds.FlushCache()
    got_data = ds.ReadRaster(0, 0, 2, 2, buf_xsize=1, buf_ysize=1)
    got_data = struct.unpack('B' * 3, got_data)
    assert got_data == (127, 128, 129)

    ds = None
    # Reset the bearer token so later tests start clean.
    gdal.SetConfigOption('EEDA_BEARER', None)
def ogr_tiger_4():
    """Test reading a TIGER dataset copied into /vsimem, then a few features.

    Copies the cached TGR01001 module into /vsimem, opens it from there, then
    reopens the on-disk copy (#4443) and spot-checks feature counts,
    attributes and one geometry.  Returns 'skip'/'success'/'fail'.
    """
    if ogrtest.tiger_ds is None:
        return 'skip'

    # load all the files into memory.
    for filename in gdal.ReadDir('tmp/cache/TGR01001'):
        if filename.startswith('.'):
            continue
        # Bug fix: the original used text-mode open() and never closed the
        # handle — binary mode avoids newline translation / decode errors
        # corrupting the copy, and the context manager closes the file.
        with open('tmp/cache/TGR01001/' + filename, 'rb') as src:
            data = src.read()
        f = gdal.VSIFOpenL('/vsimem/tigertest/' + filename, 'wb')
        gdal.VSIFWriteL(data, 1, len(data), f)
        gdal.VSIFCloseL(f)

    # Try reading.
    ogrtest.tiger_ds = ogr.Open('/vsimem/tigertest/TGR01001.RT1')
    if ogrtest.tiger_ds is None:
        gdaltest.post_reason('fail to open.')
        return 'fail'
    ogrtest.tiger_ds = None

    # also test opening with a filename (#4443)
    ogrtest.tiger_ds = ogr.Open('tmp/cache/TGR01001/TGR01001.RT1')
    if ogrtest.tiger_ds is None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Check a few features.
    cc_layer = ogrtest.tiger_ds.GetLayerByName('CompleteChain')
    if cc_layer.GetFeatureCount() != 19289:
        gdaltest.post_reason('wrong cc feature count')
        return 'fail'

    # Inspect the third feature of the layer.
    feat = cc_layer.GetNextFeature()
    feat = cc_layer.GetNextFeature()
    feat = cc_layer.GetNextFeature()

    if feat.TLID != 2833200 or feat.FRIADDL is not None or feat.BLOCKL != 5000:
        gdaltest.post_reason('wrong attribute on cc feature.')
        return 'fail'

    if ogrtest.check_feature_geometry(
            feat,
            'LINESTRING (-86.4402 32.504137,-86.440313 32.504009,-86.440434 32.503884,-86.440491 32.503805,-86.44053 32.503757,-86.440578 32.503641,-86.440593 32.503515,-86.440588 32.503252,-86.440596 32.50298)',
            max_error=0.000001) != 0:
        return 'fail'

    feat = ogrtest.tiger_ds.GetLayerByName('TLIDRange').GetNextFeature()
    if feat.MODULE != 'TGR01001' or feat.TLMINID != 2822718:
        gdaltest.post_reason('got wrong TLIDRange attributes')
        return 'fail'

    # Try to recover memory from /vsimem.
    for filename in gdal.ReadDir('tmp/cache/TGR01001'):
        if filename.startswith('.'):
            continue
        gdal.Unlink('/vsimem/tigertest/' + filename)

    return 'success'
def test_vsifile_5():
    """Test VSI_CACHE reads/seeks with several VSI_CACHE_SIZE settings.

    Writes a 1.25 MiB reference file, then for cache sizes 0, 65536 and the
    default, verifies Tell after absolute/relative/end seeks and byte-exact
    reads across cache-chunk boundaries.

    The original duplicated the config-option reset before every failure
    exit; a try/finally now guarantees the reset on every path, including
    unexpected exceptions.
    """
    fp = gdal.VSIFOpenL('tmp/vsifile_5.bin', 'wb')
    # 5*32768 records of 8 hex chars each -> 1310720 bytes.
    ref_data = ''.join(['%08X' % i for i in range(5 * 32768)])
    gdal.VSIFWriteL(ref_data, 1, len(ref_data), fp)
    gdal.VSIFCloseL(fp)

    gdal.SetConfigOption('VSI_CACHE', 'YES')
    try:
        for i in range(3):
            if i == 0:
                gdal.SetConfigOption('VSI_CACHE_SIZE', '0')
            elif i == 1:
                gdal.SetConfigOption('VSI_CACHE_SIZE', '65536')
            else:
                gdal.SetConfigOption('VSI_CACHE_SIZE', None)

            fp = gdal.VSIFOpenL('tmp/vsifile_5.bin', 'rb')

            # Absolute, relative and end-relative seeks must report the
            # expected position.
            gdal.VSIFSeekL(fp, 50000, 0)
            if gdal.VSIFTellL(fp) != 50000:
                pytest.fail()
            gdal.VSIFSeekL(fp, 50000, 1)
            if gdal.VSIFTellL(fp) != 100000:
                pytest.fail()
            gdal.VSIFSeekL(fp, 0, 2)
            if gdal.VSIFTellL(fp) != 5 * 32768 * 8:
                pytest.fail()
            gdal.VSIFReadL(1, 1, fp)

            # Aligned read from the start.
            gdal.VSIFSeekL(fp, 0, 0)
            data = gdal.VSIFReadL(1, 3 * 32768, fp)
            if data.decode('ascii') != ref_data[0:3 * 32768]:
                pytest.fail()

            # Unaligned read crossing cache chunk boundaries.
            gdal.VSIFSeekL(fp, 16384, 0)
            data = gdal.VSIFReadL(1, 5 * 32768, fp)
            if data.decode('ascii') != ref_data[16384:16384 + 5 * 32768]:
                pytest.fail()

            # Over-long read: only the remaining bytes are returned.
            data = gdal.VSIFReadL(1, 50 * 32768, fp)
            if data[0:1130496].decode('ascii') != ref_data[16384 + 5 * 32768:]:
                pytest.fail()

            gdal.VSIFCloseL(fp)
    finally:
        gdal.SetConfigOption('VSI_CACHE_SIZE', None)
        gdal.SetConfigOption('VSI_CACHE', None)
    gdal.Unlink('tmp/vsifile_5.bin')
def extract_tile(ds, src_band_nbr, tile_x, tile_y, jpg_filename):
    """Extract one JPEG-compressed tile of a TIFF dataset as a standalone
    .jpg file, with a PAM .aux.xml sidecar carrying the tile's georeferencing
    when the source dataset has an SRS.

    ds           -- opened GDAL dataset (JPEG-in-TIFF)
    src_band_nbr -- 1-based band number whose tile metadata is consulted
    tile_x, tile_y -- tile indices within the band's block grid
    jpg_filename -- output path (any VSI-capable path)

    Returns 0 on success, 1 on error (messages printed to stdout).
    """
    band = ds.GetRasterBand(src_band_nbr)
    block_offset = band.GetMetadataItem(
        'BLOCK_OFFSET_%d_%d' % (tile_x, tile_y), 'TIFF')
    block_size = band.GetMetadataItem(
        'BLOCK_SIZE_%d_%d' % (tile_x, tile_y), 'TIFF')
    if block_offset is None or block_size is None:
        print('ERROR: Cannot find block (%d,%d)' % (tile_x, tile_y))
        return 1

    # Shared JPEG tables come back as an uppercase hex string 'FFD8...FFD9'.
    jpegtables = band.GetMetadataItem('JPEGTABLES', 'TIFF')
    if jpegtables is not None:
        if (len(jpegtables) % 2) != 0 or \
                jpegtables[0:4] != 'FFD8' or jpegtables[-2:] != 'D9':
            print('ERROR: Invalid JPEG tables')
            print(jpegtables)
            return 1
        # Remove final D9 (EOI) so the tile codestream can be appended.
        jpegtables = jpegtables[0:-2]

    tiff_f = gdal.VSIFOpenL(ds.GetDescription(), 'rb')
    if tiff_f is None:
        print('ERROR: Cannot reopen %s' % ds.GetDescription())
        return 1
    out_f = gdal.VSIFOpenL(jpg_filename, 'wb')
    if out_f is None:
        print('ERROR: Cannot create %s' % jpg_filename)
        gdal.VSIFCloseL(tiff_f)
        return 1

    # Write the JPEG tables, or a bare SOI marker when there are none.
    # bytes.fromhex replaces the original's hand-rolled per-character hex
    # decoder (which only handled uppercase digits) and the per-byte
    # VSIFWriteL(chr(...)) loop with a single bulk write.
    if jpegtables is not None:
        header = bytes.fromhex(jpegtables)
    else:
        header = b'\xFF\xD8'
    gdal.VSIFWriteL(header, 1, len(header), out_f)

    # Write an Adobe APP14 marker for 3-band pixel-interleaved JPEG so the
    # color space is interpreted correctly.
    interleave = ds.GetMetadataItem('INTERLEAVE', 'IMAGE_STRUCTURE')
    # NOTE: this reads the COMPRESSION item (the original misleadingly named
    # it 'photometric').
    compression = ds.GetMetadataItem('COMPRESSION', 'IMAGE_STRUCTURE')
    if interleave == 'PIXEL' and compression == 'JPEG' and ds.RasterCount == 3:
        adobe_app14 = bytes([
            0xFF, 0xEE, 0x00, 0x0E, 0x41, 0x64, 0x6F, 0x62, 0x65, 0x00,
            0x64, 0x00, 0x00, 0x00, 0x00, 0x00
        ])
        gdal.VSIFWriteL(adobe_app14, 1, len(adobe_app14), out_f)

    # Copy the tile's JPEG codestream, skipping its leading SOI (0xFF 0xD8)
    # which we have already emitted above.
    gdal.VSIFSeekL(tiff_f, int(block_offset) + 2, 0)
    data = gdal.VSIFReadL(1, int(block_size) - 2, tiff_f)
    gdal.VSIFCloseL(tiff_f)
    gdal.VSIFWriteL(data, 1, len(data), out_f)
    gdal.VSIFCloseL(out_f)

    aux_xml_filename = '%s.aux.xml' % jpg_filename
    gt = ds.GetGeoTransform()
    srs = ds.GetProjectionRef()
    if srs is not None and srs != '':
        # Shift the geotransform origin to this tile's top-left corner.
        sub_gt = [gt[i] for i in range(6)]
        (blockxsize, blockysize) = ds.GetRasterBand(1).GetBlockSize()
        sub_gt[0] = gt[0] + tile_x * blockxsize * gt[1]
        sub_gt[3] = gt[3] + tile_y * blockysize * gt[5]
        out_f = gdal.VSIFOpenL(aux_xml_filename, 'wb')
        if out_f is None:
            print('ERROR: Cannot create %s' % aux_xml_filename)
            return 1
        content = """<PAMDataset>
  <SRS>%s</SRS>
  <GeoTransform>%.18g,%.18g,%.18g,%.18g,%.18g,%.18g</GeoTransform>
</PAMDataset>
""" % (srs, sub_gt[0], sub_gt[1], sub_gt[2], sub_gt[3], sub_gt[4], sub_gt[5])
        gdal.VSIFWriteL(content, 1, len(content), out_f)
        gdal.VSIFCloseL(out_f)
    else:
        # No SRS: make sure no stale sidecar is left behind.
        gdal.Unlink('%s.aux.xml' % jpg_filename)
    return 0
def test_vsifile_13():
    """Open and stat bare VSI filesystem prefixes (no file part).

    None of these calls is expected to return a usable handle; the test only
    checks that they do not crash.
    """
    prefixes = [
        '/vsigzip',
        '/vsizip',
        '/vsitar',
        '/vsimem',
        '/vsisparse',
        '/vsisubfile',
        '/vsicurl',
        '/vsis3',
        '/vsicurl_streaming',
        '/vsis3_streaming',
        '/vsistdin',
    ]

    # Read-open every prefix.
    for prefix in prefixes:
        gdal.VSIFOpenL(prefix, 'rb')

    # /vsistdout is write-only; close the handle if one came back.
    handle = gdal.VSIFOpenL('/vsistdout', 'wb')
    if handle is not None:
        gdal.VSIFCloseL(handle)

    # Stat every prefix, /vsistdout included.
    for prefix in prefixes + ['/vsistdout']:
        gdal.VSIStatL(prefix)
def test_vsifile_14():
    """Opening a deeply nested /vsitar/ chain must fail gracefully, not crash."""
    nested_path = '/vsitar//vsitar//vsitar//vsitar//vsitar//vsitar//vsitar//vsitar/a.tgzb.tgzc.tgzd.tgze.tgzf.tgz.h.tgz.i.tgz'
    # Errors are expected; swallow them with the test error handler.
    with gdaltest.error_handler():
        gdal.VSIFOpenL(nested_path, 'rb')