def do_list_tiles_by_xy_multiple(config):
    tiles = list_tiles(x=[124, 125], y=[-25, -24], years=[2002], satellites=["LS7"],
                       dataset_types=[DatasetType.ARG25, DatasetType.PQ25, DatasetType.FC25],
                       database=config.get_db_database(), user=config.get_db_username(),
                       password=config.get_db_password(), host=config.get_db_host(),
                       port=config.get_db_port())

    for tile in tiles:
        _log.debug("Found tile xy = %s acq date = [%s]", tile.xy, tile.end_datetime)
def get_tiles_from_db(self):
    from datacube.api.query import list_tiles

    x_list = [self.x]
    y_list = [self.y]

    for tile in list_tiles(x=x_list, y=y_list, acq_min=self.acq_min, acq_max=self.acq_max,
                           satellites=list(self.satellites),
                           dataset_types=self.get_dataset_types()):
        yield tile
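# A hedged usage sketch: any object exposing x, y, acq_min, acq_max,
# satellites and get_dataset_types() (as the method above assumes) drives the
# generator lazily, so tiles stream one at a time instead of being listed up
# front. The `task` parameter here is hypothetical.
def example_consume_tiles(task):
    for tile in task.get_tiles_from_db():
        _log.info("tile xy = %s acq = [%s]", tile.xy, tile.end_datetime)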
def do_list_tiles_by_xy_single_to_file(config):
    _log.info("Testing list_tiles...")

    tiles = list_tiles(x=[123], y=[-25], acq_min=date(2002, 1, 1), acq_max=date(2002, 12, 31),
                       satellites=[Satellite.LS7],
                       dataset_types=[DatasetType.ARG25, DatasetType.PQ25, DatasetType.FC25],
                       database=config.get_db_database(), user=config.get_db_username(),
                       password=config.get_db_password(), host=config.get_db_host(),
                       port=config.get_db_port())

    for tile in tiles:
        _log.info("Found tile xy = %s acq date = [%s] NBAR = [%s]",
                  tile.xy, tile.end_datetime, tile.datasets[DatasetType.ARG25].path)
def run(self):
    self.parse_arguments()

    config = Config(os.path.expanduser("~/.datacube/config"))
    _log.debug(config.to_str())

    # Clear stack files
    # TODO - filename consistency and safety and so on
    if self.stack_vrt:
        for satellite, dataset_type in itertools.product(self.satellites, self.dataset_types):
            path = os.path.join(self.output_directory,
                                get_filename_file_list(satellite, dataset_type, self.x, self.y))
            check_overwrite_remove_or_fail(path, self.overwrite)

    # TODO once WOFS is in the cube
    for tile in list_tiles(x=[self.x], y=[self.y], acq_min=self.acq_min, acq_max=self.acq_max,
                           satellites=list(self.satellites),
                           dataset_types=intersection(self.dataset_types, dataset_type_database),
                           database=config.get_db_database(), user=config.get_db_username(),
                           password=config.get_db_password(), host=config.get_db_host(),
                           port=config.get_db_port()):

        if self.list_only:
            _log.info("Would retrieve datasets [%s]",
                      [tile.datasets[t].path for t in intersection(self.dataset_types, dataset_type_database)])
            continue

        pqa = None

        # Apply PQA if specified
        if self.apply_pqa_filter:
            pqa = tile.datasets[DatasetType.PQ25]

        for dataset_type in intersection(self.dataset_types, dataset_type_database):
            retrieve_data(tile.datasets[dataset_type], pqa, self.pqa_mask,
                          self.get_output_filename(tile.datasets[dataset_type]),
                          tile.x, tile.y, self.overwrite, self.stack_vrt)

        nbar = tile.datasets[DatasetType.ARG25]

        self.generate_derived_nbar(intersection(self.dataset_types, dataset_type_derived_nbar),
                                   nbar, pqa, self.pqa_mask, self.overwrite)

    # Generate VRT stack
    if self.stack_vrt:
        for satellite, dataset_type in itertools.product(self.satellites, self.dataset_types):
            path = os.path.join(self.output_directory,
                                get_filename_file_list(satellite, dataset_type, self.x, self.y))

            if os.path.exists(path):
                for band in BANDS[dataset_type, satellite]:
                    path_vrt = os.path.join(self.output_directory,
                                            get_filename_stack_vrt(satellite, dataset_type, self.x, self.y, band))
                    _log.info("Generating VRT file [%s] for band [%s]", path_vrt, band)

                    # gdalbuildvrt -separate -b <band> -input_file_list <input file> <vrt file>
                    subprocess.call(["gdalbuildvrt", "-separate",
                                     "-b", str(band.value),
                                     "-input_file_list", path,
                                     path_vrt])
def create(self, x, y, satellites, acq_min, acq_max, dataset_types, bands,
           months=None, exclude=None, sort=SortType.ASC):
    self.x = x
    self.y = y
    self.satellites = satellites
    self.acq_min = acq_min
    self.acq_max = acq_max
    self.stack = {}
    self.acq_stack = []
    self.meta_stack = []
    self.bands = bands
    self.tile_shape = None
    self.shape_stack = []
    self.pqa_stack = None

    tiles = list_tiles([x], [y], satellites, acq_min, acq_max, dataset_types, months, exclude, sort)

    for tile in tiles:
        dataset = tile.datasets[DatasetType.ARG25] if DatasetType.ARG25 in tile.datasets else None
        if dataset is None:
            continue

        tile_metadata = get_dataset_metadata(dataset)
        if tile_metadata is None:
            continue

        pqa = tile.datasets[DatasetType.PQ25] if DatasetType.PQ25 in tile.datasets else None
        if pqa is None:
            continue

        if self.pqa_stack is None:
            self.pqa_stack = [pqa]
        else:
            self.pqa_stack.append(pqa)

        data = get_dataset_data(dataset, bands)

        need_shape = True
        for band in data:
            if need_shape:
                self.shape_stack.append(np.array(data[band]).shape)
                need_shape = False

            if band in self.stack:
                # Append this acquisition to the existing per-band stack
                self.stack[band] = np.vstack((self.stack[band], np.array(data[band]).ravel()))
            else:
                self.stack[band] = np.array(data[band]).ravel()

        self.acq_stack.append(tile.start_datetime)
        self.meta_stack.append(tile_metadata)

        del data
        del pqa
        del tile_metadata
        del dataset
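# A hedged sketch of the layout create() builds: self.stack maps each band to
# a 2-D array with one row per acquisition, each row a flattened tile. The
# 4x4 tiles below are invented (real ARG25 tiles are much larger).
def example_stack_layout():
    import numpy as np
    stack = {}
    for acq in range(3):                    # three pretend acquisitions
        tile_data = np.full((4, 4), acq)    # pretend 4x4 tile for one band
        band = "NIR"
        if band in stack:
            stack[band] = np.vstack((stack[band], tile_data.ravel()))
        else:
            stack[band] = tile_data.ravel()
    print(stack["NIR"].shape)               # (3, 16): acquisitions x pixels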
def do_list_tiles_by_xy_single():
    _log.info("Testing list_tiles...")

    tiles = list_tiles(x=[120], y=[-20], acq_min=date(2005, 1, 1), acq_max=date(2005, 12, 31),
                       satellites=[Satellite.LS5, Satellite.LS7, Satellite.LS8],
                       dataset_types=[DatasetType.ARG25])
                       # dataset_types=[DatasetType.ARG25, DatasetType.PQ25, DatasetType.FC25])
                       # dataset_types=[DatasetType.ARG25, DatasetType.PQ25, DatasetType.FC25, DatasetType.DEM, DatasetType.NDVI])

    count = {Satellite.LS5: 0, Satellite.LS7: 0, Satellite.LS8: 0}

    for tile in tiles:
        _log.info("Found tile xy = %s acq date = [%s] NBAR = [%s]",
                  tile.xy, tile.end_datetime, tile.datasets[DatasetType.ARG25].path)
        count[tile.datasets[DatasetType.ARG25].satellite] += 1

    _log.info(count)
def get_tile_listing(xa, ya, start, end, satellite, datasets, months=None):
    """List tiles; if months is given, only tiles acquired in those months are returned."""
    tiles = list_tiles(x=xa, y=ya, acq_min=start, acq_max=end,
                       satellites=satellite, dataset_types=datasets)

    data = "{\"request\":\"DONE\",\"tiles\":["
    data_arr = []

    for tile in tiles:
        if months and tile.start_datetime.month not in months:
            continue
        data_arr.append("{\"x\":" + str(tile.x) + ",\"y\":" + str(tile.y) +
                        ",\"date\":\"" + str(tile.start_datetime) + "\"}")

    data += ','.join(data_arr) + "]}"
    return data
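# A hedged alternative sketch: building the same response with the json
# module avoids hand-escaped quotes and malformed output should a value ever
# contain '"'. Illustrative only; the handler above is the one in use.
def example_tile_listing_json(tiles, months=None):
    import json
    payload = {"request": "DONE",
               "tiles": [{"x": t.x, "y": t.y, "date": str(t.start_datetime)}
                         for t in tiles
                         if not months or t.start_datetime.month in months]}
    return json.dumps(payload)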
def do_list_tiles_by_xy_single_no_ls7_slc_off():
    _log.info("Testing list_tiles NO LS7 SLC OFF...")

    from datetime import date

    tiles = list_tiles(x=[120], y=[-20], acq_min=date(2005, 1, 1), acq_max=date(2005, 12, 31),
                       satellites=[Satellite.LS5, Satellite.LS7, Satellite.LS8],
                       dataset_types=[DatasetType.ARG25],
                       # dataset_types=[DatasetType.ARG25, DatasetType.PQ25, DatasetType.FC25],
                       # dataset_types=[DatasetType.ARG25, DatasetType.PQ25, DatasetType.FC25, DatasetType.DEM, DatasetType.NDVI],
                       exclude=[LS7_SLC_OFF_EXCLUSION])

    count = {Satellite.LS5: 0, Satellite.LS7: 0, Satellite.LS8: 0}

    for tile in tiles:
        _log.debug("Found tile xy = %s acq date = [%s] NBAR = [%s]",
                   tile.xy, tile.end_datetime, tile.datasets[DatasetType.ARG25].path)
        count[tile.datasets[DatasetType.ARG25].satellite] += 1

    _log.info(count)
def get_tiles_from_db(self):
    from datacube.api.query import list_tiles

    x_list = [self.x]
    y_list = [self.y]

    dataset_types = [self.dataset_type]

    if self.mask_pqa_apply:
        dataset_types.append(DatasetType.PQ25)

    if self.mask_wofs_apply:
        dataset_types.append(DatasetType.WATER)

    for tile in list_tiles(x=x_list, y=y_list, acq_min=self.acq_min, acq_max=self.acq_max,
                           satellites=list(self.satellites), dataset_types=dataset_types):
        yield tile
def get_tiles_from_db(self):
    from datacube.api.query import list_tiles

    x_list = [self.x]
    y_list = [self.y]

    dataset_types = [self.dataset_type]

    if self.mask_pqa_apply and DatasetType.PQ25 not in dataset_types:
        dataset_types.append(DatasetType.PQ25)

    if self.mask_wofs_apply and DatasetType.WATER not in dataset_types:
        dataset_types.append(DatasetType.WATER)

    exclude = None

    if not self.include_ls7_slc_off or not self.include_ls8_pre_wrs2:
        exclude = []
        if not self.include_ls7_slc_off:
            exclude.append(LS7_SLC_OFF_EXCLUSION)
        if not self.include_ls8_pre_wrs2:
            exclude.append(LS8_PRE_WRS_2_EXCLUSION)

    include = None
    acq_min = self.acq_min
    acq_max = self.acq_max

    if self.season:
        season_name, (season_start_month, season_start_day), (season_end_month, season_end_day) = self.season
        acq_min, acq_max, include = build_date_criteria(acq_min, acq_max,
                                                        season_start_month, season_start_day,
                                                        season_end_month, season_end_day)

    for tile in list_tiles(x=x_list, y=y_list, acq_min=acq_min, acq_max=acq_max,
                           satellites=list(self.satellites), dataset_types=dataset_types,
                           exclude=exclude, include=include):
        yield tile
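# A hedged sketch of the season value this method unpacks: a name plus
# (month, day) pairs marking the season start and end. The constant and its
# values are hypothetical; only the shape matters to the unpacking above.
EXAMPLE_WET_SEASON = ("WET", (11, 1), (3, 31))  # 1 Nov through 31 Mar
# With self.season set to such a tuple, build_date_criteria() narrows each
# year within [acq_min, acq_max] to that month/day window via `include`.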
def run(self):
    self.parse_arguments()

    config = Config()
    _log.debug(config.to_str())

    cell_x, cell_y = latlon_to_cell(self.latitude, self.longitude)

    # TODO once WOFS is in the cube
    if self.dataset_type in dataset_type_database:

        # TODO - PQ is UINT16 (others are INT16) and so -999 NDV doesn't work
        ndv = UINT16_MAX if self.dataset_type == DatasetType.PQ25 else NDV

        headered = False

        with self.get_output_file(self.dataset_type, self.overwrite) as csv_file:
            csv_writer = csv.writer(csv_file, delimiter=self.delimiter)

            for tile in list_tiles(x=[cell_x], y=[cell_y], acq_min=self.acq_min, acq_max=self.acq_max,
                                   satellites=list(self.satellites), dataset_types=[self.dataset_type],
                                   database=config.get_db_database(), user=config.get_db_username(),
                                   password=config.get_db_password(), host=config.get_db_host(),
                                   port=config.get_db_port()):

                # Output a HEADER
                if not headered:
                    header_fields = ["SATELLITE", "ACQUISITION DATE"] + \
                                    [b.name for b in tile.datasets[self.dataset_type].bands]
                    csv_writer.writerow(header_fields)
                    headered = True

                pqa = None

                # Apply PQA if specified
                if self.apply_pqa_filter:
                    pqa = tile.datasets[DatasetType.PQ25]

                data = retrieve_pixel_value(tile.datasets[self.dataset_type], pqa, self.pqa_mask,
                                            self.latitude, self.longitude, ndv=ndv)
                _log.debug("data is [%s]", data)

                if has_data(tile.datasets[self.dataset_type], data, no_data_value=ndv) or self.output_no_data:
                    csv_writer.writerow([tile.datasets[self.dataset_type].satellite.value, str(tile.end_datetime)] +
                                        decode_data(tile.datasets[self.dataset_type], data))

    elif self.dataset_type == DatasetType.WATER:
        base = "/g/data/u46/wofs/water_f7q/extents/{x:03d}_{y:04d}/LS*_WATER_{x:03d}_{y:04d}_*.tif".format(x=cell_x, y=cell_y)

        headered = False

        with self.get_output_file(self.dataset_type, self.overwrite) as csv_file:
            csv_writer = csv.writer(csv_file, delimiter=self.delimiter)

            for f in glob.glob(base):
                _log.debug(" *** Found WOFS file [%s]", f)

                satellite, dataset_type, x, y, acq_dt = extract_fields_from_filename(os.path.basename(f))

                if acq_dt.date() < self.acq_min or acq_dt.date() > self.acq_max:
                    continue

                dataset = DatasetTile.from_path(f)
                _log.debug("Found dataset [%s]", dataset)

                # Output a HEADER
                if not headered:
                    header_fields = ["SATELLITE", "ACQUISITION DATE"] + [b.name for b in dataset.bands]
                    csv_writer.writerow(header_fields)
                    headered = True

                data = retrieve_pixel_value(dataset, None, None, self.latitude, self.longitude)
                _log.debug("data is [%s]", data)

                # TODO
                if True or self.output_no_data:
                    csv_writer.writerow([satellite.value, str(acq_dt),
                                         decode_wofs_water_value(data[Wofs25Bands.WATER][0][0]),
                                         str(data[Wofs25Bands.WATER][0][0])])
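# The original selected the no-data value with the legacy "cond and X or Y"
# idiom, which silently breaks when X is falsy; a minimal sketch of the
# pitfall and the conditional expression now used in run() above:
def example_and_or_pitfall():
    UINT16_MAX, NDV = 65535, -999
    is_pq = False
    assert (is_pq and UINT16_MAX or NDV) == NDV        # happens to work here
    zero_ndv = 0
    assert (True and zero_ndv or NDV) == NDV           # wrong: 0 is falsy
    assert (zero_ndv if True else NDV) == 0            # ternary is safe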
def obtain_water_statistics(x, y, start, end, satellite, months=None):
    StartDate = start
    EndDate = end

    f_name = [str(x), str(y), str(start), str(end), str(satellite)]
    if months is not None:
        f_name.append("+".join([str(m) for m in months]))
    f_name = "_".join(f_name)

    t_name = f_name
    for ch in ['[', ']', '<', '>']:
        t_name = t_name.replace(ch, '')
    t_name = hashlib.sha512(t_name.encode('utf-8')).hexdigest()[0:32]

    f_name = '/tilestore/tile_cache/' + f_name + '.png'
    t_name = '/tilestore/tile_cache/' + t_name + '.tif'

    total_count = None
    wet_count = None
    tile_metadata = None

    tiles = list_tiles(x=[x], y=[y], acq_min=StartDate, acq_max=EndDate, satellites=satellite,
                       dataset_types=[DatasetType.ARG25, DatasetType.PQ25],
                       sort=SortType.ASC, months=months)

    for tile in tiles:
        dataset = tile.datasets[DatasetType.ARG25] if DatasetType.ARG25 in tile.datasets else None
        if dataset is None:
            continue

        pqa = tile.datasets[DatasetType.PQ25] if DatasetType.PQ25 in tile.datasets else None
        tile_metadata = get_dataset_metadata(dataset)

        wet = get_mask_pqa(pqa, [PqaMask.PQ_MASK_LAND], mask=None)

        clear = get_mask_pqa(pqa, [PqaMask.PQ_MASK_CLOUD, PqaMask.PQ_MASK_CONTIGUITY,
                                   PqaMask.PQ_MASK_SATURATION], mask=None)
        clear = ~clear

        wet_mask = wet & clear

        # Count total (clear) and wet observations per pixel
        if total_count is None:
            total_count = numpy.zeros(clear.shape)
        if wet_count is None:
            wet_count = numpy.zeros(wet.shape)

        try:
            total_count[clear] = total_count[clear] + 1
            wet_count[wet_mask] = wet_count[wet_mask] + 1
        except Exception:
            pass

    if total_count is None or wet_count is None:
        return 'None'
    if not numpy.any(total_count):
        return 'None'

    wetper = wet_count / total_count

    # Make a colorized image:
    #   1-5%: red, 5-20%: yellow, 20-50%: green, 50-80%: light blue, >= 80%: blue
    rgb = numpy.zeros((wet_count.shape[0], wet_count.shape[1], 3), 'uint8')

    red_mask = numpy.array(wetper)
    red_mask[(red_mask >= 0.01) * (red_mask < 0.05)] = -998.0
    red_mask = numpy.in1d(red_mask.ravel(), -998.0).reshape(red_mask.shape)

    yellow_mask = numpy.array(wetper)
    yellow_mask[(yellow_mask < 0.2) * (yellow_mask >= 0.05)] = -998.0
    yellow_mask = numpy.in1d(yellow_mask.ravel(), -998.0).reshape(yellow_mask.shape)

    green_mask = numpy.array(wetper)
    green_mask[(green_mask < 0.5) * (green_mask >= 0.2)] = -998.0
    green_mask = numpy.in1d(green_mask.ravel(), -998.0).reshape(green_mask.shape)

    lblue_mask = numpy.array(wetper)
    lblue_mask[(lblue_mask < 0.8) * (lblue_mask >= 0.5)] = -998.0
    lblue_mask = numpy.in1d(lblue_mask.ravel(), -998.0).reshape(lblue_mask.shape)

    blue_mask = numpy.array(wetper)
    blue_mask[blue_mask >= 0.8] = -998.0
    blue_mask = numpy.in1d(blue_mask.ravel(), -998.0).reshape(blue_mask.shape)

    rgb[..., 2][blue_mask] = 255
    rgb[..., 2][lblue_mask] = 150
    rgb[..., 1][lblue_mask] = 150
    rgb[..., 1][green_mask] = 255
    rgb[..., 1][yellow_mask] = 255
    rgb[..., 0][yellow_mask] = 255
    rgb[..., 0][red_mask] = 255

    # Produce the output GeoTIFF
    driver = gdal.GetDriverByName("GTiff")
    raster = driver.Create(t_name, wet.shape[1], wet.shape[0], 3, gdal.gdalconst.GDT_Int16,
                           options=["BIGTIFF=YES", "INTERLEAVE=BAND"])
    raster.SetGeoTransform(tile_metadata.transform)
    raster.SetProjection(tile_metadata.projection)

    index = 1
    for i in range(3):
        stack_band = raster.GetRasterBand(index)
        stack_band.SetNoDataValue(0)
        stack_band.WriteArray(rgb[..., i])
        stack_band.ComputeStatistics(True)
        stack_band.FlushCache()
        del stack_band
        index += 1

    raster.FlushCache()
    del raster

    return t_name
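# A tiny runnable sketch of the per-pixel counting scheme above: boolean
# masks bump the "observed clear" and "observed wet" counters, and their
# ratio is the wet fraction the colour thresholds are applied to. The 2x2
# masks are invented.
def example_wet_fraction():
    import numpy
    total_count = numpy.zeros((2, 2))
    wet_count = numpy.zeros((2, 2))
    observations = [
        (numpy.array([[True, True], [False, True]]),     # clear
         numpy.array([[True, False], [False, False]])),  # wet
        (numpy.array([[True, True], [True, True]]),
         numpy.array([[True, False], [False, True]])),
    ]
    for clear, wet in observations:
        wet_mask = wet & clear
        total_count[clear] += 1
        wet_count[wet_mask] += 1
    print(wet_count / numpy.maximum(total_count, 1))     # [[1. 0.] [0. 0.5]]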
def preview_cloudfree_mosaic(x, y, start, end, bands, satellite, iterations=0,
                             xsize=2000, ysize=2000, file_format="GTiff",
                             data_type=gdal.GDT_Int16):

    def resize_array(arr, size):
        # Nearest-neighbour resize via PIL, preserving int16 values
        r = numpy.array(arr).astype(numpy.int16)
        i = Image.fromarray(r)
        i2 = i.resize(size, Image.NEAREST)
        r2 = numpy.array(i2)
        del i2
        del i
        del r
        return r2

    StartDate = start
    EndDate = end
    best_data = {}

    band_str = "+".join([band.name for band in bands])
    sat_str = "+".join([sat.name for sat in satellite])
    cache_id = ["preview", str(x), str(y), str(start), str(end), band_str, sat_str,
                str(xsize), str(ysize), file_format, str(iterations)]
    f_name = "_".join(cache_id).replace(" ", "_")
    c_name = f_name

    cached_res = cache.get(c_name)
    if cached_res:
        return str(cached_res)

    f_name = os.path.join("/tilestore/tile_cache", f_name)

    tiles = list_tiles(x=[x], y=[y], acq_min=StartDate, acq_max=EndDate, satellites=satellite,
                       dataset_types=[DatasetType.ARG25, DatasetType.PQ25], sort=SortType.ASC)

    tile_metadata = None
    tile_count = 0
    tile_filled = False

    for tile in tiles:
        if tile_filled:
            break

        print("merging on tile " + str(tile.x) + ", " + str(tile.y))
        tile_count += 1

        dataset = tile.datasets[DatasetType.ARG25] if DatasetType.ARG25 in tile.datasets else None
        if dataset is None:
            print("No dataset available")
            tile_count -= 1
            continue

        tile_metadata = get_dataset_metadata(dataset)
        if tile_metadata is None:
            print("NO METADATA")
            tile_count -= 1
            continue

        pqa = tile.datasets[DatasetType.PQ25] if DatasetType.PQ25 in tile.datasets else None

        mask = get_mask_pqa(pqa, [PqaMask.PQ_MASK_CLEAR], mask=None)
        band_data = get_dataset_data_masked(dataset, mask=mask, bands=bands)

        for band in band_data:
            if band not in best_data:
                print("Adding " + band.name)
                best_data[band] = resize_array(band_data[band], (2000, 2000))
            else:
                best = numpy.array(best_data[band])
                swap_arr = numpy.in1d(best.ravel(), -999).reshape(best.shape)
                # Resize the incoming tile too, so shapes match the preview grid
                b_data = resize_array(band_data[band], (2000, 2000))
                best[swap_arr] = b_data[swap_arr]
                best_data[band] = numpy.copy(best)
                del b_data
                del best
                del swap_arr

        if iterations > 0 and tile_count > iterations:
            print("Exiting after " + str(iterations) + " iterations")
            break

    numberOfBands = len(bands)
    if numberOfBands == 0:
        return "None"
    if bands[0] not in best_data:
        print("No data was merged for " + str(x) + ", " + str(y))
        return "None"

    numberOfPixelsInXDirection = len(best_data[bands[0]])
    numberOfPixelsInYDirection = len(best_data[bands[0]][0])

    if tile_count < 1:
        print("No tiles found for " + str(x) + ", " + str(y))
        return "None"

    driver = gdal.GetDriverByName(file_format)
    if driver is None:
        print("No driver found for " + file_format)
        return "None"

    print(f_name + '.tif')
    raster = driver.Create(f_name + '.tif', numberOfPixelsInXDirection, numberOfPixelsInYDirection,
                           numberOfBands, data_type, options=["BIGTIFF=YES", "INTERLEAVE=BAND"])

    # Halve the pixel resolution in the geotransform to match the 2x downsample
    gt = tile_metadata.transform
    tile_metadata.transform = (gt[0], gt[1] * 2.0, gt[2], gt[3], gt[4], gt[5] * 2.0)
    raster.SetGeoTransform(tile_metadata.transform)
    print(tile_metadata.transform)
    raster.SetProjection(tile_metadata.projection)

    index = 1
    for band in bands:
        stack_band = raster.GetRasterBand(index)
        stack_band.SetNoDataValue(-999)
        stack_band.WriteArray(best_data[band])
        stack_band.ComputeStatistics(True)
        stack_band.FlushCache()
        del stack_band
        index += 1

    raster.FlushCache()
    del raster

    cache.set(c_name, f_name + ".tif")
    return f_name + ".tif"
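# A minimal sketch of the "keep the first clear observation" merge both
# mosaic functions rely on: pixels still at the -999 no-data value are
# replaced from the next acquisition; everything else is left alone. The
# arrays are invented 2x2 examples.
def example_nodata_fill():
    import numpy
    best = numpy.array([[100, -999], [-999, 300]], dtype=numpy.int16)
    incoming = numpy.array([[55, 200], [-999, 77]], dtype=numpy.int16)
    swap = numpy.in1d(best.ravel(), -999).reshape(best.shape)  # gaps in best
    best[swap] = incoming[swap]
    print(best)  # [[100, 200], [-999, 300]] -- one gap filled, one still empty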
def obtain_cloudfree_mosaic(x, y, start, end, bands, satellite, iterations=0,
                            xsize=4000, ysize=4000, file_format="GTiff",
                            data_type=gdal.GDT_Int16, months=None):
    StartDate = start
    EndDate = end
    best_data = {}

    band_str = "+".join([band.name for band in bands])
    sat_str = "+".join([sat.name for sat in satellite])
    cache_id = [str(x), str(y), str(start), str(end), band_str, sat_str,
                str(xsize), str(ysize), file_format, str(iterations)]
    f_name = "_".join(cache_id).replace(" ", "_")
    c_name = f_name

    cached_res = cache.get(c_name)
    if cached_res:
        return str(cached_res)

    f_name = os.path.join("/tilestore/tile_cache", f_name)

    tiles = list_tiles(x=[x], y=[y], acq_min=StartDate, acq_max=EndDate, satellites=satellite,
                       dataset_types=[DatasetType.ARG25, DatasetType.PQ25], sort=SortType.ASC)

    tile_metadata = None
    tile_count = 0
    tile_filled = False

    stats_file = open(f_name + '.csv', 'w+')
    total_ins = 0

    for tile in tiles:
        if tile_filled:
            break

        if months and tile.start_datetime.month not in months:
            continue

        tile_count += 1

        dataset = tile.datasets[DatasetType.ARG25] if DatasetType.ARG25 in tile.datasets else None
        if dataset is None:
            print("No dataset available")
            tile_count -= 1
            continue

        tile_metadata = get_dataset_metadata(dataset)
        if tile_metadata is None:
            print("NO METADATA")
            tile_count -= 1
            continue

        pqa = tile.datasets[DatasetType.PQ25] if DatasetType.PQ25 in tile.datasets else None

        mask = get_mask_pqa(pqa, [PqaMask.PQ_MASK_CLEAR], mask=None)
        band_data = get_dataset_data_masked(dataset, mask=mask, bands=bands)

        for band in band_data:
            if band not in best_data:
                best_data[band] = band_data[band]
                best = numpy.array(best_data[band])
                swap_arr = numpy.in1d(best.ravel(), -999).reshape(best.shape)
                good_ins = numpy.count_nonzero(best != -999)  # pixels filled by the first tile
            else:
                best = numpy.array(best_data[band])
                swap_arr = numpy.in1d(best.ravel(), -999).reshape(best.shape)
                b_data = numpy.array(band_data[band])
                best[swap_arr] = b_data[swap_arr]
                best_data[band] = numpy.copy(best)
                good_ins = len(numpy.where(b_data[swap_arr] != -999)[0])
                del b_data

            total_ins += good_ins
            stats_file.write(','.join([str(tile.x), str(tile.y),
                                       str(tile.start_datetime.year), str(tile.start_datetime.month),
                                       str(len(best[swap_arr])), str(good_ins), str(total_ins),
                                       str(dataset.path)]) + "\n")
            del swap_arr
            del best
            del good_ins

        if iterations > 0 and tile_count > iterations:
            print("Exiting after " + str(iterations) + " iterations")
            break

    numberOfBands = len(bands)
    if numberOfBands == 0:
        return "None"
    if bands[0] not in best_data:
        print("No data was merged for " + str(x) + ", " + str(y))
        return "None"

    numberOfPixelsInXDirection = len(best_data[bands[0]])
    print(numberOfPixelsInXDirection)
    numberOfPixelsInYDirection = len(best_data[bands[0]][0])
    print(numberOfPixelsInYDirection)

    pixels = numberOfPixelsInXDirection
    if numberOfPixelsInYDirection > numberOfPixelsInXDirection:
        pixels = numberOfPixelsInYDirection

    if tile_count < 1:
        print("No tiles found for " + str(x) + ", " + str(y))
        return "None"

    driver = gdal.GetDriverByName(file_format)
    if driver is None:
        print("No driver found for " + file_format)
        return "None"

    raster = driver.Create(f_name + '.tif', pixels, pixels, numberOfBands, data_type,
                           options=["BIGTIFF=YES", "INTERLEAVE=BAND"])
    raster.SetGeoTransform(tile_metadata.transform)
    raster.SetProjection(tile_metadata.projection)

    stats_file.close()

    index = 1
    for band in bands:
        stack_band = raster.GetRasterBand(index)
        stack_band.SetNoDataValue(-999)
        stack_band.WriteArray(best_data[band])
        stack_band.ComputeStatistics(True)
        stack_band.FlushCache()
        del stack_band
        index += 1

    raster.FlushCache()
    del raster

    cache.set(c_name, f_name + ".tif")
    return f_name + ".tif"
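# A hedged usage sketch for obtain_cloudfree_mosaic; the coordinates, dates
# and enum members below are illustrative and must match the deployed cube
# (Ls57Arg25Bands / Satellite are assumed importable from datacube.api.model).
def example_obtain_cloudfree_mosaic():
    from datetime import date
    path = obtain_cloudfree_mosaic(15, -40, date(2006, 1, 1), date(2006, 12, 31),
                                   [Ls57Arg25Bands.RED, Ls57Arg25Bands.GREEN, Ls57Arg25Bands.BLUE],
                                   [Satellite.LS5, Satellite.LS7], months=[6, 7, 8])
    print(path)  # "/tilestore/tile_cache/<cache-id>.tif" on success, "None" otherwise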