# Concat path and file name hitlist = [path + n for n in hitlist] # Write to file with open(iflist, 'w') as text_file: for item in hitlist: text_file.write('%s\n' % item) # Build vrt from text file avrt = idir + '/rp5k130k90m_' + j + '_' + i + '_' + k + '_ifl/cfinal/' + fluxdir + '/' + j + '_' + i + '_' + k + '_' + l + '_cflux.vrt' subprocess.call([ "gdalbuildvrt", "-vrtnodata", "0", "-separate", "-input_file_list", iflist, avrt ]) # Sum flux files intersecting with polygon boundary csum = hfu.rastersum(avrt) # Write to file hfu.array2raster(otiff1, avrt, gdal.GDT_Float32, 'GTiff', csum) # Get geometry from zone shapefile with fiona.open( idir + '/rp5k130k90m_' + j + '_' + i + '_' + k + '_ifl/cfinal/' + fluxdir + '/' + j + '_' + i + '_' + k + '_' + l + '.shp', "r") as shapefile: geoms = [feature["geometry"] for feature in shapefile] # Use zone boundary to mask flux sum layer with rasterio.open(otiff1) as src: out_image, out_transform = mask(src, geoms, crop=True) out_meta = src.meta.copy() # Sum flux values to get total for the zone tcsum = np.sum(out_image) # Delete rectangle cropped raster os.remove(otiff1) # Save zone cropped raster
# Reclass loss year name rclyname = p1.rclyname # List loss year files fl = os.listdir(p1.tld + p1.fcd) fl = [i for i in fl if 'lossyear' in i and i.endswith('.tif')] # Get file name to work on fnamely = p1.tld + p1.fcd + '/' + fl[j] # Loss year lyear = hfu.raster2array(fnamely) lya = lyear[0] del lyear if gtorlt == 'gt': # Reclass loss year > 12 to 1, else 0 lya = np.where(lya > lyv, 1, 0) lya = lya.astype('int8') else: # Reclass loss year <= 12 to 1, else 0 lya = np.where((lya > 0) & (lya <= lyv), 1, 0) lya = lya.astype('int8') # Output reclassed loss file name ofn = os.path.dirname(fnamely) + os.sep + os.path.basename(fnamely).replace( 'lossyear', rclyname) # Write to array hfu.array2raster(ofn, fnamely, gdal.GDT_Byte, 'GTiff', lya)
# --- Intersect layer extents, clip the flux raster, and mask to the zone ---
# NOTE(review): chunk of a larger script; `minx/miny/maxx/maxy`, `fluxext`,
# `tcext`, `tclext`, `flname`, `otiff1`, `rwkt` and `geoms` come from
# upstream — confirm against the full file.
# Get intersection of extents: max of the minimums, min of the maximums
minx = np.max([minx, fluxext[0], tcext[0], tclext[0]])
miny = np.max([miny, fluxext[1], tcext[1], tclext[1]])
maxx = np.min([maxx, fluxext[2], tcext[2], tclext[2]])
maxy = np.min([maxy, fluxext[3], tcext[3], tclext[3]])
#--------------------------------------------------
# Read in flux raster, subset by shape bounding box
dfr = hfu.raster2array(flname, clip=[minx, miny, maxx, maxy])
# If raster2array returns a valid output, keep processing.
# FIX: use an identity test against the False sentinel instead of `!= False`
# (equality comparison to a bool is fragile — e.g. it broadcasts if the
# helper ever returns a bare numpy array — and is flagged by linters, E712).
if dfr is not False:
    # Write clipped raster to file; '#' template means geotransform and
    # projection are supplied explicitly via keyword args
    hfu.array2raster(otiff1, '#', gdal.GDT_Float32, 'GTiff', dfr[0],
                     geotrans=dfr[1], rasterproj=rwkt)
    # Use zone boundary to mask flux sum layer; crop=True shrinks the
    # window to the geometry bounds
    with rasterio.open(otiff1) as src:
        out_image1, out_transform = mask(src, geoms, crop=True)
        out_meta = src.meta.copy()
    # Delete rectangle cropped raster (no longer needed once masked)
    os.remove(otiff1)
    # Sum flux values to get total for the zone
    fluxsum = np.sum(out_image1)
    # Break out of script if there's no flux
    if fluxsum == 0:
        print('no flux, exiting script')
        sys.exit(0)
    str(j) + '_' + str(gc1) + '_' + str(ptid1) + '_' + str(gc2) + '_' +
    str(ptid2) + '\n')
# NOTE(review): this fragment is cut at both edges — the call above and the
# matching `if` for the `else` below are outside the visible span.
else:
    # Highest probability path value (max over positive cells)
    lcd = np.max(cct[cct > 0])
    # Average prob in corridor (cells at or above the 10th-percentile cutoff
    # cq10 — presumably computed upstream; TODO confirm)
    apd = np.mean(cct[cct >= cq10])
    # Get probability of dispersal values only within percentile corridor;
    # everything below the cutoff is zeroed
    cct = np.where(cct >= cq10, cct, 0)
    # Output file name
    orast = tdir + ocp + str(gc1) + '_' + str(
        ptid1) + '_' + str(gc2) + '_' + str(
            ptid2) + '_temp.tif'
    #%%
    # Save corridor raster, georeferenced from the cost-surface raster
    hfu.array2raster(orast, csrast, gdal.GDT_Float32, 'GTiff', cct)
    # Append file to the finished-corridor list for later mosaicking
    cfinishlist.append(orast)
    # Create new empty df and populate columns
    newdf = pandas.DataFrame(columns=[
        'gc1', 'ptid1', 'gc2', 'ptid2', 'cx1', 'cy1', 'cx2', 'cy2', 'dist',
        'pdxmid', 'pdymid', 'lcd', 'apd'
    ])
    # NOTE(review): DataFrame.append was deprecated in pandas 1.4 and removed
    # in 2.0 — this should become pandas.concat when this file is touched.
    # The dict literal continues past the visible span.
    newdf = newdf.append(
        {
            'gc1': gc1,
            'ptid1': int(ptid1),
            'gc2': gc2,
            'ptid2': int(ptid2),
            'cx1': cx1,
            'cy1': cy1,
# Get file name to work on fnamely = p1.tld + p1.pfdd + '/' + fl[j] # Loss year lyear = hfu.raster2array(fnamely) lya = lyear[0] del lyear # List tree cover files y2k = os.listdir(p1.tld + p1.pfdd) y2k = [i for i in y2k if 'treecover2000' in i and i.endswith('.tif')] y2k.sort() # Get file name to work on y2knamely = p1.tld + p1.pfdd + '/' + y2k[j] # Year 2000 tree cover y2k = hfu.raster2array(y2knamely)[0] # Loop through years for lyv in range(0, 19, 1): # Reclass loss year ly = np.where(lya == lyv, 1, 0) # Multiply loss year 1/0 raster by percent tree cover by 900m2 ly = ly * y2k.astype('float32') * 900 / 100.0 # Output tree cover loss name newfname = lybytc2000name + '_' + str(lyv) ofn = os.path.dirname(fnamely) + os.sep + os.path.basename( fnamely).replace('lossyear', newfname) # Write to array hfu.array2raster(ofn, fnamely, gdal.GDT_Int16, 'GTiff', np.rint(ly).astype('int16'))
    # NOTE(review): fragment cut at both edges — the command list being
    # closed here and the final gdalbuildvrt call both extend outside the
    # visible span.
    str(newbb[3]), ovrt
]
# Append each tile path as a positional input to the gdalbuildvrt command
for myfile in flist:
    acmd.append(myfile)
subprocess.call(acmd)
# If aggregating datamask files, reclassify water values (2) to 0
if 'datamask' in p:
    # Read vrt to array
    dm = hfu.raster2array(ovrt)[0]
    # Reclassify: water (2) -> 0, everything else unchanged
    dm = np.where(dm == 2, 0, dm)
    # Name for temporary tiff
    ttiff = aggodir + '/ttiff_' + p + '_' + str(k) + '.tif'
    # Write to tiff, georeferenced from the vrt
    hfu.array2raster(ttiff, ovrt, gdal.GDT_Byte, 'GTiff', dm)
    # Aggregate to new resolution using average operator
    ofile = aggodir + '/temp_90m_' + p + '_' + str(k) + '.tif'
    subprocess.call([
        "gdalwarp", "-multi", "-wo", "NUM_THREADS=ALL_CPUS", "-tr", aggdim,
        aggdim, "-r", "average", "-srcnodata", ndval, "-ot", "Float32",
        ttiff, ofile
    ])
    # Create vrt from aggregated file, clipping off the collar (-te crops
    # to the target bounding box)
    # NOTE(review): name uses `p + str(k)` with no underscore, unlike the
    # other temp names above — confirm this is intentional.
    aggvrt = aggodir + '/agg_temp_' + p + str(k) + '.vrt'
    subprocess.call([
        "gdalbuildvrt", "-te", str(bbox[0]), str(bbox[1]), str(bbox[2]),
        str(bbox[3]), aggvrt, ofile
        # NOTE(review): fragment cut at the start — the expression being
        # closed here and the matching `if` for the `else` below are
        # outside the visible span.
        j) + '.tif'
    # Read in large water body tile to array
    lwb = hfu.raster2array(lwbtile)[0]
    # Burn in 255 for large water bodies (255 presumably the nodata /
    # impassable cost value — TODO confirm).
    fc[lwb == 1] = 255
    lwb = None
    # Burn in no data value as 255 from data mask (0 = no data)
    dmasktile = idir + '/' + reg + '_rtile_datamask_' + str(j) + '.tif'
    dmask = hfu.raster2array(dmasktile)[0]
    fc[dmask == 0] = 255
    dmask = None
    # Write to file
    csrast = odir + '/' + reg + '_rtile_cs' + str(y) + spname + '_' + str(
        j) + '.tif'
    # Save as byte
    hfu.array2raster(csrast, fortile, gdal.GDT_Byte, 'GTiff', fc)
else:
    # Check for forest cover tile and make sure output doesn't exist
    if (os.path.exists(idir + '/' + reg + '_rtile_treecover' + str(y) +
                       fpat + str(j) + '.tif')
        ) and not os.path.exists(odir + '/' + reg + '_rtile_cs' + str(y) +
                                 spname + '_' + str(j) + '.tif'):
        # Get forest cover tile
        fortile = idir + '/' + reg + '_rtile_treecover' + str(y) + fpat + str(
            j) + '.tif'
        fco = hfu.raster2array(fortile)
        fc = fco[0]
        # Transform forest cover values (high forest cover values = low cost)
        fc = fc.astype(np.float32) / 100  # Convert to 0-1 range
        # Use exponential transformation from Keeley et al. 2016
        # Transformed values range from 1 (low resistance) to 100 (high resistance)
del gain # Calculate forest loss through whatever year and update canopy cover. treecoverupdate = np.where((lya > 0) & (lya <= lyv), 0, tca) # Update with gain using 50% value treecoverupdategain = np.where(gn == 1, 50, treecoverupdate) # Make sure int8 data type treecoverupdate = treecoverupdate.astype('int8') treecoverupdategain = treecoverupdategain.astype('int8') # Output treecover file name ofn = os.path.dirname(fname2000) + os.sep + os.path.basename( fname2000).replace('treecover2000', 'treecover' + uyear) # Write to array hfu.array2raster(ofn, fname2000, gdal.GDT_Byte, 'GTiff', treecoverupdate) del treecoverupdate # Output treecover with gain file name ofn = os.path.dirname(fname2000) + os.sep + os.path.basename( fname2000).replace('treecover2000', 'treecover' + uyear + 'gn') # Write to array hfu.array2raster(ofn, fname2000, gdal.GDT_Byte, 'GTiff', treecoverupdategain) else: # Calculate forest loss through whatever year and update canopy cover. treecoverupdate = np.where((lya > 0) & (lya <= lyv), 0, tca) # Make sure int8 data type treecoverupdate = treecoverupdate.astype('int8')