levels=256)
                    contrast.values[ii, jj] = tex.greycoprops(
                        glcm, 'contrast')[0].mean()
                    dissimilarity.values[ii, jj] = tex.greycoprops(
                        glcm, 'dissimilarity')[0].mean()
                    homogeneity.values[ii, jj] = tex.greycoprops(
                        glcm, 'homogeneity')[0].mean()
                    correlation.values[ii, jj] = tex.greycoprops(
                        glcm, 'correlation')[0].mean()
                    asm.values[ii, jj] = tex.greycoprops(glcm, 'ASM')[0].mean()

        # write array to new geotiff
        prefix = sfile.split('/')[-1][:-8]
        res_str = str(resolution).zfill(3)
        io.write_xarray_to_GeoTiff(
            mean, '%s/alos_%sm/%s_%sm_mean.tif' %
            (path2textures, res_str, prefix, res_str))
        io.write_xarray_to_GeoTiff(
            variance, '%s/alos_%sm/%s_%sm_variance.tif' %
            (path2textures, res_str, prefix, res_str))
        io.write_xarray_to_GeoTiff(
            contrast, '%s/alos_%sm/%s_%sm_contrast.tif' %
            (path2textures, res_str, prefix, res_str))
        io.write_xarray_to_GeoTiff(
            dissimilarity, '%s/alos_%sm/%s_%sm_dissimilarity.tif' %
            (path2textures, res_str, prefix, res_str))
        io.write_xarray_to_GeoTiff(
            homogeneity, '%s/alos_%sm/%s_%sm_homogeneity.tif' %
            (path2textures, res_str, prefix, res_str))
        io.write_xarray_to_GeoTiff(
            correlation, '%s/alos_%sm/%s_%sm_correlation.tif' %
            (path2textures, res_str, prefix, res_str))
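# Context for the truncated fragment at the top of this snippet: each window's grey-level
# co-occurrence matrix (GLCM) is reduced to scalar texture metrics with greycoprops.
# A minimal standalone sketch -- the distances/angles are assumptions; levels=256 matches
# the fragment above:
import numpy as np
import skimage.feature as tex

window = (np.random.rand(32, 32) * 255).astype('uint8')        # placeholder image window
glcm = tex.greycomatrix(window, distances=[1],
                        angles=[0, np.pi/4, np.pi/2, 3*np.pi/4],
                        levels=256, symmetric=True, normed=True)
contrast = tex.greycoprops(glcm, 'contrast')[0].mean()         # mean over the four angles
homogeneity = tex.greycoprops(glcm, 'homogeneity')[0].mean()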
OEmap = acc_stats_rf_tol0['omission_error_map']
CEmap = acc_stats_rf_tol0['commission_error_map']
omission_classes,omission_error_count = np.unique(lc.values[OEmap==1],return_counts=True)
commission_classes,commission_error_count = np.unique(lc_rf.values[CEmap==1],return_counts=True)
OErate=omission_error_count/class_count
CErate=commission_error_count/class_count
print('0 pixel tolerance')
print('labels',lc_labels)
print('OErate',OErate*100)
print('CErate',CErate*100)
print('PA    ',(1-OErate)*100)
print('UA    ',(1-CErate)*100)
print('OA    ',(acc_stats_rf_tol0['OA'])*100)

# Error map
error_map = lc.copy(deep=True)
error_map.values*=np.nan

error_map.values[(acc_stats_rf_simple_tol0['omission_error_map']==0)*(lc_simple.values==1)] = 6      # no tree
error_map.values[(acc_stats_rf_simple_tol0['omission_error_map']==0)*(lc_simple.values==2)] = 5      # tree
error_map.values[(acc_stats_rf_simple_tol0['omission_error_map']==1)*
                 (acc_stats_rf_simple_tol1['commission_error_map']==0)*(lc_simple.values==2)] = 4    # adjacent omission error
error_map.values[(acc_stats_rf_simple_tol0['commission_error_map']==1)*
                 (acc_stats_rf_simple_tol1['commission_error_map']==0)*(lc_rf_simple.values==2)] = 2 # adjacent commission error
error_map.values[(acc_stats_rf_simple_tol1['omission_error_map']==1)*(lc_simple.values==2)] = 3   #  omission error
error_map.values[(acc_stats_rf_simple_tol1['commission_error_map']==1)*(lc_rf_simple.values==2)] = 1 # commission error
"""
save layers to file
"""
io.write_xarray_to_GeoTiff(lc_rf,'%s%s_lc_class_rf_s1___%.0fm' % (path2output,site,dx))
io.write_xarray_to_GeoTiff(error_map,'%s%s_error_map_rf_s1___%.0fm' % (path2output,site,dx))
Example #3
# add a colorbar
cax = fig.add_axes([0.33,0.15,0.33,0.025])
plt.colorbar(im,orientation='horizontal',label='AGB / Mg ha$^{-1}$',cax=cax,
                extend='max')

fig.savefig('%slidar_AGB_models_and_CI95_%s.png' % (path2fig,version))
fig.show()

# write arrays to file
labels = ['median','95l','95u']
for ii, layer in enumerate([agb_median,agb95_l,agb95_u]):
    outfile_20m = '%s/020m/%s_%s_%s' % (pt3_output_dir,pt3_outfile_prefix,version,labels[ii])
    outfile_50m = '%s/050m/%s_%s_%s' % (pt3_output_dir,pt3_outfile_prefix,version,labels[ii])
    outfile_100m = '%s/100m/%s_%s_%s' % (pt3_output_dir,pt3_outfile_prefix,version,labels[ii])

    io.write_xarray_to_GeoTiff(layer,'%s' % outfile_20m)

    os.system("gdalwarp -overwrite -dstnodata -9999 -tr 50 -50 -r average \
            -t_srs '+proj=utm +zone=16 +datum=WGS84 +units=m +no_defs' \
            %s.tif %s.tif" % (outfile_20m,outfile_50m))

    os.system("gdalwarp -overwrite -dstnodata -9999 -tr 100 -100 -r average \
            -t_srs '+proj=utm +zone=16 +datum=WGS84 +units=m +no_defs' \
            %s.tif %s.tif" % (outfile_20m,outfile_100m))


"""
---------------------------------------------------------------------------------
REGRID TO DESIRED RESOLUTION (0.25 ha and 1 ha)
--------------------------------------------------------------------------------
"""
from scipy import ndimage as nd
import glob as glob
import sys
import xarray as xr
sys.path.append('../data_io')
import data_io as io
"""
List some file paths
"""
path2files = '/exports/csce/datastore/geos/groups/gcel/YucatanBiomass/uoe_lidar_sentinel/agb_Lidar_maps/gliht/'
path2output = '/home/dmilodow/DataStore_GCEL/YucatanBiomass/uoe_lidar_sentinel/agb_Lidar_maps/buffered_gliht/'
"""
Loop through the data files, and apply buffer in each case
"""
buffer_width = 9
ulim = 500
files = glob.glob('%s*.tif' % path2files)
count = 0
for f in files:
    count += 1
    print('\r%i/%i' % (count, len(files)))
    ds = xr.open_rasterio(f)
    mask = nd.maximum_filter(ds.values[0] < 0,
                             buffer_width,
                             mode='constant',
                             cval=0)
    ds_new = ds.copy()
    ds_new.values[0][mask] = -9999
    ds_new.values[ds.values > ulim] = -9999
    io.write_xarray_to_GeoTiff(ds_new.sel(band=1),
                               '%s%s' % (path2output, f.split('/')[-1]))
d20_bands_supres = DSen2(d10_bands,d20_bands)

# Combine the 10 m resolution layers
d10_bands = np.concatenate((d10_bands,d20_bands_supres),axis=1)
d10_bands = np.swapaxes(d10_bands,1,-1)
d10_bands = np.swapaxes(d10_bands,1,2)

# calculate the extra layers
%run /home/dmilodow/DataStore_DTM/STFC/wri_restoration_mapper/restoration-mapper/src/preprocessing/indices.py
d10_bands = evi(d10_bands, True)
d10_bands = bi(d10_bands, True)
d10_bands = msavi2(d10_bands, True)
d10_bands = si(d10_bands, True)

# load cloud mask
# !fmask_sentinel2Stacked.py -o cloud.img --safedir ../DATA/Sentinel2/awsdata/T30VUJ/L1C/S2B_MSIL1C_20190715T114359_N0208_R123_T30VUJ_20190715T150908.SAFE/
clouds = xr.open_rasterio('cloud.img')[0].sel(x=slice(W,E),y=slice(N,S)).values
cloud_and_shadow = clouds==2
cloud_and_shadow[clouds==3]=True
cloud_and_shadow[clouds==0]=True
cloud_and_shadow = resize(cloud_and_shadow,d10_bands[0,:,:,0].shape).astype('bool')
for tstep in range(0,d10_bands.shape[0]):
    for band in range(0,d10_bands.shape[-1]):
        d10_bands[tstep,:,:,band][cloud_and_shadow] = np.nan

# write layers to file for ingestion into next phase of the analysis.
coords = {'y':B04_10m.coords['y'].values,'x':B04_10m.coords['x'].values,'band':np.arange(d10_bands.shape[-1])+1}
outarray=xr.DataArray(d10_bands[0],coords=coords,dims=('y','x','band'))
outfile = '../DATA/Sentinel2/processed_bands_and_derivatives/%s_sentinel2_bands_10m' % site
io.write_xarray_to_GeoTiff(outarray,outfile,EPSG_CODE = '32630')
        diff_std_winter = vh_mean.copy(deep=True)

        vh_mean_winter.values = rescale(
            np.nanmean(vh_stack[winter_mask], axis=0))
        vh_std_winter.values = np.nanstd(vh_stack[winter_mask], axis=0)
        vv_mean_winter.values = rescale(
            np.nanmean(vv_stack[winter_mask], axis=0))
        vv_std_winter.values = np.nanstd(vv_stack[winter_mask], axis=0)
        diff_mean_winter.values = rescale(
            np.nanmean(diff_stack[winter_mask], axis=0))
        diff_std_winter.values = np.nanstd(diff_stack[winter_mask], axis=0)

        print('\t Writing to geotiff')
        vv_mean_file = '%s/S1A__IW__%s_%i_VV_tnr_bnr_Orb_Cal_TF_TC_dB_temporal_mean_summer.tif' % (
            outdir, orbit, year)
        io.write_xarray_to_GeoTiff(vv_mean_summer[0], vv_mean_file)
        vh_mean_file = '%s/S1A__IW__%s_%i_VH_tnr_bnr_Orb_Cal_TF_TC_dB_temporal_mean_summer.tif' % (
            outdir, orbit, year)
        io.write_xarray_to_GeoTiff(vh_mean_summer[0], vh_mean_file)
        diff_mean_file = '%s/S1A__IW__%s_%i_diffVVVH_tnr_bnr_Orb_Cal_TF_TC_dB_temporal_mean_summer.tif' % (
            outdir, orbit, year)
        io.write_xarray_to_GeoTiff(diff_mean_summer[0], diff_mean_file)
        vv_std_file = '%s/S1A__IW__%s_%i_VV_tnr_bnr_Orb_Cal_TF_TC_dB_temporal_stdev_summer.tif' % (
            outdir, orbit, year)
        io.write_xarray_to_GeoTiff(vv_std_summer[0], vv_std_file)
        vh_std_file = '%s/S1A__IW__%s_%i_VH_tnr_bnr_Orb_Cal_TF_TC_dB_temporal_stdev_summer.tif' % (
            outdir, orbit, year)
        io.write_xarray_to_GeoTiff(vh_std_summer[0], vh_std_file)
        diff_std_file = '%s/S1A__IW__%s_%i_diffVVVH_tnr_bnr_Orb_Cal_TF_TC_dB_temporal_stdev_summer.tif' % (
            outdir, orbit, year)
        io.write_xarray_to_GeoTiff(diff_std_summer[0], diff_std_file)
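        # Hedged sketch: the winter layers computed above would be written the same way as
        # the summer block (the original snippet is truncated here), e.g. for the VV mean:
        vv_mean_winter_file = '%s/S1A__IW__%s_%i_VV_tnr_bnr_Orb_Cal_TF_TC_dB_temporal_mean_winter.tif' % (
            outdir, orbit, year)
        io.write_xarray_to_GeoTiff(vv_mean_winter[0], vv_mean_winter_file)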
    lm = LinearRegression()
    lm.fit(X, y)
    print("calibration score: %.02f" % lm.score(X, y))

    full2 = xr.open_rasterio('%s%s2_band%i.tif' %
                             (path2full, site_id, band))[0]
    mask = np.isfinite(full2.values)
    X = full2.values[mask].reshape(mask.sum(), 1)

    full2_new = io.copy_xarray_template(full2)
    full2_new.values[mask] = lm.predict(X)
    # clip negative predictions to zero
    full2_new.values[mask & (full2_new.values < 0)] = 0

    outfile_prefix = ('%s%s2_band%i_corrected' %
                      (path2processed, site_id, band))
    io.write_xarray_to_GeoTiff(full2_new, outfile_prefix)

    # use gdal to merge and warp to extent
    os.system(
        "gdal_merge.py -a_nodata -9999 -ot float32 -o %s%s_b%s_temp.tif %s%s2_band%i_corrected.tif %s%s1_band%i.tif"
        % (path2final, site_id, band, path2processed, site_id, band, path2full,
           site_id, band))
    os.system(
        "gdalwarp -overwrite -te %f %f %f %f -tr %f %f -r near %s%s_b%i_temp.tif %s%s_b%i_10m.tif"
        % (W, S, E, N, xres, yres, path2final, site_id, band, path2final,
           site_id, band))
    os.system("rm %s%s_b%i_temp.tif" % (path2final, site_id, band))
    os.system("chmod +777 %s%s_b%i_10m.tif" % (path2final, site_id, band))

# calculate NDVI: ndvi = (nir - red) / (nir + red)
red = xr.open_rasterio('%s%s_b3_10m.tif' % (path2final, site_id))[0]
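# Hedged sketch (not in the original snippet, which is truncated here): completing the NDVI
# calculation noted above. The NIR band index (b7) and the output name are assumptions.
nir = xr.open_rasterio('%s%s_b7_10m.tif' % (path2final, site_id))[0]   # hypothetical NIR band
ndvi = io.copy_xarray_template(red)
ndvi.values = (nir.values - red.values) / (nir.values + red.values)
io.write_xarray_to_GeoTiff(ndvi, '%s%s_ndvi_10m' % (path2final, site_id))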
Example #8
agb_stack = np.zeros((N_iter,rows,cols))
#pca_predictors = pca.transform(predictors)
#predictors=None
for ii, agb_file in enumerate(agb_list):
    print('Iteration %i of %i' % (ii+1,N_iter))
    rf_dict = joblib.load('%s%s_%s_optimised_rfbc_sentinel_alos_lidar_%s.pkl' % (path2alg,site_id,version,str(ii+1).zfill(3)))
    agb_mod = rff.rfbc_predict(rf_dict['rf1'],rf_dict['rf2'],predictors)

    #let's copy to a new xarray for AGBpot
    agb = io.copy_xarray_template(template)
    agb.values[forest_mask] = agb_mod.copy()
    agb.values[agb.values==-9999]=np.nan
    agb.values[agb.values<0]=0

    outfile_prefix = '%s%s_%s_rfbc_agb_upscaled_%s' % (path2output,site_id,version,str(ii+1).zfill(3))
    io.write_xarray_to_GeoTiff(agb,outfile_prefix)

    agb_stack[ii] = agb.values

# summary arrays
agb_med = io.copy_xarray_template(template)
agb_med.values = np.median(agb_stack,axis=0)
agb_med.values[agb_med.values==-9999]=np.nan
outfile_prefix = '%s%s_%s_rfbc_agb_upscaled_median' % (path2output,site_id,version)
io.write_xarray_to_GeoTiff(agb_med,outfile_prefix)

agb_upper = io.copy_xarray_template(template)
agb_upper.values = np.percentile(agb_stack,97.5,axis=0)
agb_upper.values[agb_upper.values==-9999]=np.nan
outfile_prefix = '%s%s_%s_rfbc_agb_upscaled_upper' % (path2output,site_id,version)
io.write_xarray_to_GeoTiff(agb_upper,outfile_prefix)
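# Hedged sketch: the matching 2.5th-percentile (lower-bound) layer, by symmetry with the
# median and upper-bound blocks above (the original snippet is truncated here).
agb_lower = io.copy_xarray_template(template)
agb_lower.values = np.percentile(agb_stack,2.5,axis=0)
agb_lower.values[agb_lower.values==-9999]=np.nan
outfile_prefix = '%s%s_%s_rfbc_agb_upscaled_lower' % (path2output,site_id,version)
io.write_xarray_to_GeoTiff(agb_lower,outfile_prefix)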
print('Generating reference maps from lidar data')
# load DSM and DTM
dtm = xr.open_rasterio(dtm_file).sel(band=1)
dsm = xr.open_rasterio(dsm_file).sel(band=1)
dtm.values[dtm.values == -9999.] = np.nan
dsm.values[dsm.values == -9999.] = np.nan

# TCH
tch = dsm - dtm
tch.values[tch.values < 0] = 0

# Cover at 2m height
cov_2m = tch.copy(deep=True)
cov_2m.values = (tch.values >= 2).astype('float')
cov_2m.values[np.isnan(tch.values)] = np.nan
io.write_xarray_to_GeoTiff(cov_2m, '%s_cover2m_1m' % site)

# - Cover at 0.1m height
cov_10cm = tch.copy(deep=True)
cov_10cm.values = (tch.values >= 0.1).astype('float')
cov_10cm.values[np.isnan(tch.values)] = np.nan
io.write_xarray_to_GeoTiff(cov_10cm, '%s_cover10cm_1m' % site)

# Identify "trees"
# - define as contiguous regions with canopy cover >2m comprising >8 pixels
#   (~3m diameter)
# - use two-step procedure (see the sketch below):
#       (i)  fill "holes"
#       (ii) remove small objects, using direct connectivity only (connections
#            in either row or column, ignoring diagonals)
trees = cov_2m.copy(deep=True)
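# Hedged sketch of the two-step procedure described in the comment above, assuming
# scipy.ndimage and skimage.morphology are available (output filename is illustrative):
from scipy import ndimage as nd
from skimage.morphology import remove_small_objects

canopy = nd.binary_fill_holes(cov_2m.values == 1)                  # (i)  fill holes
canopy = remove_small_objects(canopy, min_size=8, connectivity=1)  # (ii) drop objects < 8 px,
                                                                   #      4-connectivity (no diagonals)
trees.values = canopy.astype('float')
trees.values[np.isnan(tch.values)] = np.nan
io.write_xarray_to_GeoTiff(trees, '%s_trees_1m' % site)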