def iterate(self):
    d = self.d
    f0 = self.f0
    nbands = self.nbands
    nsets = self.nsets
    M = self.M
    det = self.det
    s = self.s
    # fk is the angular part
    # rho/f0 - 1 = f1 k1 + f2 k2
    fk = ma.array(np.zeros_like(d["refl"]), mask=d["mask"])
    for i in xrange(nsets):
        fk[:, i] = d["refl"][:, i] / f0 - 1.0
    f1 = ma.array(np.zeros_like(f0[:, 0, :, :]))
    f2 = ma.array(np.zeros_like(f0[:, 0, :, :]))
    P = [f1, f2]
    # solve for an estimate of f1, f2 given f0
    # for each band
    for i in xrange(nbands):
        V = np.zeros(2).astype(object)
        # for each sample
        for j in xrange(nsets):
            V[0] += np.sum(fk[i, j] * d["Ross"][j], axis=0)
            V[1] += np.sum(fk[i, j] * d["Li"][j], axis=0)
        for k in xrange(2):
            P[k][i] = (V[0] * M[k][0] + V[1] * M[k][1]) / det
    # now model 1 + f1 k1 + f2 k2
    model = np.zeros_like(d["refl"])
    for i in xrange(nsets):        # samples
        for j in xrange(nbands):   # bands
            model[j, i] = 1.0 + P[0][j] * d["Ross"][i] + P[1][j] * d["Li"][i]
    model = ma.array(model, mask=d["mask"])
    # re-estimate f0 = rho / (1 + f1 k1 + f2 k2)
    # and average over all samples for a given band / location / day
    f0_old = f0
    f0 = (d["refl"] / model).mean(axis=1)
    # and smooth it
    sz = smoothn(f0, z0=f0_old, axis=1, s=s, isrobust=self.isrobust)
    s = sz[1]
    f0 = sz[0]
    self.outlier_wt = sz[3]
    self.f0 = f0
    self.sz = sz
    self.s = s
    self.model = model
    self.P = P

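# Illustrative sketch (not part of the class above): the per-band solve in
# iterate() is the closed-form solution of a 2x2 system of normal equations,
#     [k11 k12] [f1]   [V0]
#     [k12 k22] [f2] = [V1]
# inverted with the adjugate matrix M and determinant det, i.e. f = M V / det.
# All names below (ross, li, fk_demo, ...) are made up purely for illustration.
def _demo_kernel_solve():
    import numpy as np
    rng = np.random.RandomState(0)
    ross = rng.randn(100)                       # Ross-Thick kernel values for one pixel
    li = rng.randn(100)                         # Li-Sparse kernel values for one pixel
    fk_demo = 0.3 * ross - 0.1 * li + 0.01 * rng.randn(100)   # plays the role of rho/f0 - 1
    k11, k22, k12 = (ross * ross).sum(), (li * li).sum(), (ross * li).sum()
    det = k11 * k22 - k12 * k12
    V0, V1 = (fk_demo * ross).sum(), (fk_demo * li).sum()
    f1 = (V0 * k22 - V1 * k12) / det            # first row of the adjugate: [k22, -k12]
    f2 = (-V0 * k12 + V1 * k11) / det           # second row of the adjugate: [-k12, k11]
    return f1, f2                               # should recover roughly 0.3 and -0.1
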
def __init__(self, d, s=None, isrobust=True, f0=None):
    # initialise
    d = d.copy()
    mask = d["mask"][0]
    d["Ross"] = ma.array(d["Ross"], mask=mask)
    d["Li"] = ma.array(d["Li"], mask=mask)
    # form kernel estimation matrix terms
    k11 = np.sum(np.sum(d["Ross"] * d["Ross"], axis=1), axis=0)
    k22 = np.sum(np.sum(d["Li"] * d["Li"], axis=1), axis=0)
    k12 = np.sum(np.sum(d["Ross"] * d["Li"], axis=1), axis=0)
    k1 = np.sum(np.sum(d["Ross"], axis=1), axis=0)
    k2 = np.sum(np.sum(d["Li"], axis=1), axis=0)
    det = k11 * k22 - k12 * k12
    # det = N * det1 - k1*k1*k22 - k2*k2*k11 + 2 * k1*k2*k12
    M = np.zeros((2, 2)).astype(object)
    M[0][0] = k22
    M[0][1] = M[1][0] = -k12
    M[1][1] = k11
    # initial estimate of f0
    d["mask"] = (d["refl"] > 1.0) | (d["refl"] < 0.0) | d["mask"]
    d["refl"] = ma.array(d["refl"], mask=d["mask"])
    if f0 is None:
        f0 = d["refl"].mean(axis=1)
        sz = smoothn(f0, axis=1, s=s, isrobust=isrobust)
        self.s = sz[1]
        self.f0 = sz[0]
        self.sz = sz
        self.outlier_wt = sz[3]
    else:
        self.f0 = f0
        self.s = s  # keep the smoothing parameter so that iterate() can read self.s
    self.nsets = d["refl"].shape[1]
    self.nbands = d["refl"].shape[0]
    self.M = M
    self.det = det
    self.mask = mask
    self.d = d
    self.isrobust = isrobust
    self.model = np.ones_like(d["refl"])

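# Minimal sketch of the initialisation above on synthetic data: out-of-range
# reflectances are masked, the first f0 estimate is the mean over samples, and
# smoothn (same call pattern as above) smooths it along the time axis. The
# array shapes and the `from smoothn import smoothn` import are assumptions;
# only the call pattern is taken from the code above.
def _demo_initial_f0():
    import numpy as np
    import numpy.ma as ma
    from smoothn import smoothn                 # assumed location of smoothn
    nbands, nsets, ndays, ny, nx = 2, 4, 30, 5, 5
    refl = np.random.rand(nbands, nsets, ndays, ny, nx)
    refl[0, 0, 3] = 1.5                         # plant an out-of-range value
    mask = (refl > 1.0) | (refl < 0.0)
    refl = ma.array(refl, mask=mask)
    f0 = refl.mean(axis=1)                      # average over the sample axis
    sz = smoothn(f0, axis=1, s=None, isrobust=True)
    return sz[0], sz[1]                         # smoothed f0 and the chosen s
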
def S2_aot(self):
    self.wl = 0.490, 0.560, 0.665, 0.842, 1.610, 2.190, 0.865
    self.bands = 'B02', 'B03', 'B04', 'B08', 'B11', 'B12', 'B8A'
    m = mgrs.MGRS()
    mg_coor = m.toMGRS(self.lat, self.lon, MGRSPrecision=4)
    self.place = mg_coor[:5]
    self.Hfile = os.getcwd() + '/s_data/%s/%s/%s/%d/%d/%d/0/' % (self.S2_fname[:2], self.S2_fname[2],
                                                                 self.S2_fname[3:5], self.year,
                                                                 self.S2_month, self.S2_day)
    self.Lfile = glob.glob('%s/MCD43A1.A%d%03d.h%02dv%02d.006.*.hdf' %
                           (self.mdata, self.year, self.S2_doy, self.h, self.v))[0]
    mete = readxml('%smetadata.xml' % self.Hfile)
    self.sza = np.zeros(7)
    self.sza[:] = mete['mSz']
    self.saa = self.sza.copy()
    self.saa[:] = mete['mSa']
    # sometimes not all of the angles are available
    try:
        self.vza = (mete['mVz'])[[1, 2, 3, 7, 11, 12, 8], ]
        self.vaa = (mete['mVa'])[[1, 2, 3, 7, 11, 12, 8], ]
    except:
        self.vza = np.repeat(np.nanmean(mete['mVz']), 7)
        self.vaa = np.repeat(np.nanmean(mete['mVa']), 7)
    ll, ul, lr, ur = mg.toWgs(u'%s0000000000' % self.S2_fname), mg.toWgs(u'%s0000099999' % self.S2_fname), \
                     mg.toWgs(u'%s9999900000' % self.S2_fname), mg.toWgs(u'%s9999999999' % self.S2_fname)
    self.dic = {'LL_LAT': ll[0], 'LL_LON': ll[1],
                'LR_LAT': lr[0], 'LR_LON': lr[1],
                'UL_LAT': ul[0], 'UL_LON': ul[1],
                'UR_LAT': ur[0], 'UR_LON': ur[1]}
    self.corners = 10000, 10000
    # self.L_inds, self.H_inds = get_coords(self.lat, self.lon)
    self.L_inds, self.H_inds = MSL_geo_trans(self.lat, self.lon, self.dic, self.corners)
    self.Lx, self.Ly = self.L_inds
    self.Hx, self.Hy = self.H_inds
    if glob.glob(self.Lfile + '_S2_aoi_brdf.pkl') == []:
        self.brdf, self.qa = get_brdf_six(self.Lfile,
                                          (self.sza, self.vza, self.vaa - self.saa),
                                          bands=[3, 4, 1, 2, 2, 6, 7],
                                          flag=None, Linds=self.L_inds)
        pkl.dump(np.array([self.brdf, self.qa]),
                 open(self.Lfile + '_S2_aoi_brdf.pkl', 'w'))
    else:
        self.brdf, self.qa = pkl.load(open(self.Lfile + '_S2_aoi_brdf.pkl', 'r'))
    if glob.glob(self.Hfile + 'cloud.tif') == []:
        cl = classification(fhead=self.Hfile, bands=(2, 3, 4, 8, 11, 12, 13), bounds=None)
        cl.Get_cm_p()
        self.cloud = cl.cm.copy()
        tifffile.imsave(self.Hfile + 'cloud.tif', self.cloud.astype(int))
        self.H_data = np.repeat(np.repeat(cl.b12, 2, axis=1), 2, axis=0)
        del cl
    else:
        self.cloud = tifffile.imread(self.Hfile + 'cloud.tif')
    struct = ndimage.generate_binary_structure(2, 2)
    self.dia_cloud = ndimage.binary_dilation(self.cloud.astype(bool),
                                             structure=struct, iterations=60).astype(bool)
    shape = (10000, 10000)
    xstd, ystd, angle, xs, ys = self.S2_psf[:5]
    self.shx, self.shy = (self.Hx + xs).astype(int), (self.Hy + ys).astype(int)
    self.val = (self.Hx + xs < shape[0]) & (self.Hy + ys < shape[1]) & \
               (self.Hx + xs > 0) & (self.Hy + ys > 0)
    self.ker = self.gaussian(xstd, ystd, angle, True)
    retval = parmap(self.S2_get_to_cor, self.bands, nprocs=len(self.bands))
    self.S2_mask = np.array(retval)[:, 1, :].astype(bool)
    self.S2_data = np.array(retval)[:, 0, :]
    Mcomb_mask = np.all(self.qa < 2, axis=0)
    Scomb_mask = np.all(self.S2_mask, axis=0)
    s2 = self.S2_data.copy()
    br = self.brdf.copy()
    s2[:, (~Scomb_mask) | (~Mcomb_mask[self.val])] = np.nan
    s2[np.isnan(s2)], br[np.isnan(br)] = -9999999, -9999999
    mas = np.all((br[:, self.val] > 0) & (br[:, self.val] < 1) &
                 (s2 > 0) & (s2 < 1), axis=0)
    self.to_cor = self.shx[self.val][mas], self.shy[self.val][mas], s2[:, mas], br[:, self.val][:, mas]
    dif = self.to_cor[3] - self.to_cor[2]
    u, d = dif.mean(axis=1) + 3 * dif.std(axis=1), dif.mean(axis=1) - 3 * dif.std(axis=1)
    in_mask = np.all(np.array([(dif[i] > d[i]) & (dif[i] < u[i])
                               for i in range(len(dif))]), axis=0)
    self.to_cor = self.shx[self.val][mas][in_mask], self.shy[self.val][mas][in_mask], \
                  s2[:, mas][:, in_mask], br[:, self.val][:, mas][:, in_mask]
    self.qa = magic ** (self.qa[:, self.val][:, mas][:, in_mask])
    self.emus = parallel_rw_pkl(None, '6S_emulation_S2_', 'r')
    self.w = (np.array(self.wl)) ** (-self._alpha)
    self.w = self.w / self.w.sum()
    self.patch_pixs = 300
    patches = []
    self.inds = []
    indx, indy = self.to_cor[:2]
    self.post_uncs = []
    for i in np.arange(0, np.ceil(shape[0] / self.patch_pixs)):
        for j in np.arange(0, np.ceil(shape[1] / self.patch_pixs)):
            patch_mask = (indx > i * self.patch_pixs) & (indx < (i + 1) * self.patch_pixs) & \
                         (indy > j * self.patch_pixs) & (indy < (j + 1) * self.patch_pixs)
            if patch_mask.sum() == 0:
                patches.append(([0, 0, 0], 0))
                self.inds.append([i, j])
            else:
                patches.append(self._S2_opt(i, j, patch_mask))
                self.inds.append([i, j])
    self.inds = np.array(self.inds)
    paras = np.array([i[0] for i in patches])
    cost = np.array([i[1] for i in patches]).reshape(int(self.inds[:, 0].max() + 1),
                                                     int(self.inds[:, 1].max() + 1))
    para_names = 'aot', 'twv', 'tco'
    masks = []
    para_maps = []
    smed_paras = []
    unc_maps = []
    for _ in range(3):
        mask = (np.array(paras[:, _]).reshape(cost.shape) == 0) | np.isnan(cost)
        masks.append(mask)
        unc = np.array([np.r_[np.array([i[0], i[1]]), i[2][_]] for i in self.post_uncs])
        unc_map = np.zeros_like(cost)
        unc_map[:] = np.nan
        unc_map[unc[:, 0].astype(int), unc[:, 1].astype(int)] = unc[:, 2]
        unc_maps.append(unc_map)
        w = np.zeros_like(cost)
        w[~mask] = 1. / unc_map[~mask]
        para_map = np.zeros_like(cost)
        para_map[mask] = paras[:, _].reshape(cost.shape)[~mask].mean()
        para_map[~mask] = paras[:, _].reshape(cost.shape)[~mask]
        para_maps.append(para_map)
        smed_para = smoothn(para_map, s=0.05, W=w ** 2, isrobust=True)[0]
        smed_paras.append(smed_para)
        tifffile.imsave(self.Hfile + '%s.tiff' % para_names[_], smed_para)
    self.masks, self.para_maps, self.unc_maps, self.smed_paras = masks, para_maps, unc_maps, smed_paras
    return unc_maps, smed_paras, para_maps

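# Small sketch of the per-parameter post-processing above: patches that failed
# (zero parameters or NaN cost) are filled with the mean of the valid patches,
# weights are the inverse of the per-patch posterior uncertainty, and smoothn
# (called as above, with W as weights) produces the final map. The arrays and
# the `from smoothn import smoothn` import below are assumptions for
# illustration only.
def _demo_parameter_map_smoothing():
    import numpy as np
    from smoothn import smoothn                 # assumed location of smoothn
    shape = (34, 34)                            # ~ceil(10000 / 300) patches per side
    para_map = np.random.rand(*shape) * 0.4     # e.g. raw per-patch AOT estimates
    unc_map = 0.01 + 0.05 * np.random.rand(*shape)   # per-patch posterior uncertainty
    mask = np.zeros(shape, dtype=bool)
    mask[::7, ::5] = True                       # pretend these patches failed
    para_map[mask] = para_map[~mask].mean()     # fill gaps with the mean of valid patches
    w = np.zeros(shape)
    w[~mask] = 1.0 / unc_map[~mask]             # inverse-uncertainty weights
    return smoothn(para_map, s=0.05, W=w ** 2, isrobust=True)[0]
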
def modis_aot(self):
    '''
    Load data and solve the aot problem
    self.bands = 'B02', 'B03', 'B04', 'B08', 'B11', 'B12', 'B8A'
    m = mgrs.MGRS()
    mg_coor = m.toMGRS(self.lat, self.lon, MGRSPrecision=4)
    self.place = mg_coor[:5]
    self.Hfile = os.getcwd()+'/s_data/%s/%s/%s/%d/%d/%d/0/'%(self.S2_fname[:2], self.S2_fname[2],
                                                             self.S2_fname[3:5], self.year, self.S2_month, self.S2_day)
    '''
    self.wl = 0.645, 0.8585, 0.469, 0.555, 1.24, 1.64, 2.13
    # vza, sza, vaa, saa
    modis_toa, modis_angles = grab_modis_toa(
        year=2006, doy=200, verbose=True,
        mcd43file='/home/ucfafyi/DATA/S2_MODIS/m_data/MCD43A1.A2016128.h11v04.006.2016180234038.hdf',
        directory_l1b="/data/selene/ucfajlg/Bondville_MODIS/THERMAL")
    self.mcd43_f = glob.glob('%s/MCD43A1.A%d%03d.h%02dv%02d.006.*.hdf' %
                             (self.mdata, self.year, self.doy, self.h, self.v))[0]
    if glob.glob(self.mcd43_f + '_S2_aoi_brdf.pkl') == []:
        self.brdf, self.qa = get_brdf_six(self.mcd43_f,
                                          (modis_angles[1], modis_angles[0],
                                           modis_angles[2] - modis_angles[3]),
                                          bands=[3, 4, 1, 2, 2, 6, 7],
                                          flag=None, Linds=None)
        pkl.dump(np.array([self.brdf, self.qa]),
                 open(self.mcd43_f + '_S2_aoi_brdf.pkl', 'w'))
    else:
        self.brdf, self.qa = pkl.load(open(self.mcd43_f + '_S2_aoi_brdf.pkl', 'r'))
    '''
    if glob.glob(self.Hfile+'cloud.tif')==[]:
        cl = classification(fhead = self.Hfile, bands = (2,3,4,8,11,12,13), bounds = None)
        cl.Get_cm_p()
        self.cloud = cl.cm.copy()
        tifffile.imsave(self.Hfile+'cloud.tif', self.cloud.astype(int))
        self.H_data = np.repeat(np.repeat(cl.b12, 2, axis=1), 2, axis=0)
        del cl
    else:
        self.cloud = tifffile.imread(self.Hfile+'cloud.tif')
    struct = ndimage.generate_binary_structure(2, 2)
    self.dia_cloud = ndimage.binary_dilation(self.cloud.astype(bool), structure=struct,
                                             iterations=60).astype(bool)
    '''
    # NOTE: the remainder of this method still mirrors S2_aot and references
    # S2-specific attributes (self.Hx, self.S2_psf, self.S2_get_to_cor, ...).
    shape = (10000, 10000)
    xstd, ystd, angle, xs, ys = self.S2_psf[:5]
    self.shx, self.shy = (self.Hx + xs).astype(int), (self.Hy + ys).astype(int)
    self.val = (self.Hx + xs < shape[0]) & (self.Hy + ys < shape[1]) & \
               (self.Hx + xs > 0) & (self.Hy + ys > 0)
    self.ker = self.gaussian(xstd, ystd, angle, True)
    retval = parmap(self.S2_get_to_cor, self.bands, nprocs=len(self.bands))
    self.S2_mask = np.array(retval)[:, 1, :].astype(bool)
    self.S2_data = np.array(retval)[:, 0, :]
    Mcomb_mask = np.all(self.qa < 2, axis=0)
    Scomb_mask = np.all(self.S2_mask, axis=0)
    s2 = self.S2_data.copy()
    br = self.brdf.copy()
    s2[:, (~Scomb_mask) | (~Mcomb_mask[self.val])] = np.nan
    s2[np.isnan(s2)], br[np.isnan(br)] = -9999999, -9999999
    mas = np.all((br[:, self.val] > 0) & (br[:, self.val] < 1) &
                 (s2 > 0) & (s2 < 1), axis=0)
    self.to_cor = self.shx[self.val][mas], self.shy[self.val][mas], s2[:, mas], br[:, self.val][:, mas]
    dif = self.to_cor[3] - self.to_cor[2]
    u, d = dif.mean(axis=1) + 3 * dif.std(axis=1), dif.mean(axis=1) - 3 * dif.std(axis=1)
    in_mask = np.all(np.array([(dif[i] > d[i]) & (dif[i] < u[i])
                               for i in range(len(dif))]), axis=0)
    self.to_cor = self.shx[self.val][mas][in_mask], self.shy[self.val][mas][in_mask], \
                  s2[:, mas][:, in_mask], br[:, self.val][:, mas][:, in_mask]
    self.qa = magic ** (self.qa[:, self.val][:, mas][:, in_mask])
    self.emus = parallel_rw_pkl(None, '6S_emulation_S2_', 'r')
    self.w = (np.array(self.wl)) ** (-self._alpha)
    self.w = self.w / self.w.sum()
    # unfinished patch extraction over the MODIS grid; `r`, `t` and `angles` are
    # not defined in this method and presumably refer to the TOA data and angles
    # loaded by grab_modis_toa above
    patch_pixel = 240
    patches = 2400 / 240
    t = 1
    for i in xrange(patches):
        for j in xrange(patches):
            patch_toa = r[t][:, i * patch_pixel:(i + 1) * patch_pixel,
                             j * patch_pixel:(j + 1) * patch_pixel]
            patch_boa = self.brdf[:, i * patch_pixel:(i + 1) * patch_pixel,
                                  j * patch_pixel:(j + 1) * patch_pixel]
            patch_ang = angles[:, i * patch_pixel:(i + 1) * patch_pixel,
                               j * patch_pixel:(j + 1) * patch_pixel]
    self.patch_pixs = 300
    patches = []
    self.inds = []
    indx, indy = self.to_cor[:2]
    self.post_uncs = []
    for i in np.arange(0, np.ceil(shape[0] / self.patch_pixs)):
        for j in np.arange(0, np.ceil(shape[1] / self.patch_pixs)):
            patch_mask = (indx > i * self.patch_pixs) & (indx < (i + 1) * self.patch_pixs) & \
                         (indy > j * self.patch_pixs) & (indy < (j + 1) * self.patch_pixs)
            if patch_mask.sum() == 0:
                patches.append(([0, 0, 0], 0))
                self.inds.append([i, j])
            else:
                patches.append(self._S2_opt(i, j, patch_mask))
                self.inds.append([i, j])
    self.inds = np.array(self.inds)
    paras = np.array([i[0] for i in patches])
    cost = np.array([i[1] for i in patches]).reshape(int(self.inds[:, 0].max() + 1),
                                                     int(self.inds[:, 1].max() + 1))
    para_names = 'aot', 'twv', 'tco'
    masks = []
    para_maps = []
    smed_paras = []
    unc_maps = []
    for _ in range(3):
        mask = (np.array(paras[:, _]).reshape(cost.shape) == 0) | np.isnan(cost)
        masks.append(mask)
        unc = np.array([np.r_[np.array([i[0], i[1]]), i[2][_]] for i in self.post_uncs])
        unc_map = np.zeros_like(cost)
        unc_map[:] = np.nan
        unc_map[unc[:, 0].astype(int), unc[:, 1].astype(int)] = unc[:, 2]
        unc_maps.append(unc_map)
        w = np.zeros_like(cost)
        w[~mask] = 1. / unc_map[~mask]
        para_map = np.zeros_like(cost)
        para_map[mask] = paras[:, _].reshape(cost.shape)[~mask].mean()
        para_map[~mask] = paras[:, _].reshape(cost.shape)[~mask]
        para_maps.append(para_map)
        smed_para = smoothn(para_map, s=0.05, W=w ** 2, isrobust=True)[0]
        smed_paras.append(smed_para)
        tifffile.imsave(self.Hfile + '%s.tiff' % para_names[_], smed_para)
    self.masks, self.para_maps, self.unc_maps, self.smed_paras = masks, para_maps, unc_maps, smed_paras
    return unc_maps, smed_paras, para_maps

def L8_aot(self):
    self.wl = np.array([482.04, 561.41, 654.59, 864.67, 1608.86, 2200.73]) / 1000
    self.bands = [2, 3, 4, 5, 6, 7]
    pr = get_wrs(self.lat, self.lon)
    self.path, self.row = pr[0]['path'], pr[0]['row']
    self.Hfile = directory + 'l_data/%s_toa_' % (self.L8_fname)
    self.Lfile = glob.glob('%s/MCD43A1.A%d%03d.h%02dv%02d.006.*.hdf' %
                           (self.mdata, self.year, self.L8_doy, self.h, self.v))[0]
    self.sza, self.saa, self.vza, self.vaa, self.dic, self.corners = self.read_meta(
        self.Hfile, self.path, self.row)
    self.L_inds, self.H_inds = MSL_geo_trans(self.lat, self.lon, self.dic, self.corners)
    self.Lx, self.Ly = self.L_inds
    self.Hx, self.Hy = self.H_inds
    self.angles = np.zeros((3, 6))
    self.angles[0, :] = self.sza
    self.angles[1, :] = self.vza
    self.angles[2, :] = self.vaa - self.saa
    if glob.glob(self.Lfile + '_L8_aoi_brdf.pkl') == []:
        self.brdf, self.qa = get_brdf_six(self.Lfile,
                                          (self.angles[0], self.angles[1], self.angles[2]),
                                          bands=[3, 4, 1, 2, 6, 7],
                                          flag=None, Linds=self.L_inds)
        pkl.dump(np.array([self.brdf, self.qa]),
                 open(self.Lfile + '_L8_aoi_brdf.pkl', 'w'))
    else:
        self.brdf, self.qa = pkl.load(open(self.Lfile + '_L8_aoi_brdf.pkl', 'r'))
    cloud = gdal.Open(self.Hfile[:-5] + '_cfmask.tif').ReadAsArray()
    cl_mask = cloud == 4  # cloud pixels; strictest way is to set the clear pixels with cloud==0
    struct = ndimage.generate_binary_structure(2, 2)
    self.dia_cloud = ndimage.binary_dilation(cl_mask, structure=struct,
                                             iterations=20).astype(cl_mask.dtype)
    shape = self.dia_cloud.shape
    xstd, ystd, angle, xs, ys = self.L8_psf[:5]
    self.shx, self.shy = (self.Hx + xs).astype(int), (self.Hy + ys).astype(int)
    self.val = (self.Hx + xs < shape[0]) & (self.Hy + ys < shape[1]) & \
               (self.Hx + xs > 0) & (self.Hy + ys > 0)
    self.ker = self.gaussian(xstd, ystd, angle, True)
    retval = parmap(self.L8_get_to_cor, self.bands, nprocs=len(self.bands))
    self.L8_mask = np.array(retval)[:, 1, :].astype(bool)
    self.L8_data = np.array(retval)[:, 0, :]
    Mcomb_mask = np.all(self.qa < 2, axis=0)
    Lcomb_mask = np.all(self.L8_mask, axis=0)
    l8 = self.L8_data.copy()
    br = self.brdf.copy()
    l8[:, (~Lcomb_mask) | (~Mcomb_mask[self.val])] = np.nan
    l8[np.isnan(l8)], br[np.isnan(br)] = -9999999, -9999999
    mas = np.all((br[:, self.val] > 0) & (br[:, self.val] < 1) &
                 (l8 > 0) & (l8 < 1), axis=0)
    self.to_cor = self.shx[self.val][mas], self.shy[self.val][mas], l8[:, mas], br[:, self.val][:, mas]
    dif = self.to_cor[3] - self.to_cor[2]
    u, d = dif.mean(axis=1) + 3 * dif.std(axis=1), dif.mean(axis=1) - 3 * dif.std(axis=1)
    in_mask = np.all(np.array([(dif[i] > d[i]) & (dif[i] < u[i])
                               for i in range(len(dif))]), axis=0)
    self.to_cor = self.shx[self.val][mas][in_mask], self.shy[self.val][mas][in_mask], \
                  l8[:, mas][:, in_mask], br[:, self.val][:, mas][:, in_mask]
    self.qa = magic ** (self.qa[:, self.val][:, mas][:, in_mask])
    self.emus = parallel_rw_pkl(None, '6S_emulation_L8_', 'r')
    self.w = (np.array(self.wl)) ** (-self._alpha)
    self.w = self.w / self.w.sum()
    self.patch_pixs = 100.
    patches = []
    self.inds = []
    indx, indy = self.to_cor[:2]
    self.post_uncs = []
    for i in np.arange(0, np.ceil(shape[0] / self.patch_pixs)):
        for j in np.arange(0, np.ceil(shape[1] / self.patch_pixs)):
            patch_mask = (indx > i * self.patch_pixs) & (indx < (i + 1) * self.patch_pixs) & \
                         (indy > j * self.patch_pixs) & (indy < (j + 1) * self.patch_pixs)
            if patch_mask.sum() == 0:
                patches.append(([0, 0, 0], 0))
                self.inds.append([i, j])
            else:
                patches.append(self._l8_opt(i, j, patch_mask))
                self.inds.append([i, j])
    self.inds = np.array(self.inds)
    paras = np.array([i[0] for i in patches])
    cost = np.array([i[1] for i in patches]).reshape(int(self.inds[:, 0].max() + 1),
                                                     int(self.inds[:, 1].max() + 1))
    para_names = 'aot', 'twv', 'tco'
    masks = []
    para_maps = []
    smed_paras = []
    unc_maps = []
    for _ in range(3):
        mask = (np.array(paras[:, _]).reshape(cost.shape) == 0) | np.isnan(cost)
        masks.append(mask)
        unc = np.array([np.r_[np.array([i[0], i[1]]), i[2][_]] for i in self.post_uncs])
        unc_map = np.zeros_like(cost)
        unc_map[:] = np.nan
        unc_map[unc[:, 0].astype(int), unc[:, 1].astype(int)] = unc[:, 2]
        unc_maps.append(unc_map)
        w = np.zeros_like(cost)
        w[~mask] = 1. / unc_map[~mask]
        para_map = np.zeros_like(cost)
        para_map[mask] = paras[:, _].reshape(cost.shape)[~mask].mean()
        para_map[~mask] = paras[:, _].reshape(cost.shape)[~mask]
        para_maps.append(para_map)
        smed_para = smoothn(para_map, s=0.05, W=w ** 2, isrobust=True)[0]
        smed_paras.append(smed_para)
        tifffile.imsave(self.Hfile + '%s.tiff' % para_names[_], smed_para)
    self.masks, self.para_maps, self.unc_maps = masks, para_maps, unc_maps
    return unc_maps, smed_paras, para_maps

# mask = ((d == 0).sum(axis=0) == d.shape[0])
# idx = np.where(mask == False)
from dask.distributed import Client

client = Client(n_workers=1, threads_per_worker=1, memory_limit='12GB')
# print(d[:, idx[0], idx[1]])
# `d` is a dask-backed array of shape (bands, rows, cols); `smoothed_data` is
# assumed to be pre-allocated elsewhere with the same shape.
bands, rows, cols = d.shape
block = 50
for start_row in range(0, rows, block):
    end_row = min(start_row + block, rows)
    tmp_data = d[:, start_row:end_row, :].compute().data
    print(start_row)
    smoothed_data[:, start_row:end_row, :] = \
        smoothn(tmp_data, isrobust=True, s=0.75, TolZ=1e-6, axis=0)[0]
# smoothed_data[:, idx[0], idx[1]] = smoothn(d[:, idx[0], idx[1]],
#                                            isrobust=True, s=0.75, TolZ=1e-6, axis=0)[0]
# smoothed_data = smoothn(d.compute().data,
#                         isrobust=True, s=0.75, TolZ=1e-6, axis=0)[0]
smoothed_data.to_netcdf('smoothed.nc')
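
# Alternative sketch (assumption): if `d` wraps a dask array and smoothn acts
# independently on each (rows, cols) block along the time axis, the manual row
# loop above could be expressed with dask's map_blocks. Chunk sizes are
# illustrative; writing the result back to NetCDF would still need the xarray
# wrapping used above.
def _smooth_with_map_blocks(d, row_chunk=50):
    darr = d.data.rechunk({0: -1, 1: row_chunk, 2: -1})  # keep the full time axis in each block
    smoothed = darr.map_blocks(
        lambda block: smoothn(block, isrobust=True, s=0.75, TolZ=1e-6, axis=0)[0],
        dtype=darr.dtype,
    )
    return smoothed.compute()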