def _prox_analysis(self, src_gdal, region=None):
    """Return the 95th percentile of band 1 of `src_gdal`.

    Args:
      src_gdal: an open GDAL dataset (e.g. a proximity grid).
      region: optional region object; when given, only the window of the
        raster covered by the region (via `region.srcwin`) is read,
        otherwise the full raster is read.

    Returns:
      float: the 95th-percentile value of the (windowed) band-1 array.
    """
    ds_config = demfun.gather_infos(src_gdal)
    if region is not None:
        srcwin = region.srcwin(ds_config['geoT'], ds_config['nx'], ds_config['ny'])
    else:
        # full raster window: (xoff, yoff, xsize, ysize)
        srcwin = (0, 0, ds_config['nx'], ds_config['ny'])

    ds_arr = src_gdal.GetRasterBand(1).ReadAsArray(
        srcwin[0], srcwin[1], srcwin[2], srcwin[3]
    )
    prox_perc = np.percentile(ds_arr, 95)
    # release the (possibly large) array; the original assigned the unused
    # name `dst_arr` here, which released nothing.
    ds_arr = None

    return(prox_perc)
def _mask_analysis(self, src_gdal, region=None):
    """Summarize a (0/1) mask raster over an optional region window.

    Args:
      src_gdal: an open GDAL dataset holding the data mask in band 1.
      region: optional region object; when given, only the window of the
        raster covered by the region (via `region.srcwin`) is read,
        otherwise the full raster is read.

    Returns:
      tuple: (msk_sum, msk_max, msk_perc) where `msk_sum` is the sum of
      the mask cells, `msk_max` is the total cell count of the window and
      `msk_perc` is the percentage of set cells.
    """
    ds_config = demfun.gather_infos(src_gdal)
    if region is not None:
        srcwin = region.srcwin(ds_config['geoT'], ds_config['nx'], ds_config['ny'])
    else:
        # full raster window: (xoff, yoff, xsize, ysize)
        srcwin = (0, 0, ds_config['nx'], ds_config['ny'])

    ds_arr = src_gdal.GetRasterBand(1).ReadAsArray(
        srcwin[0], srcwin[1], srcwin[2], srcwin[3]
    )
    msk_sum = np.sum(ds_arr)
    msk_max = float(srcwin[2] * srcwin[3])
    msk_perc = float((msk_sum / msk_max) * 100.)
    # release the (possibly large) array; the original assigned the unused
    # name `dst_arr` here, which released nothing.
    ds_arr = None

    return(msk_sum, msk_max, msk_perc)
def createNullCopy(srcfile, outfile, nodata, outformat, verbose, overwrite):
    '''Copy the georeferencing of a gdal grid into a new all-nodata grid.

    Args:
      srcfile: path of the source grid whose geotransform/size are copied.
      outfile: path of the nodata grid to write.
      nodata: nodata value to use; falls back to the source's nodata value,
        then to -9999 when neither is set.
      outformat, verbose, overwrite: accepted for interface compatibility;
        currently unused here (utils.gdal_write handles the output).
    '''
    ds = gdal.Open(srcfile)
    ds_config = demfun.gather_infos(ds)
    if nodata is None:
        nodata = ds_config['ndv']
    if nodata is None:
        nodata = -9999
    ds_config['ndv'] = nodata
    ds = None

    # build the constant nodata array in one step
    dsArray = np.full([ds_config['ny'], ds_config['nx']], float(nodata))
    # bugfix: the original passed the undefined name `outf` here, raising
    # a NameError; the intended destination is the `outfile` parameter.
    utils.gdal_write(dsArray, outfile, ds_config)
def _sub_region_analysis(self, sub_regions):
    """Analyze each sub-region and assign it a proximity zone.

    For every sub-region, computes the mask coverage statistics and the
    95th-percentile proximity value, scans the DEM over the region, and
    classifies the region into one of `self._zones` by comparing the
    proximity percentile against `self.prox_perc_33` / `self.prox_perc_66`.

    Returns:
      dict: 1-based index -> [region, cell_count, mask_sum, mask_perc,
      prox_perc, zone].
    """
    n_regions = len(sub_regions)
    utils.echo_msg('analyzing {} sub-regions...'.format(n_regions))
    sub_zones = {}
    dem_ds = gdal.Open(self.dem.fn)
    msk_ds = gdal.Open(self.dem.mask_fn)
    prox_ds = gdal.Open(self.prox)
    _prog = utils.CliProgress('analyzing {} sub-regions.'.format(n_regions))

    for idx, this_region in enumerate(sub_regions):
        _prog.update_perc((idx, n_regions))
        s_sum, s_g_max, s_perc = self._mask_analysis(msk_ds, region=this_region)
        p_perc = self._prox_analysis(prox_ds, region=this_region)
        # scan the DEM over this region (kept for its scan side effects)
        s_dc = demfun.gather_infos(dem_ds, region=this_region, scan=True)

        # classify by proximity percentile; the 0.01 epsilon treats values
        # effectively equal to a threshold as below it
        if p_perc < self.prox_perc_33 or abs(p_perc - self.prox_perc_33) < 0.01:
            zone = self._zones[2]
        elif p_perc < self.prox_perc_66 or abs(p_perc - self.prox_perc_66) < 0.01:
            zone = self._zones[1]
        else:
            zone = self._zones[0]

        sub_zones[idx + 1] = [this_region, s_g_max, s_sum, s_perc, p_perc, zone]

    dem_ds = msk_ds = prox_ds = None
    _prog.end(0, 'analyzed {} sub-regions.'.format(n_regions))
    return(sub_zones)
elif arg[0] == '-': Usage() elif ingrd is None: ingrd = arg elif outgrd is None: outgrd = arg else: Usage() i = i + 1 if ingrd is None or outgrd is None: Usage() sys.exit(0) ds = gdal.Open(ingrd) ds_config = demfun.gather_infos(ds) band = ds.GetRasterBand(1) in_ndata = band.GetNoDataValue() if in_ndata is None: in_ndata = -9999 comp_geot = ds_config['geoT'] outarray = ds.ReadAsArray() if bin_mask: out_data = 0 else: out_data = ds_config['ndv'] if eq_data is not None:
def _split_sample(self, trains, perc):
    """Run split-sample simulations and fit the interpolation-error coefficients.

    Repeatedly (up to `self.sims` simulations) splits each training
    sub-region's XYZ data into an outer buffer and a randomly sampled inner
    subset, regenerates a DEM from the reduced data, queries the held-out
    points against the new DEM and its proximity grid, and fits an
    error-vs-distance coefficient curve via `self._err2coeff`.

    Args:
      trains: training regions; each `train` is a sequence of sub-region
        records where record[0] is the region, record[1] a point count and
        record[3] a coverage percentage (inferred from use — confirm).
      perc: sampling percentage for the inner split.

    Returns:
      list: [ec_d], the last fitted [c0, c1, c2] coefficient triple.
      NOTE(review): if no simulation ever reaches the fit (`s_dp` never
      populated), `s_dp`/`ec_d` are unbound and this raises NameError —
      the accumulation below deliberately relies on that NameError to
      seed `s_dp` on first success.
    """
    _prog = utils.CliProgress('performing MAX {} SPLIT-SAMPLE simulations'.format(self.sims))
    utils.echo_msg('simulation\terrors\tproximity-coeff\tp_diff')
    sim = 0
    status = 0
    last_ec_d = None
    while True:
        status = 0
        sim += 1
        for z, train in enumerate(trains):
            # cap each training set at 25 sub-regions
            train_h = train[:25]
            ss_samp = perc

            ## ==============================================
            ## perform split-sample analysis on each training region.
            ## ==============================================
            for n, sub_region in enumerate(train_h):
                ss_samp = perc
                _prog.update()
                this_region = sub_region[0]
                # regions with less coverage than the sample percentage
                # are sampled minimally (sx_cnt = 1 below)
                if sub_region[3] < ss_samp:
                    ss_samp = None

                ## ==============================================
                ## extract the xyz data for the region from the DEM
                ## ==============================================
                o_xyz = '{}_{}.xyz'.format(self.dem.name, n)
                ds = gdal.Open(self.dem.fn)
                ds_config = demfun.gather_infos(ds)
                # NOTE(review): buffer() appears to mutate this_region in
                # place (b_region is the same object) — confirm intended.
                b_region = this_region
                b_region.buffer(20*self.dem.inc)
                srcwin = b_region.srcwin(ds_config['geoT'], ds_config['nx'], ds_config['ny'])
                # dump the masked DEM cells within the buffered window
                with open(o_xyz, 'w') as o_fh:
                    for xyz in demfun.parse(ds, srcwin=srcwin, mask=self.dem.mask_fn):
                        xyz.dump(dst_port=o_fh)
                ds = None

                if os.stat(o_xyz).st_size != 0:
                    ## ==============================================
                    ## split the xyz data to inner/outer; outer is
                    ## the data buffer, inner will be randomly sampled
                    ## ==============================================
                    s_inner, s_outer = self._gmt_select_split(
                        o_xyz, this_region, 'sub_{}'.format(n), verbose=False
                    )
                    if os.stat(s_inner).st_size != 0:
                        sub_xyz = np.loadtxt(s_inner, ndmin=2, delimiter=' ')
                    else:
                        sub_xyz = []
                    ss_len = len(sub_xyz)  # currently unused
                    # number of inner points to keep for regeneration
                    if ss_samp is not None:
                        sx_cnt = int(sub_region[1] * (ss_samp / 100.)) + 1
                    else:
                        sx_cnt = 1
                    sub_xyz_head = 'sub_{}_head.xyz'.format(n)
                    np.random.shuffle(sub_xyz)
                    np.savetxt(sub_xyz_head, sub_xyz[:sx_cnt], '%f', ' ')

                    ## ==============================================
                    ## generate the random-sample DEM
                    ## ==============================================
                    waff = waffles.WaffleFactory(
                        mod=self.dem.mod, data=[s_outer, sub_xyz_head],
                        src_region=this_region, inc=self.dem.inc,
                        name='sub_{}'.format(n), node=self.dem.node,
                        fmt=self.dem.fmt, extend=self.dem.extend,
                        extend_proc=self.dem.extend_proc,
                        weights=self.dem.weights, sample=self.dem.sample,
                        clip=self.dem.clip, epsg=self.dem.epsg, mask=True,
                        verbose=False, clobber=True
                    )
                    waff.mod_args = self.dem.mod_args
                    wf = waff.acquire().generate()

                    if wf.valid_p():
                        ## ==============================================
                        ## generate the random-sample data PROX and
                        ## calculate the random-sample errors
                        ## ==============================================
                        sub_prox = '{}_prox.tif'.format(wf.name)
                        demfun.proximity('{}_m.tif'.format(wf.name), sub_prox)
                        # held-out points -> (x, y, delta) against the new
                        # DEM, then append proximity: rows of (x, y, d, g)
                        sub_xyd = demfun.query(sub_xyz[sx_cnt:], wf.fn, 'xyd')
                        sub_dp = demfun.query(sub_xyd, sub_prox, 'xyzg')
                    else:
                        sub_dp = None
                    utils.remove_glob(sub_xyz_head)

                    # accumulate errors across sub-regions; the bare except
                    # seeds s_dp on the first pass (s_dp is unbound then)
                    if sub_dp is not None and len(sub_dp) > 0:
                        try:
                            s_dp = np.concatenate((s_dp, sub_dp), axis = 0)
                        except:
                            s_dp = sub_dp
                utils.remove_glob(o_xyz, 'sub_{}*'.format(n))

        if len(s_dp) > 0:
            # filter distances to (0, d_max) and fit error-vs-distance
            d_max = self.region_info[self.dem.name][4]
            s_dp = s_dp[s_dp[:,3] < d_max,:]
            s_dp = s_dp[s_dp[:,3] > 0,:]
            prox_err = s_dp[:,[2,3]]
            if last_ec_d is None:
                last_ec_d = [0, 0.1, 0.2]
                last_ec_diff = 10
            else:
                last_ec_diff = abs(last_ec_d[2] - last_ec_d[1])
            ec_d = self._err2coeff(prox_err[:50000000], coeff_guess=last_ec_d, dst_name=self.dem.name + '_prox', xa='distance')
            ec_diff = abs(ec_d[2] - ec_d[1])
            ec_l_diff = abs(last_ec_diff - ec_diff)
            utils.echo_msg('{}\t{}\t{}\t{}'.format(sim, len(s_dp), ec_d, ec_l_diff))

            # retry when the fit returned the initial guess unchanged
            if ec_d[0] == 0 and ec_d[1] == 0.1 and ec_d[2] == 0.2:
                continue
            # stop on: max simulations, converged coefficients, or enough
            # accumulated error points (10% of the region's point count)
            if sim >= int(self.sims):
                break
            if abs(last_ec_diff - ec_diff) < 0.0001:
                break
            if len(s_dp) >= int(self.region_info[self.dem.name][1] / 10):
                break
            last_ec_d = ec_d
        else:
            utils.echo_msg('{}\t{}\t{}\t{}'.format(sim, len(s_dp), None, None))

    _prog.end(status, 'performed {} SPLIT-SAMPLE simulations'.format(sim))
    return([ec_d])
def run(self):
    """Generate spatial-metadata polygons for each dataset in self.data.

    For every datalist, groups entries by name, rasterizes each group's
    coverage mask, polygonizes the mask with GDAL, unions the polygons and
    writes one feature (with the group's metadata fields) to self.layer.
    """
    for xdl in self.data:
        data_lists = {}
        if self.recursive:
            xdl.parse_data_lists()
            data_lists = xdl.data_lists
        else:
            # group parsed entries under their top-level (direct child of
            # xdl) ancestor's name
            for e in xdl.parse():
                while e.parent != xdl:
                    e = e.parent
                if e.metadata['name'] not in data_lists.keys():
                    data_lists[e.metadata['name']] = {'data': [e] ,'dl': e}

        for x in data_lists.keys():
            utils.echo_msg('Working on {}'.format(x))
            xdl.data_entries = data_lists[x]['data']
            # NOTE(review): the recursive path reads key 'parent' while the
            # non-recursive grouping above stores 'dl' — confirm that
            # xdl.data_lists entries actually carry a 'parent' key.
            if self.recursive:
                p = data_lists[x]['parent']
            else:
                p = data_lists[x]['dl']
            # attribute values, ordered to match self.v_fields below
            o_v_fields = [
                p.metadata['title'] if p.metadata['title'] is not None else x,
                p.metadata['source'], p.metadata['date'],
                p.metadata['data_type'], p.metadata['resolution'],
                p.metadata['hdatum'], p.metadata['vdatum'], p.metadata['url']
            ]
            defn = None if self.layer is None else self.layer.GetLayerDefn()
            dl_name = x
            mask_ds, mask_config = xdl.mask_xyz(self.xinc, self.yinc)

            # only polygonize when the mask actually contains set cells
            # (scanned z-range max == 1)
            if demfun.gather_infos(mask_ds, scan=True)['zr'][1] == 1:
                tmp_ds = ogr.GetDriverByName('Memory').CreateDataSource(
                    '{}_poly'.format(dl_name)
                )
                if tmp_ds is not None:
                    tmp_layer = tmp_ds.CreateLayer(
                        '{}_poly'.format(dl_name), None, ogr.wkbMultiPolygon
                    )
                    tmp_layer.CreateField(ogr.FieldDefn('DN', ogr.OFTInteger))
                    if self.verbose:
                        utils.echo_msg('polygonizing {} mask...'.format(dl_name))
                    mask_band = mask_ds.GetRasterBand(1)
                    # vectorize the mask into tmp_layer; field 0 ('DN')
                    # receives the pixel value
                    status = gdal.Polygonize(
                        mask_band, None, tmp_layer, 0,
                        callback = gdal.TermProgress if self.verbose else None
                    )
                    if len(tmp_layer) > 0:
                        if defn is None:
                            defn = tmp_layer.GetLayerDefn()
                        # union all polygons into a single output feature
                        out_feat = gdal_ogr_mask_union(tmp_layer, 'DN', defn)
                        utils.echo_msg('creating feature {}...'.format(dl_name))
                        for i, f in enumerate(self.v_fields):
                            out_feat.SetField(f, o_v_fields[i])
                        self.layer.CreateFeature(out_feat)
                    if self.verbose:
                        utils.echo_msg('polygonized {}'.format(dl_name))
                    tmp_ds = tmp_layer = out_feat = None
            mask_ds = mask_band = None
    utils.echo_msg('Generated SPATIAL METADATA {}'.format(self.name))