def run(self, *args, **kwargs):
    meta = pyrat.data.getAnnotation(layer=self.layer)
    li = pyrat.query(layer=self.layer)
    odim = li.shape
    nry = odim[-2]
    odim[-2] //= self.suby
    odim[-1] //= self.subx
    outlayer = pyrat.data.addLayer(dtype=li.dtype, shape=odim)

    blockdim = odim.copy()
    blockdim[-2] = 1
    P = pyrat.tools.ProgressBar(' ' + self.name, odim[-2])
    P.update(0)
    for k in range(odim[-2]):
        arr = pyrat.getdata(block=(k * self.suby, (k + 1) * self.suby, 0, odim[-1] * self.subx),
                            layer=self.layer)
        if self.decimate is True:
            arr = arr[..., ::self.suby, ::self.subx]
        else:
            arr = rebin(arr, tuple(blockdim))
        pyrat.data.setData(arr, block=(k, k + 1, 0, 0), layer=outlayer)
        P.update(k + 1)
    del P
    pyrat.activate(outlayer)
    if "geo_ps_east" in meta and "geo_ps_north" in meta:
        meta['geo_ps_east'] = meta["geo_ps_east"] * self.subx
        meta['geo_ps_north'] = meta["geo_ps_north"] * self.suby
    pyrat.data.setAnnotation(meta, layer=outlayer)
    return outlayer

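# The subsampling above relies on a rebin() helper that block-averages each read block down to
# the output shape. A minimal numpy sketch of such a helper (hypothetical name block_average,
# last two axes only, sizes assumed to divide evenly; not necessarily PyRAT's rebin):
import numpy as np

def block_average(arr, suby, subx):
    """Average non-overlapping suby x subx blocks over the last two axes."""
    ny = arr.shape[-2] // suby
    nx = arr.shape[-1] // subx
    arr = arr[..., :ny * suby, :nx * subx]              # crop to a multiple of the block size
    newshape = arr.shape[:-2] + (ny, suby, nx, subx)
    return arr.reshape(newshape).mean(axis=(-3, -1))    # mean over the two block axes

# Example: a (4, 6) array averaged with 2x3 blocks yields a (2, 2) array:
# block_average(np.arange(24).reshape(4, 6), 2, 3)
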
def run(self, *args, **kwargs):
    layer = pyrat.data.active

    # STEP1: Estimate spectrum
    self.vblock = False
    rgspec = self.layer_accumulate(self.estimate_spectrum, axis='range',
                                   combine=self.combine_spectrum, silent=False)
    self.vblock = True
    azspec = self.layer_accumulate(self.estimate_spectrum, axis='azimuth',
                                   combine=self.combine_spectrum, silent=False)

    # STEP2: Adjust spectra
    rgcorr, rgcent = self.spec_correction(rgspec, alpha=self.alpha, fix=self.fix,
                                          cutoff=self.cutoff, func=(self.func, self.alpha))
    azcorr, azcent = self.spec_correction(azspec, alpha=self.alpha, fix=self.fix,
                                          cutoff=self.cutoff, func=(self.func, self.alpha))
    if self.center is False:
        azcent, rgcent = None, None

    # STEP3: Weight / Unweight
    outlayer1 = layer                       # fall back to the input layer if range is skipped
    if self.axis == 'range' or self.axis == 'both':
        self.vblock = False
        outlayer1 = self.layer_process(self.unweight_spectrum, axis='range',
                                       correction=(azcorr, rgcorr), center=(azcent, rgcent),
                                       silent=False)
    outlayer2 = outlayer1                   # fall back to the range result if azimuth is skipped
    if self.axis == 'azimuth' or self.axis == 'both':
        self.vblock = True
        outlayer2 = self.layer_process(self.unweight_spectrum, axis='azimuth',
                                       correction=(azcorr, rgcorr), center=(azcent, rgcent),
                                       layer=outlayer1, silent=False)
        if outlayer1 is not layer:
            pyrat.delete(outlayer1)         # only delete the intermediate range layer
    pyrat.activate(outlayer2)
    return outlayer2

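# The three steps above follow the usual pattern for removing a spectral weighting from SLC
# data: estimate the mean spectrum along one axis, derive a correction (e.g. the inverse of a
# fitted Hamming window), and multiply the data spectrum by that correction. A self-contained
# numpy sketch of the apply step for a single axis (hypothetical helper, not PyRAT's
# unweight_spectrum / spec_correction methods):
import numpy as np

def apply_spectral_correction(data, correction, axis=-1):
    """Multiply the spectrum of `data` along `axis` by a 1-D correction and transform back."""
    spec = np.fft.fft(data, axis=axis)
    shape = [1] * data.ndim
    shape[axis] = correction.size            # broadcast the 1-D correction along `axis`
    spec *= correction.reshape(shape)
    return np.fft.ifft(spec, axis=axis)

# Schematic use: undo a (suitably positioned) Hamming weighting along the last axis, guarding
# against division by very small window values:
# window = np.hamming(data.shape[-1])
# corrected = apply_spectral_correction(data, 1.0 / np.maximum(window, 1e-3), axis=-1)
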
def run(self, *args, **kwargs):
    layer = pyrat.data.active

    # STEP1: Estimate profiles
    azprof, rgprof = self.layer_accumulate(self.estimate_profiles, combine=self.combine_profiles)

    # STEP2: Fit correction
    rgprof /= np.mean(rgprof, axis=-1, keepdims=True)
    azprof /= np.mean(azprof, axis=-1, keepdims=True)

    # todo: from here on adapt to nd-data sets
    rgaxis = np.arange(rgprof.shape[-1])
    azaxis = np.arange(azprof.shape[-1])
    rgcorr = np.empty_like(rgprof)
    azcorr = np.empty_like(azprof)
    if rgprof.ndim == 1:
        rgcorr = np.polyval(np.polyfit(rgaxis, rgprof, self.order), rgaxis)
        azcorr = np.polyval(np.polyfit(azaxis, azprof, self.order), azaxis)
    elif rgprof.ndim == 2:
        for k in range(rgprof.shape[0]):
            rgcorr[k, :] = np.polyval(np.polyfit(rgaxis, rgprof[k, :], self.order), rgaxis)
            azcorr[k, :] = np.polyval(np.polyfit(azaxis, azprof[k, :], self.order), azaxis)
    elif rgprof.ndim == 3:
        for k in range(rgprof.shape[0]):
            for l in range(rgprof.shape[1]):
                rgcorr[k, l, :] = np.polyval(np.polyfit(rgaxis, rgprof[k, l, :], self.order), rgaxis)
                azcorr[k, l, :] = np.polyval(np.polyfit(azaxis, azprof[k, l, :], self.order), azaxis)

    # STEP3: Apply correction
    outlayer = self.layer_process(self.applyfix, axis=self.axis, correction=(azcorr, rgcorr),
                                  silent=False, **kwargs)
    pyrat.activate(outlayer)
    return outlayer

def run(self, *args, **kwargs):
    meta = pyrat.data.getAnnotation(layer=self.layer)
    li = pyrat.query(layer=self.layer)
    odim = li.shape
    nry = odim[-2]
    odim[-2] //= self.suby
    odim[-1] //= self.subx
    outlayer = pyrat.data.addLayer(dtype=li.dtype, shape=odim)

    blockdim = odim.copy()
    blockdim[-2] = 1
    P = pyrat.tools.ProgressBar(' ' + self.name, odim[-2])
    P.update(0)
    for k in range(odim[-2]):
        arr = pyrat.getdata(block=(k * self.suby, (k + 1) * self.suby, 0, odim[-1] * self.subx),
                            layer=self.layer)
        arr = rebin(arr, tuple(blockdim))
        pyrat.data.setData(arr, block=(k, k + 1, 0, 0), layer=outlayer)
        P.update(k + 1)
    del P
    pyrat.activate(outlayer)
    if "geo_ps_east" in meta and "geo_ps_north" in meta:
        meta['geo_ps_east'] = meta["geo_ps_east"] * self.subx
        meta['geo_ps_north'] = meta["geo_ps_north"] * self.suby
    pyrat.data.setAnnotation(meta, layer=outlayer)
    return outlayer

def run(self, *args, **kwargs):
    P = ProgressBar(' ' + self.name, 10)
    bounds = opt.fmin(self.optf, [0.5, 2.0], args=(self.looks, self.sigma), disp=False)  # calc sigma bounds
    newsig = self.newsig(bounds[0], bounds[1], sigrng=self.sigma, looks=self.looks)      # calc new stddev
    P.update(0)
    perc = 100.0 - self.perc * 100.0                                                     # point target threshold
    pthreshold = np.mean(self.layer_accumulate(self.estimate_percentile, type=self.type, perc=perc))
    P.update(2)
    layer = self.layer_process(self.leeimproved, bounds=bounds, newsig=newsig, thres=pthreshold,
                               looks=self.looks, win=self.win, type=self.type)
    P.update(10)
    del P
    pyrat.activate(layer)
    return layer

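# The improved Lee sigma filter above first derives a point-target threshold from a high
# intensity percentile (perc = 100 - self.perc * 100), so that very bright scatterers are kept
# instead of being averaged away. A standalone numpy sketch of that thresholding idea
# (hypothetical helper, not the estimate_percentile / leeimproved methods used above):
import numpy as np

def point_target_mask(intensity, keep_fraction=0.02):
    """Return a boolean mask of pixels brighter than the (1 - keep_fraction) percentile."""
    threshold = np.percentile(intensity, 100.0 * (1.0 - keep_fraction))
    return intensity > threshold

# Pixels flagged by the mask would be copied through unfiltered; the remaining pixels receive
# the sigma-range averaging of the Lee filter.
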
def run(self, *args, **kwargs):
    para = [foo['var'] for foo in self.para]
    self.checkpara(kwargs, para)
    logging.info(self.name + ' ' + str(dict((k, v) for k, v in self.__dict__.items()
                                            if k in para or k in kwargs)))
    tmp_filename = tempfile.mktemp(suffix='.hd5', prefix='pyrat_', dir=pyrat.data.tmpdir)
    shutil.copyfile(self.file, tmp_filename)
    lay = pyrat.data.addLayer(file=tmp_filename)
    pyrat.activate(lay)

def run(self, *args, **kwargs):
    lay0 = pyrat.data.active

    # STEP1: Estimate spectra
    lay1 = pyrat.filter.unweight(layer=lay0, center=True, fix=True, cutoff=self.cutoff)
    lay2 = pyrat.filter.weight(layer=lay0, center=False, fix=False, func='Hamming',
                               alpha=0.5, cutoff=self.cutoff)
    lay3 = self.layer_process(self.cda, layer=[lay1, lay2])
    pyrat.delete(lay1)
    pyrat.delete(lay2)
    pyrat.activate(lay3)

def run(self, *args, **kwargs):
    para = [foo['var'] for foo in self.para]
    self.checkpara(kwargs, para)
    logging.info(self.name + ' ' + str(dict((k, v) for k, v in self.__dict__.items()
                                            if k in para or k in kwargs)))
    tmp_filename = tempfile.mktemp(suffix='.hd5', prefix='pyrat_', dir=pyrat.data.tmpdir)
    shutil.copyfile(self.file, tmp_filename)
    lay = pyrat.data.addLayer(file=tmp_filename)
    pyrat.activate(lay)

def run(self, *args, **kwargs):
    layer = pyrat.data.active

    # STEP1: Estimate profiles
    azprof, rgprof = self.layer_accumulate(self.estimate_profiles, combine=self.combine_profiles)

    # STEP2: Fit correction
    rgprof /= np.mean(rgprof, axis=-1, keepdims=True)
    azprof /= np.mean(azprof, axis=-1, keepdims=True)

    # todo: from here on adapt to nd-data sets
    rgaxis = np.arange(rgprof.shape[-1])
    azaxis = np.arange(azprof.shape[-1])
    rgcorr = np.empty_like(rgprof)
    azcorr = np.empty_like(azprof)
    if rgprof.ndim == 1:
        rgcorr = np.polyval(np.polyfit(rgaxis, rgprof, self.order), rgaxis)
        azcorr = np.polyval(np.polyfit(azaxis, azprof, self.order), azaxis)
    elif rgprof.ndim == 2:
        for k in range(rgprof.shape[0]):
            rgcorr[k, :] = np.polyval(np.polyfit(rgaxis, rgprof[k, :], self.order), rgaxis)
            azcorr[k, :] = np.polyval(np.polyfit(azaxis, azprof[k, :], self.order), azaxis)
    elif rgprof.ndim == 3:
        for k in range(rgprof.shape[0]):
            for l in range(rgprof.shape[1]):
                rgcorr[k, l, :] = np.polyval(np.polyfit(rgaxis, rgprof[k, l, :], self.order), rgaxis)
                azcorr[k, l, :] = np.polyval(np.polyfit(azaxis, azprof[k, l, :], self.order), azaxis)

    # STEP3: Apply correction
    outlayer = self.layer_process(self.applyfix, axis=self.axis, correction=(azcorr, rgcorr),
                                  silent=False, **kwargs)
    pyrat.activate(outlayer)
    return outlayer

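# Once the polynomial fits above are available, the correction step divides the image by the
# fitted range profile (per column) and azimuth profile (per row), flattening slow intensity
# trends; both profiles are normalised to mean 1 beforehand. A minimal numpy sketch of that
# apply step for a single 2-D image (hypothetical helper, not the applyfix method used above):
import numpy as np

def apply_profile_correction(img, azcorr, rgcorr):
    """Divide a 2-D image (azimuth, range) by fitted azimuth/range intensity profiles."""
    out = img / rgcorr[np.newaxis, :]    # range trend varies along the last axis
    out /= azcorr[:, np.newaxis]         # azimuth trend varies along the first axis
    return out
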
def run(self, *args, **kwargs):
    P = ProgressBar(' ' + self.name, self.iter)
    P.update(0)
    for k in range(self.iter):
        if k != 0:
            oldlayer = newlayer
        newlayer = self.layer_process(self.srad, looks=self.looks, step=self.step, iter=k,
                                      scale=self.scale, type=self.type)
        if k != 0:
            pyrat.delete(oldlayer, silent=True)
        pyrat.activate(newlayer, silent=True)
        P.update(k + 1)
    del P
    pyrat.activate(newlayer)
    return newlayer

def run(self, *args, **kwargs):
    li = pyrat.query(layer=self.layer)
    odim = li.shape
    nry = odim[-2]
    odim[-2] //= self.suby
    odim[-1] //= self.subx
    outlayer = pyrat.data.addLayer(dtype=li.dtype, shape=odim)

    blockdim = odim.copy()
    blockdim[-2] = 1
    P = ProgressBar(' ' + self.name, odim[-2])
    P.update(0)
    for k in range(odim[-2]):
        arr = pyrat.getdata(block=(k * self.suby, (k + 1) * self.suby, 0, odim[-1] * self.subx),
                            layer=self.layer)
        arr = rebin(arr, tuple(blockdim))
        pyrat.data.setData(arr, block=(k, k + 1, 0, 0), layer=outlayer)
        P.update(k + 1)
    del P
    pyrat.activate(outlayer)
    return outlayer

def run(self, *args, **kwargs):
    l_cov = pyrat.data.active
    outsize = pyrat.data.shape[-2:]

    # STEP0: Random initialisation
    l_init = self.layer_fromfunc(self.init_random, size=outsize, nclass=self.nclass)

    P = ProgressBar(' ' + self.name, self.niter)
    P.update(0)
    for iter in range(self.niter):
        # STEP1: Calculate cluster centres (and their frequency)
        pyrat.activate([l_cov, l_init], silent=True)
        cc, nc = self.layer_accumulate(self.calc_centers, nclass=self.nclass, combine=self.combine_mean)
        pyrat.delete(l_init, silent=True)

        # STEP2: Eliminate empty classes (filter instead of deleting while iterating)
        cc = [cc[k] for k, n in enumerate(nc) if n != 0]
        nc = [nc[k] for k, n in enumerate(nc) if n != 0]

        # STEP3: Calculate class memberships
        pyrat.activate(l_cov, silent=True)
        l_init = self.layer_process(self.assign_classes, centers=cc)
        P.update(iter + 1)
    del P
    pyrat.activate(l_init)
    return l_init

def run(self, *args, **kwargs):
    l_cov = pyrat.data.active
    outsize = pyrat.data.shape[-2:]

    # STEP0: Random initialisation
    l_init = self.layer_fromfunc(self.init_random, size=outsize, nclass=self.nclass)

    P = ProgressBar(' ' + self.name, self.niter)
    P.update(0)
    for iter in range(self.niter):
        # STEP1: Calculate cluster centres (and their frequency)
        pyrat.activate([l_cov, l_init], silent=True)
        cc, nc = self.layer_accumulate(self.calc_centers, nclass=self.nclass, combine=self.combine_mean)
        pyrat.delete(l_init, silent=True)

        # STEP2: Eliminate empty classes (filter instead of deleting while iterating)
        cc = [cc[k] for k, n in enumerate(nc) if n != 0]
        nc = [nc[k] for k, n in enumerate(nc) if n != 0]

        # STEP3: Calculate class memberships
        pyrat.activate(l_cov, silent=True)
        l_init = self.layer_process(self.assign_classes, centers=cc)
        P.update(iter + 1)
    del P
    pyrat.activate(l_init)
    return l_init

def run(self, *args, **kwargs):
    if isinstance(self.layer, list):    # Means the init layer is also active
        l_cov = self.layer[0]
        l_init = self.layer[1]
    else:
        l_cov = self.layer
        l_init = []
    outsize = pyrat.data.shape[-2:]
    meta = pyrat.getmeta(layer=l_cov)

    # STEP0: Do random initialisation if l_init is empty
    if len(l_init) == 0:
        l_init = self.layer_fromfunc(self.init_random, size=outsize, nclass=self.nclass)

    P = pyrat.tools.ProgressBar(' ' + self.name, self.niter)
    P.update(0)
    for iter in range(self.niter):
        # STEP1: Calculate cluster centres (and their frequency)
        pyrat.activate([l_cov, l_init], silent=True)
        cc, nc = self.layer_accumulate(self.calc_centers, nclass=self.nclass, combine=self.combine_mean)
        pyrat.delete(l_init, silent=True)

        # STEP2: Eliminate empty classes
        cc = [cc[k] for k, n in enumerate(nc) if n != 0]
        nc = [nc[k] for k, n in enumerate(nc) if n != 0]

        # STEP3: Calculate class memberships
        pyrat.activate(l_cov, silent=True)
        l_init = self.layer_process(self.assign_classes, centers=cc)
        P.update(iter + 1)
    del P

    meta['Class Centers'] = cc
    pyrat.setmeta(meta, layer=l_init)
    pyrat.activate(l_init)
    return l_init

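# The loop above alternates between computing class centres and reassigning pixels, i.e. a
# k-means iteration over the covariance data. A plain numpy k-means sketch on pixel feature
# vectors (Euclidean distance instead of the Wishart-type distance used by calc_centers /
# assign_classes) illustrates the same update / assign cycle:
import numpy as np

def kmeans(features, nclass, niter=10, seed=0):
    """features: (npixels, nfeat) array. Returns (labels, centers)."""
    rng = np.random.default_rng(seed)
    labels = rng.integers(0, nclass, features.shape[0])            # STEP0: random initialisation
    for _ in range(niter):
        centers = [features[labels == k].mean(axis=0)              # STEP1: class centres
                   for k in range(nclass) if np.any(labels == k)]  # STEP2: drop empty classes
        centers = np.array(centers)
        dist = np.linalg.norm(features[:, None, :] - centers[None, :, :], axis=-1)
        labels = np.argmin(dist, axis=1)                           # STEP3: class memberships
    return labels, centers
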