def setUp(self):
    file = os.path.join(
        os.path.abspath(os.path.dirname(__file__)),
        'data/20110214_20110401_ml4_sm.unw.geo_ig_dsc_ionnocorr.mat')

    self.sc = Scene()
    self.sc.setLogLevel('ERROR')
    self.sc.import_data(file)
    self.sc.meta.scene_title = 'Matlab Input - Myanmar 2011-02-14'
def get_scene():
    sc = Scene()
    sc.frame.llLat = 52.395833
    sc.frame.llLon = 13.061389
    sc.frame.dE = 0.001
    sc.frame.dN = 0.001
    sc.frame.spacing = "degree"
    sc.displacement = num.zeros((500, 500))
    return sc
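# Hedged usage sketch for get_scene() above: save the synthetic scene to
# kite's container format and (optionally) inspect it. The scene title and
# the output name 'synthetic_scene' are illustrative assumptions; sc.save()
# and sc.spool() are the same Scene methods used elsewhere in these examples.
sc = get_scene()
sc.meta.scene_title = 'Synthetic zero-displacement scene'  # hypothetical title
sc.save('synthetic_scene')  # writes the .npz/.yml container pair
# sc.spool()                # interactive inspection, if a display is available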
def test_deramp():
    c = num.arange(20, dtype=num.float)
    E, N = num.meshgrid(c, c)
    displ = (-3 + 5.4*E) + (10 + 2.5*N)

    sc = Scene(displacement=displ, llLat=0, llLon=0., dLat=.3, dLon=.3)
    sc.displacement_deramp(demean=True, inplace=True)

    coeffs = sc.get_ramp_coefficients()
    num.testing.assert_almost_equal(coeffs, num.zeros_like(coeffs))
def loadData(dataFldr, ifgName, corName, maskFile=None):
    '''
    Return the Kite scene from a folder, e.g., Tr43dsc/20200128_20200116.
    Automatically searches for the unwrapped IFG and correlation files.
    Files should be 2-stage unwrapped and geocoded.
    '''
    # Formulate IFG name
    ifgName = os.path.join(dataFldr, ifgName)
    corName = os.path.join(dataFldr, corName)

    # Load as Kite Scene
    sc = Scene.import_data(ifgName)

    # Load correlation file
    corDS = gdal.Open(corName, gdal.GA_ReadOnly)
    cor = corDS.GetRasterBand(2).ReadAsArray()
    cor = np.flipud(cor)

    # Load mask if specified
    if maskFile:
        mskDS = gdal.Open(maskFile, gdal.GA_ReadOnly)
        msk = mskDS.GetRasterBand(1).ReadAsArray()
        msk = np.flipud(msk)
    else:
        msk = None

    # Report
    print('Loaded scene: {:s}'.format(ifgName))
    print('Loaded cor file {:s}'.format(corName))

    return sc, cor, msk
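# Hedged usage sketch for loadData() above; the file names are illustrative
# assumptions (ISCE-style products), only the folder name comes from the
# docstring example.
sc, cor, msk = loadData(
    dataFldr='Tr43dsc/20200128_20200116',   # example folder from the docstring
    ifgName='filt_topophase.unw.geo',       # hypothetical unwrapped IFG
    corName='topophase.cor.geo',            # hypothetical correlation raster
    maskFile=None)                          # no mask in this sketch
print(sc.displacement.shape, cor.shape)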
def load(path, kite_scene=True, grid=False, path_cc=None):
    '''
    Load data from a path or, optionally, from a kite scene.
    '''
    if kite_scene is True:
        sc = Scene.load(path)
        img = sc.displacement
        unw = img.copy()
        where_are_NaNs = num.isnan(img)
        img[where_are_NaNs] = 0
        coh = img  # TODO: load in coherence
        dates = [sc.meta.time_slave, sc.meta.time_master]

    if grid is True:
        unw = Image.open(path)
        img = num.array(unw, num.float32)
        where_are_NaNs_dsc = num.isnan(img)
        img[where_are_NaNs_dsc] = 0

        coh_load = Image.open(path_cc)
        coh = num.array(coh_load, num.float32)
        where_are_NaNs = num.isnan(coh)
        coh[where_are_NaNs] = 0

        sc = None
        dates = None

    return img, coh, sc, dates
def add_kite_scene(self, filename):
    try:
        from kite import Scene
    except ImportError:
        raise ImportError('module kite could not be imported,'
                          ' please install from https://pyrocko.org')

    logger.debug('loading kite scene from %s' % filename)

    scene = Scene()
    scene._log.setLevel(logger.level)
    scene.load(filename)

    try:
        self.get_kite_scene(scene.meta.scene_id)
    except NotFound:
        self.kite_scenes.append(scene)
    else:
        raise AttributeError('kite scene_id not unique for %s' % filename)
def f(self):
    common.get_test_data(dl_path)

    if filename is None:
        load_path = dl_path
    else:
        load_path = filename
    load_path = op.join(common.data_dir, load_path)

    fn_save = op.join(self.tmp_dir, "kite-%s" % fmt)

    sc1 = Scene.import_data(load_path)
    sc1.save(fn_save)

    sc2 = Scene.load(fn_save)

    num.testing.assert_equal(sc1.displacement, sc2.displacement)
    num.testing.assert_equal(sc1.phi, sc2.phi)
    num.testing.assert_equal(sc1.theta, sc2.theta)
def testIO(self):
    import tempfile
    import shutil

    tmp_dir = tempfile.mkdtemp(prefix='kite')
    file = os.path.join(tmp_dir, self.__class__.__name__)

    sc1 = self.sc
    sc1.quadtree.epsilon = .120
    sc1.quadtree.tile_size_min = 50
    sc1.quadtree.tile_size_max = 23000
    sc1.quadtree.nan_allowed = .9

    try:
        sc1.save(file)

        sc2 = Scene()
        sc2.setLogLevel('ERROR')
        sc2.load(file)

        self.assertEqual(sc1.quadtree.epsilon, sc2.quadtree.epsilon)
        self.assertEqual(sc1.quadtree.nan_allowed, sc2.quadtree.nan_allowed)
        self.assertEqual(sc1.quadtree.tile_size_min,
                         sc2.quadtree.tile_size_min)
        self.assertEqual(sc1.quadtree.tile_size_max,
                         sc2.quadtree.tile_size_max)
        self.assertEqual(sc1.quadtree.nleafs, sc2.quadtree.nleafs)
        self.assertEqual([l.id for l in sc1.quadtree.leafs],
                         [l.id for l in sc2.quadtree.leafs])
    finally:
        shutil.rmtree(tmp_dir)
def get_insar_scenes(self):
    from kite import Scene
    if self._scenes is None:
        self._scenes = []
        path_insar = self.get_path('insar')
        util.ensuredir(path_insar)

        fns = util.select_files(
            [path_insar], regex=r'\.(npz)$', show_progress=False)
        for f in fns:
            self._scenes.append(Scene.load(f))

    return self._scenes
def get_insar_scenes(self):
    from kite import Scene
    if self._scenes is None:
        self._scenes = []
        path_insar = self.get_path('insar')
        util.ensuredir(path_insar)

        fns = util.select_files(
            [path_insar], regex='\\.(npz)$', show_progress=False)
        for f in fns:
            self._scenes.append(Scene.load(f))

    return self._scenes
def create_kite_scene_asc(store_id, dip, depth, patches, llLat=0., llLon=0.):
    km = 1e3
    d2r = num.pi/180.
    engine = gf.LocalEngine(store_superdirs=['.'])

    # Define the scene's frame
    frame = FrameConfig(
        # Lower left geographical reference [deg]
        llLat=llLat, llLon=llLon,
        # Pixel spacing [m] or [degrees]
        spacing='meter', dE=550, dN=550)

    # Resolution of the scene
    npx_east = 1400
    npx_north = 1400

    # 2D arrays for displacement and look vector
    displacement = num.empty((npx_east, npx_north))

    # Look vectors
    # Theta is elevation angle from horizon
    theta = num.full_like(displacement, 56.*d2r)
    # Phi is azimuth towards the satellite, counter-clockwise from East
    phi = num.full_like(displacement, -166.*d2r)

    scene = Scene(
        displacement=displacement,
        phi=phi, theta=theta,
        frame=frame)

    satellite_target = gf.KiteSceneTarget(
        scene,
        store_id=store_id)

    sources = CombiSource(subsources=patches)

    result = engine.process(
        sources, satellite_target,
        # Use all available cores
        nthreads=0)

    kite_scenes = result.kite_scenes()

    return kite_scenes
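# Hedged usage sketch for create_kite_scene_asc() above. The GF store id and
# the single rectangular patch are illustrative assumptions; a real run needs
# a static Green's function store available in the working directory.
from pyrocko import gf

patch = gf.RectangularSource(
    lat=0., lon=0., depth=4e3,       # hypothetical source position
    strike=30., dip=60., rake=90.,   # hypothetical fault orientation [deg]
    length=8e3, width=4e3,           # hypothetical patch dimensions [m]
    slip=1.5)                        # hypothetical uniform slip [m]

kite_scenes = create_kite_scene_asc(
    store_id='my_static_store',      # hypothetical GF store id
    dip=60., depth=4e3, patches=[patch])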
def testIO(self):
    import tempfile
    import shutil

    tmp_dir = tempfile.mkdtemp(prefix='kite')
    # print(tmp_dir)
    file = os.path.join(tmp_dir, self.__class__.__name__)

    sc1 = self.sc
    sc1.quadtree.epsilon = .076
    sc1.quadtree.tile_size_min = 50
    sc1.quadtree.tile_size_max = 12773
    sc1.quadtree.nan_allowed = .8

    sc1.covariance.config.a = 0.008
    sc1.covariance.config.b = 300.2
    sc1.covariance.config.variance = .2
    sc1.covariance.covariance_matrix

    try:
        sc1.save(file)

        sc2 = Scene()
        sc2.setLogLevel('ERROR')
        sc2.load(file)

        self.assertEqual(sc1.quadtree.epsilon, sc2.quadtree.epsilon)
        self.assertEqual(sc1.quadtree.nan_allowed, sc2.quadtree.nan_allowed)
        self.assertEqual(sc1.quadtree.tile_size_min,
                         sc2.quadtree.tile_size_min)
        self.assertEqual(sc1.quadtree.tile_size_max,
                         sc2.quadtree.tile_size_max)
        self.assertEqual(sc1.quadtree.nleafs, sc2.quadtree.nleafs)
        self.assertEqual([l.id for l in sc1.quadtree.leafs],
                         [l.id for l in sc2.quadtree.leafs])

        self.assertEqual(sc1.covariance.variance, sc2.covariance.variance)
        self.assertEqual(sc1.covariance.covariance_model,
                         sc2.covariance.covariance_model)
        num.testing.assert_equal(sc1.covariance.weight_matrix_focal,
                                 sc2.covariance.weight_matrix_focal)
        num.testing.assert_equal(sc1.covariance.covariance_matrix_focal,
                                 sc2.covariance.covariance_matrix_focal)
        num.testing.assert_equal(sc1.covariance.covariance_matrix,
                                 sc2.covariance.covariance_matrix)
    finally:
        shutil.rmtree(tmp_dir)
def post_process(self, *args, **kwargs):
    resp = gf.SatelliteTarget.post_process(self, *args, **kwargs)

    from kite import Scene
    from kite.scene import SceneConfig, FrameConfig, Meta

    patch = self.scene_patch

    grid, _ = patch.get_grid()

    displacement = num.empty_like(grid)
    displacement.fill(num.nan)
    displacement[patch.get_mask()] = resp.result['displacement.los']

    theta, phi = patch.get_incident_angles()

    llLat, llLon = patch.get_ll_anchor()
    urLat, urLon = patch.get_ur_anchor()
    dLon = num.abs(llLon - urLon) / patch.resolution[0]
    dLat = num.abs(llLat - urLat) / patch.resolution[1]

    scene_config = SceneConfig(
        meta=Meta(
            scene_title='Pyrocko Scenario Generator - {orbit} ({time})'.format(
                orbit=self.scene_patch.orbital_node,
                time=datetime.now()),
            orbital_node=patch.orbital_node,
            scene_id='pyrocko_scenario_%s' % self.scene_patch.orbital_node,
            satellite_name='Sentinel-1 (Scenario)'),
        frame=FrameConfig(
            llLat=float(llLat),
            llLon=float(llLon),
            dN=float(dLat),
            dE=float(dLon),
            spacing='degree'))

    scene = Scene(
        displacement=displacement,
        theta=theta,
        phi=phi,
        config=scene_config)

    resp.scene = scene

    return resp
def kite_downsample_isce_unw(datafile, outname,
                             epsilon=1, nan_allowed=0.99,
                             tile_size_min=0.002, tile_size_max=0.010):
    """
    -------- quadtree downsample an interferogram ---------
    epsilon - variance cutoff before the quadtree splits
    nan_allowed - fraction of pixels that can be nan and still get used
    tile_size_min - degrees
    tile_size_max - degrees
    datafile: .unw.geo file with a matching .xml in the same directory
    outname: the geojson produced
    los_rdr_file: los.rdr.geo as produced by isce must be in the same directory
    """
    print("Quadtree downsampling the file %s into geojson %s"
          % (datafile, outname))
    sc = Scene.import_data(datafile)
    qt = sc.quadtree
    qt.epsilon = epsilon
    qt.nan_allowed = nan_allowed
    qt.tile_size_min = tile_size_min
    qt.tile_size_max = tile_size_max
    qt.export_geojson(outname)
    return
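# Hedged usage sketch for kite_downsample_isce_unw() above; the input and
# output file names are illustrative assumptions for an ISCE unwrapped,
# geocoded interferogram.
kite_downsample_isce_unw(
    datafile='filt_topophase.unw.geo',      # hypothetical ISCE .unw.geo file
    outname='downsampled_leaves.geojson',   # hypothetical geojson output
    epsilon=1, nan_allowed=0.99,
    tile_size_min=0.002, tile_size_max=0.010)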
def load_kite_scenes(datadir, names): """ Load SAR data from the kite format. """ try: from kite import Scene except ImportError: raise ImportError( 'kite not installed! please checkout www.pyrocko.org!') diffgs = [] tobeloaded_names = set(copy.deepcopy(names)) for k in names: try: sc = Scene.load(os.path.join(datadir, k)) diffgs.append(heart.DiffIFG.from_kite_scene(sc)) tobeloaded_names.discard(k) except ImportError: logger.warning('File %s not conform with kite format!' % k) names = list(tobeloaded_names) return diffgs
def load_kite_scenes(datadir, names): """ Load SAR data from the kite format. """ try: from kite import Scene from kite.scene import UserIOWarning except ImportError: raise ImportError( 'kite not installed! please checkout www.pyrocko.org!') diffgs = [] for k in names: filepath = os.path.join(datadir, k) try: logger.info('Loading scene: %s' % k) sc = Scene.load(filepath) diffgs.append(heart.DiffIFG.from_kite_scene(sc)) logger.info('Successfully imported kite scene %s' % k) except (ImportError, UserIOWarning): logger.warning('File %s not conform with kite format!' % k) return diffgs
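# Hedged usage sketch for load_kite_scenes() above. The data directory and the
# scene basenames are illustrative assumptions; each name should point to a
# kite container (.npz/.yml pair) on disk.
diffgs = load_kite_scenes(
    datadir='data/insar',                        # hypothetical directory
    names=['asc_T043_scene', 'dsc_T150_scene'])  # hypothetical containers
print('Loaded %d interferograms' % len(diffgs))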
def quadtree_plot(inps):
    """Plot the quadtree leaves."""
    print('Start plotting quadtree result')
    outfile = inps.outfile[0]
    sc = Scene.load(outfile + '.yml')
    qt = sc.quadtree

    fig = plt.figure()
    ax = fig.gca()

    limit = np.abs(qt.leaf_medians).max()
    color_map = cm.ScalarMappable(
        norm=colors.Normalize(vmin=-limit, vmax=limit),
        cmap=cm.get_cmap('jet'))

    for rect, leaf in zip(qt.getMPLRectangles(), qt.leaves):
        color = color_map.to_rgba(leaf.median)
        rect.set_facecolor(color)
        ax.add_artist(rect)

    ax.set_xlim(qt.leaf_eastings.min(), qt.leaf_eastings.max())
    ax.set_ylim(qt.leaf_northings.min(), qt.leaf_northings.max())

    fig.savefig(outfile + '.png', dpi=300, bbox_inches='tight')
def quadtree_kite(inps):
    """Use kite to do quadtree downsampling."""
    logging.basicConfig(level=logging.DEBUG)
    file = inps.file[0]
    sc = Scene.import_data(file)

    # For convenience we set an abbreviation for the quadtree
    qt = sc.quadtree

    # Parametrisation of the quadtree
    qt.epsilon = inps.epsilon[0]   # Variance threshold
    qt.nan_allowed = inps.nan[0]   # Fraction of NaN values allowed per tile/leaf
    # Be careful here: if your scene is referenced in degree, use decimal values!
    qt.tile_size_max = inps.tile_max[0]  # Maximum leaf edge length in [m] or [deg]
    qt.tile_size_min = inps.tile_min[0]  # Minimum leaf edge length in [m] or [deg]

    print('the reduction rms is %f' % qt.reduction_rms)  # In units of [m] or [deg]

    # We save the scene in kite's format
    outname = inps.outfile[0]
    sc.save(outname)

    # Export to CSV format
    geometry = inps.geometry[0]
    point_disp, point_angle = export_csv(sc, geometry, outname)

    # Or export the quadtree to a CSV file
    # qt.export_csv(outname + '.csv')
    # print(sc.phi)
    # qt.export_geojson(outname + '.json')

    # Export JSON file for grid search
    write_json(inps, point_disp, point_angle)
    return
class TestMatlabScene(unittest.TestCase):

    def setUp(self):
        file = os.path.join(
            os.path.abspath(os.path.dirname(__file__)),
            'data/20110214_20110401_ml4_sm.unw.geo_ig_dsc_ionnocorr.mat')

        self.sc = Scene()
        self.sc.setLogLevel('ERROR')
        self.sc.import_data(file)
        self.sc.meta.scene_title = 'Matlab Input - Myanmar 2011-02-14'

    def testQuadtree(self):
        qt = self.sc.quadtree
        for e in num.linspace(0.118, .3, num=30):
            qt.epsilon = e

        for nan in num.linspace(0.1, 1., num=30):
            qt.nan_allowed = nan

        for s in num.linspace(100, 4000, num=30):
            qt.tile_size_min = s
            qt.tile_size_max = 5000

        for s in num.linspace(200, 4000, num=30):
            qt.tile_size_min = 0
            qt.tile_size_max = 5000

    def testIO(self):
        import tempfile
        import shutil

        tmp_dir = tempfile.mkdtemp(prefix='kite')
        # print(tmp_dir)
        file = os.path.join(tmp_dir, self.__class__.__name__)

        sc1 = self.sc
        sc1.quadtree.epsilon = .076
        sc1.quadtree.tile_size_min = 50
        sc1.quadtree.tile_size_max = 12773
        sc1.quadtree.nan_allowed = .8

        sc1.covariance.config.a = 0.008
        sc1.covariance.config.b = 300.2
        sc1.covariance.config.variance = .2
        sc1.covariance.covariance_matrix

        try:
            sc1.save(file)

            sc2 = Scene()
            sc2.setLogLevel('ERROR')
            sc2.load(file)

            self.assertEqual(sc1.quadtree.epsilon, sc2.quadtree.epsilon)
            self.assertEqual(sc1.quadtree.nan_allowed,
                             sc2.quadtree.nan_allowed)
            self.assertEqual(sc1.quadtree.tile_size_min,
                             sc2.quadtree.tile_size_min)
            self.assertEqual(sc1.quadtree.tile_size_max,
                             sc2.quadtree.tile_size_max)
            self.assertEqual(sc1.quadtree.nleafs, sc2.quadtree.nleafs)
            self.assertEqual([l.id for l in sc1.quadtree.leafs],
                             [l.id for l in sc2.quadtree.leafs])

            self.assertEqual(sc1.covariance.variance, sc2.covariance.variance)
            self.assertEqual(sc1.covariance.covariance_model,
                             sc2.covariance.covariance_model)
            num.testing.assert_equal(sc1.covariance.weight_matrix_focal,
                                     sc2.covariance.weight_matrix_focal)
            num.testing.assert_equal(sc1.covariance.covariance_matrix_focal,
                                     sc2.covariance.covariance_matrix_focal)
            num.testing.assert_equal(sc1.covariance.covariance_matrix,
                                     sc2.covariance.covariance_matrix)
        finally:
            shutil.rmtree(tmp_dir)
print('Av. Heading:', np.nanmean(heading))
print('Av Look:', np.nanmean(look))

theta = np.deg2rad(90. - look)
phi = np.ones((ds.RasterYSize, ds.RasterXSize)) * np.deg2rad(-90 - heading)

print('Av. theta:', np.rad2deg(np.nanmean(theta)))
print('Av phi:', np.rad2deg(np.nanmean(phi)))

# sys.exit()
# los[np.isnan(los)] = 0.0
# theta[np.isnan(theta)] = 0.0
# phi[np.isnan(phi)] = 0.0

sc = Scene()
sc.displacement = -los

# !!! lower left corner !!!
sc.frame.llLat = ds_geo[3] + ds_geo[5] * ds.RasterYSize
sc.frame.llLon = ds_geo[0]
sc.frame.dN = -ds_geo[5]
sc.frame.dE = ds_geo[1]
sc.frame.spacing = 'degree'

sc.theta = theta
sc.phi = phi
sc.meta.scene_id = arguments["--id"]

# get ref point
def save_kite(inps):
    """save kite"""
    # read mintpy data
    date1, date2, disp, disp_atr, incidence, azimuth = read_HDFEOS(inps)

    # subset data based on bbox
    if inps.SNWE:
        print('Subset data based on bbox')
        lat_user, lon_user, row, sample, rows, samples = \
            extract_data_based_bbox(inps)
        disp = disp[row: row + rows, sample: sample + samples]
        incidence = incidence[row: row + rows, sample: sample + samples]
        azimuth = azimuth[row: row + rows, sample: sample + samples]

    # convert to head angle
    print('convert azimuth angle to head angle')
    head = ut.azimuth2heading_angle(azimuth)
    phi = -head + 180

    # convert degree to radian
    incidence *= np.pi / 180
    phi *= np.pi / 180

    sc = Scene()
    # flip up-down of displacement and angle matrix
    sc.displacement = np.flipud(disp)
    sc.theta = np.flipud(incidence)
    sc.phi = np.flipud(phi)

    # calculate the scene's frame lower left corner, in geographical coordinate
    lon_ul = float(disp_atr['X_FIRST'])
    lat_ul = float(disp_atr['Y_FIRST'])
    lat_step = float(disp_atr['Y_STEP'])
    length = int(disp_atr['LENGTH'])
    lat_ll = lat_ul + lat_step * length
    lon_ll = lon_ul

    if inps.SNWE:
        lat_ll = lat_user
        lon_ll = lon_user

    sc.frame.llLat = lat_ll
    sc.frame.llLon = lon_ll

    # the pixel spacing can be either 'meter' or 'degree'
    sc.frame.spacing = disp_atr['X_UNIT'][:-1]
    sc.frame.dN = float(disp_atr['Y_STEP']) * (-1)
    sc.frame.dE = float(disp_atr['X_STEP'])

    # Saving the scene
    print('write Kite scene')
    if inps.outfile is not None:
        kite_outfile = inps.outfile[0]
    else:
        if inps.velocity:
            data_type = 'vel'
        else:
            data_type = 'dis'
        if inps.SNWE is not None:
            kite_outfile = (inps.input_HDFEOS[0].split('.')[0] + '_'
                            + str(date1) + '_' + str(date2) + '_'
                            + data_type + '_subset')
        else:
            kite_outfile = (inps.input_HDFEOS[0].split('.')[0] + '_'
                            + str(date1) + '_' + str(date2) + '_' + data_type)

    sc.save(kite_outfile)
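# Hedged follow-up sketch: reload a container written by save_kite() and open
# it in kite's spool GUI for inspection. The file name is an illustrative
# assumption following the naming pattern built above.
sc_check = Scene.load('S1_IW12_A_20190101_20191231_vel')  # hypothetical output
sc_check.spool()  # interactive quadtree/covariance inspection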
def main():
    if len(sys.argv) < 2:
        print(
            "input: asc_path dsc_path minlat minlon maxlat maxlon --workdir=name m")

    try:
        x0 = float(sys.argv[3])
        y0 = float(sys.argv[4])
        x1 = float(sys.argv[5])
        y1 = float(sys.argv[6])
    except:
        x0 = "eins"
        y0 = "eins"
        x1 = "eins"
        y1 = "eins"

    sharp = False
    loading = False
    plot = True
    topo = False
    synthetic = False
    calc_statistics = False
    subsample = False
    dump_grid = False

    for argv in sys.argv:
        if argv == "--sharp":
            sharp = True
        if argv == "--basic":
            sharp = "basic"
        if argv == "--ss":
            sharp = "ss"
        if argv == "--loading=True":
            loading = True
        if argv == "--loading=true":
            loading = True
        if argv == "--plot=False":
            plot = False
        if argv[0:10] == "--workdir=":
            name = argv[10:]
        if argv == "--topography":
            topo = True
        if argv == "--synthetic":
            synthetic = True
        if argv == "--statistics":
            calc_statistics = True
        if argv == "--subsample":
            subsample = True
        if argv == "--grond_export":
            dump_grid = True

    strikes = []
    lengths = []
    widths = []

    if loading is False:
        img_asc, coh_asc, scene_asc, dates_asc = load(sys.argv[1],
                                                      kite_scene=True)
        try:
            os.mkdir('work-%s' % name)
        except:
            pass
        files = glob.glob('work-%s/*' % name)
        for f in files:
            os.remove(f)

        fname = 'work-%s/asc.mod.tif' % name
        writeout(img_asc, fname, sc=scene_asc)
        longs_asc, lats_asc = to_latlon(fname)

        try:
            global_cmt_catalog = catalog.GlobalCMT()
            events = global_cmt_catalog.get_events(
                time_range=(num.min(dates_asc), num.max(dates_asc)),
                magmin=2.,
                latmin=num.min(lats_asc),
                latmax=num.max(lats_asc),
                lonmin=num.min(longs_asc),
                lonmax=num.max(longs_asc))
            areas = []
            for ev in events:
                areas.append(num.cbrt(ev.moment_tensor.moment) / 1000)
            area = num.max(areas)
        except:
            area = 400

        fname = 'work-%s/asc-' % name
        img_asc = process(img_asc, coh_asc, longs_asc, lats_asc, scene_asc,
                          x0, y0, x1, y1, fname, plot=plot, mode=sharp,
                          loading=loading, topo=topo, synthetic=synthetic,
                          calc_statistics=calc_statistics,
                          subsample=subsample)
        fname = 'work-%s/asc.mod.tif' % name
        writeout(img_asc, fname, sc=scene_asc)
        db = 1
        dates = []
        img_asc, coh_asc, scene_asc, dates_asc = load(sys.argv[1],
                                                      kite_scene=True)
        dates.append(dates_asc)
        snr_asc = aoi_snr(img_asc, area)

        img_dsc, coh_dsc, scene_dsc, dates_dsc = load(sys.argv[2],
                                                      kite_scene=True)
        dates.append(dates_dsc)
        fname = 'work-%s/dsc.mod.tif' % name
        writeout(img_dsc, fname, sc=scene_dsc)
        longs_dsc, lats_dsc = to_latlon(fname)
        fname = 'work-%s/dsc-' % name
        img_dsc = process(img_dsc, coh_dsc, longs_dsc, lats_dsc, scene_dsc,
                          x0, y0, x1, y1, fname, plot=plot, mode=sharp,
                          loading=loading, topo=topo, synthetic=synthetic,
                          calc_statistics=calc_statistics,
                          subsample=subsample)
        fname = 'work-%s/dsc.mod.tif' % name
        writeout(img_dsc, fname, sc=scene_dsc)
        db = 1
        img_dsc, coh_dsc, scene_dsc, dates = load(sys.argv[2],
                                                  kite_scene=True)
        snr_dsc = aoi_snr(img_dsc, area)

        minda = num.min(scene_asc.displacement)
        mindd = num.min(scene_dsc.displacement)
        mind = num.min([minda, mindd])
        maxa = num.max(scene_asc.displacement)
        maxdd = num.max(scene_dsc.displacement)
        maxd = num.max([maxa, maxdd])

        max_cum = num.max([abs(maxd), abs(mind)])
        minda = -max_cum
        mindd = -max_cum
        mind = -max_cum
        maxa = max_cum
        maxdd = max_cum
        maxd = max_cum

        if plot is True:
            fname = 'work-%s/asc' % name
            plot_on_map(db, scene_asc, longs_asc, lats_asc, x0, y0, x1, y1,
                        minda, maxa, fname, synthetic=synthetic, topo=topo,
                        kite_scene=True)
            fname = 'work-%s/dsc' % name
            plot_on_map(db, scene_dsc, longs_dsc, lats_dsc, x0, y0, x1, y1,
                        mindd, maxdd, fname, synthetic=synthetic, topo=topo,
                        kite_scene=True)

        fname = 'work-%s/asc.mod.tif' % name
        comb = rasterio.open(fname)
        longs_comb, lats_comb = to_latlon(fname)
        comb_img = comb.read(1)

        centers_bounding, coords_out, coords_box, strike, ellipses, \
            max_bound = bounding_box(comb_img, 400, sharp)
        for st in strike:
            strikes.append(st)
        print("Strike(s) of moment weighted centerline(s) are :%s" % strike)
        if plot is True:
            fname = 'work-%s/asc-comb-' % name
            plot_on_kite_box(coords_box, coords_out, scene_asc, longs_asc,
                             lats_asc, longs_comb, lats_comb, x0, y0, x1, y1,
                             name, ellipses, minda, maxa, fname,
                             synthetic=synthetic, topo=topo)

        fname = 'work-%s/dsc.mod.tif' % name
        comb = rasterio.open(fname)
        longs_comb, lats_comb = to_latlon(fname)
        comb_img = comb.read(1)
        centers_bounding, coords_out, coords_box, strike, ellipses, \
            max_bound = bounding_box(comb_img, 400, sharp)
        for st in strike:
            strikes.append(st)
        print("Strike(s) of moment weighted centerline(s) are :%s" % strike)
        if plot is True:
            fname = 'work-%s/dsc-comb-' % name
            plot_on_kite_box(coords_box, coords_out, scene_dsc, longs_dsc,
                             lats_dsc, longs_comb, lats_comb, x0, y0, x1, y1,
                             name, ellipses, mindd, maxdd, fname,
                             synthetic=synthetic, topo=topo)

        comb_img = combine('work-%s/asc.mod.tif' % name,
                           'work-%s/dsc.mod.tif' % name, name,
                           weight_asc=snr_asc, weight_dsc=snr_dsc,
                           plot=False)
        longs_comb, lats_comb = to_latlon("work-%s/merged.tiff" % name)

    else:
        fname = 'work-%s/merged.tiff' % name
        comb = rasterio.open(fname)
        longs, lats = to_latlon(fname)
        comb_img = comb.read(1)
        easts, norths = get_coords_from_geotiff(fname, comb_img)
        dE = easts[1] - easts[0]
        dN = norths[1] - norths[0]
        ll_long = num.min(longs)
        ll_lat = num.min(lats)
        dates = []
        img_asc, coh_asc, scene_asc, dates_asc = load(sys.argv[1],
                                                      kite_scene=True)
        img_dsc, coh_dsc, scene_dsc, dates_dsc = load(sys.argv[2],
                                                      kite_scene=True)
        minda = num.min(scene_asc.displacement)
        mindd = num.min(scene_dsc.displacement)
        mind = num.min([minda, mindd])
        maxa = num.max(scene_asc.displacement)
        maxdd = num.max(scene_dsc.displacement)
        maxd = num.max([maxa, maxdd])

    if plot is True:
        plt.figure(figsize=(sz1, sz2))
        plt.title('Loaded combined image')
        xr = plt.imshow(comb_img)
        plt.close()

    if subsample is True:
        # Define the scene's frame
        frame = FrameConfig(
            # Lower left geographical reference [deg]
            llLat=ll_lat, llLon=ll_long,
            # Pixel spacing [m] or [degrees]
            spacing='meter', dE=dE, dN=dN)

        displacement = comb_img
        # Look vectors
        # Theta is elevation angle from horizon
        theta = num.full_like(displacement, 48. * d2r)
        # Phi is azimuth towards the satellite, counter-clockwise from East
        phi = num.full_like(displacement, 23. * d2r)

        kite_comb_scene = Scene(
            displacement=displacement,
            phi=phi, theta=theta,
            frame=frame)
        kite_comb_scene.spool()

        # For convenience we set an abbreviation to the quadtree
        qt = kite_comb_scene.quadtree

        # Parametrisation of the quadtree
        qt.epsilon = 0.024        # Variance threshold
        qt.nan_allowed = 0.9      # Percentage of NaN values allowed per tile/leave
        qt.tile_size_max = 12000  # Maximum leave edge length in [m] or [deg]
        qt.tile_size_min = 250    # Minimum leave edge length in [m] or [deg]

        # We save the scene in kite's format
        # sc.save('kite_scene')

        # Or export the quadtree to CSV file
        # qt.export('/tmp/tree.csv')

    # statistical output
    # img_asc, coh_asc, scene_asc = load('muji_kite/asc', kite_scene=True)
    # comb_img = process(img_asc, coh_asc, plot=True)

    # use quadtree subsampling on gradient
    img_asc, coh_asc, scene_asc, dates = load(sys.argv[1], kite_scene=True)
    fname = 'work-%s/asc.mod.tif' % name
    longs_asc, lats_asc = to_latlon(fname)
    db = 1
    longs_comb, lats_comb = to_latlon("work-%s/merged.tiff" % name)
    mindc = num.min(comb_img)
    maxdc = num.max(comb_img)

    try:
        global_cmt_catalog = catalog.GlobalCMT()
        events = global_cmt_catalog.get_events(
            time_range=(num.min(dates), num.max(dates)),
            magmin=2.,
            latmin=num.min(lats_comb),
            latmax=num.max(lats_comb),
            lonmin=num.min(longs_comb),
            lonmax=num.max(longs_comb))
        areas = []
        for ev in events:
            areas.append(num.cbrt(ev.moment_tensor.moment) / 1000)
        area = num.max(areas)
    except:
        area = 400

    if dump_grid is True:
        from scipy import signal
        es = longs_comb.flatten()
        es_resamp = signal.decimate(es, 20)
        ns = lats_comb.flatten()
        ns_resamp = signal.decimate(ns, 20)
        comb_img_grid = comb_img.flatten()
        comb_img_grid_resamp = signal.decimate(comb_img_grid, 20)

        fobj_cum = open(os.path.join('work-%s/grad_grid.ASC' % name), 'w')
        for x, y, sembcums in zip(es, ns, comb_img_grid.flatten()):
            fobj_cum.write('%.2f %.2f %.20f\n' % (x, y, sembcums))
        fobj_cum.close()

        fobj_cum = open(os.path.join('work-%s/grad_grid_resam.ASC' % name),
                        'w')
        for x, y, sembcums in zip(es_resamp, ns_resamp,
                                  comb_img_grid_resamp):
            fobj_cum.write('%.2f %.2f %.20f\n' % (x, y, sembcums))
        fobj_cum.close()

    if plot is True:
        fname = 'work-%s/comb-' % name
        plot_on_map(db, comb_img.copy(), longs_comb, lats_comb, x0, y0, x1,
                    y1, mindc, maxdc, fname, synthetic=synthetic, topo=topo,
                    comb=True)

    centers_bounding, coords_out, coords_box, strike, ellipses, \
        max_bound = bounding_box(comb_img, area, sharp)
    for st in strike:
        strikes.append(st)
    print("Strike(s) of moment weighted centerline(s) are :%s" % strike)

    if plot is True:
        fname = 'work-%s/comb-' % name
        lengths, widths = plot_on_kite_box(coords_box, coords_out, scene_asc,
                                           longs_asc, lats_asc, longs_comb,
                                           lats_comb, x0, y0, x1, y1, name,
                                           ellipses, mind, maxd, fname,
                                           synthetic=synthetic, topo=topo)

        fobj_cum = open(os.path.join('work-%s/priors.ASC' % name), 'w')
        for lens, wid in zip(lengths, widths):
            fobj_cum.write('%.2f %.2f\n' % (lens, wid))
        fobj_cum.close()

        fobj_cum = open(os.path.join('work-%s/priors_strike.ASC' % name),
                        'w')
        for st in zip(strikes):
            fobj_cum.write('%.2f\n' % (st))
        fobj_cum.close()

        plot_on_kite_line(coords_out, scene_asc, longs_asc, lats_asc,
                          longs_comb, lats_comb, x0, y0, x1, y1, mind, maxd,
                          fname, synthetic=synthetic, topo=topo)

    simp_fault, comp_fault = simplify(centers_bounding)
    db = dump_geojson(simp_fault, longs_comb, lats_comb, name)

    if plot is True:
        plot_on_kite_scatter(
            db, scene_asc, longs_asc, lats_asc, x0, y0, x1, y1, mind, maxd,
            fname, synthetic=synthetic, topo=topo)

    img_dsc, coh_dsc, scene_dsc, dates = load(sys.argv[2], kite_scene=True)
    fname = 'work-%s/dsc.mod.tif' % name
    longs_dsc, lats_dsc = to_latlon(fname)
    fname = 'work-%s/comb-' % name
    if plot is True:
        plot_on_kite_scatter(
            db, scene_dsc, longs_dsc, lats_dsc, x0, y0, x1, y1, mind, maxd,
            fname, synthetic=synthetic, topo=topo)

    centers = skelotonize(comb_img)
    simp_fault, comp_fault = simplify(centers)

    if calc_statistics is True:
        res_faults = rup_prop(db)
        y = l1tf_prep(res_faults)
        run_l1tf(y)
def main(args=None):
    '''
    Spool app deployed through setuptools
    '''
    if args is None:
        args = sys.argv[1:]

    epilog = '''Spool is part of the kite InSAR framework.

Author: Marius Paul Isken ([email protected])
Documentation: https://pyrocko.org'''
    desc = 'InSAR deformation inspector, quadtree and covariance'

    parser = ap.ArgumentParser(
        prog='spool',
        epilog=epilog,
        description=desc,
        parents=[],
        formatter_class=ap.RawTextHelpFormatter,
        prefix_chars='-',
        fromfile_prefix_chars=None,
        argument_default=ap.SUPPRESS,
        conflict_handler='resolve',
        add_help=True)

    parser.add_argument(
        'file', type=str,
        help='Load native kite container (.npz & .yml)',
        default=None, nargs='?')

    parser.add_argument(
        '--load', metavar='file', type=str, default=None,
        help='''Import file or directory
Supported formats are:
 - Matlab *.mat
 - GAMMA * binary and *.par file
 - GMTSAR *.grd binary and *.los.* file
 - ISCE *.unw.geo with *.unw.geo.xml and *.rdr.geo for LOS data
 - ROI_PAC * binary and *.rsc file
 - SARSCAPE *los_ll.grd and *.los.enu file
 - SNAP *.rsc and *.Abstracted_Metadata.txt file (GAMMA Export)
 - ARIA extracted layers: unwrappedPhase, lookAngle,
   incidenceAngle, connectedComponents
 - LiCSAR *.unw.tif and LOS data, see client.download_licsar
For more information see the online documentation at
https://pyrocko.org''')

    parser.add_argument(
        '--synthetic', type=str, default=None,
        choices=['fractal', 'sine', 'gauss'],
        help='''Synthetic Tests
Available Synthetic Displacement:
 * fractal (Atmospheric model; after Hanssen, 2001)
 * sine
 * gauss
''')

    parser.add_argument(
        '--verbose', '-v', action='count', default=1,
        help='Verbosity, add multiple to increase verbosity.')

    ns = parser.parse_args(args)

    log_level = logging.WARNING - ns.verbose * 10
    log_level = log_level if log_level > logging.DEBUG else logging.DEBUG
    logging.basicConfig(level=log_level if log_level > 0 else 0)

    if ns.load is None and ns.synthetic is None and ns.file is None:
        parser.print_help()
        sys.exit(0)

    sc = None
    if ns.synthetic is not None:
        if ns.synthetic == 'fractal':
            sc = TestScene.createFractal()
        elif ns.synthetic == 'sine':
            sc = TestScene.createSine()
        elif ns.synthetic == 'gauss':
            sc = TestScene.createGauss()
        else:
            parser.print_help()
            sys.exit(0)
    elif ns.file is not None:
        sc = Scene.load(ns.file)

    if sc:
        spool(scene=sc)
    elif ns.load is not None:
        spool(import_file=ns.load)
def importFile(self, filename):
    self.sigProgressStarted.emit(('Importing scene...', ))
    self.setScene(Scene.import_data(filename))
    self.sigProgressFinished.emit()
def setUpClass(cls):
    file = common.get_test_data("myanmar_alos_dsc_ionocorr.mat")
    cls.sc = Scene.import_data(file)
def loadFile(self, filename):
    self.sigProcessingStarted.emit('Loading scene...')
    self.setScene(Scene.load(filename))
    self.sigProcessingFinished.emit()
def importFile(self, filename):
    self.sigProcessingStarted.emit('Importing scene...')
    self.setScene(Scene.import_data(filename))
    self.sigProcessingFinished.emit()
def loadFile(self, filename):
    self.sigProgressStarted.emit(('Loading scene...', ))
    self.setScene(Scene.load(filename))
    self.sigProgressFinished.emit()
    # Print reference points
    print('Reference point (lalo): {:.4f} N, {:.4f} E'.format(refLat, refLon))
    print('Reference point (utm): {:.4f} E, {:.4f} N'.format(
        refUTM[0], refUTM[1]))

    # XY positions of the reference points in lon/lat
    xyLaLo = np.column_stack([
        qt.leaf_focal_points[:, 0] + refLon,
        qt.leaf_focal_points[:, 1] + refLat
    ])
    xyUTM = utm.from_latlon(xyLL[:, 1], xyLL[:, 0])
    xyPos = xyUTM / 1000  # m to km

    return xyLaLo, xyPos


### MAIN ---
if __name__ == '__main__':
    # Gather inputs
    inps = cmdParser()

    ## Load scene
    sc = Scene.load(inps.kiteScene)
    qt = sc.quadtree
    print('Loaded Kite scene: {:s}'.format(inps.kiteScene))

    ## Parse coordinates
    xyLaLo, xyPos = parseCoords(sc)
from kite import Scene
import matplotlib.pyplot as plt

# Assume we have an existing kite.Scene with a parametrized quadtree
scene = Scene.load("acquila_2016.yml")

ax = plt.gca()

# Inspect the noise data which is used to calculate the covariance
ax.imshow(scene.covariance.noise_data)
plt.show()

# Inspect the focal-point (quick mode) covariance matrix
ax.imshow(scene.covariance.covariance_matrix_focal)

# Inspect the full covariance matrix
ax.imshow(scene.covariance.covariance_matrix)

# Get the full weight matrix
ax.imshow(scene.covariance.weight_matrix)

# Get the covariance and weight between two leaves
leaf_1 = scene.quadtree.leaves[0]
leaf_2 = scene.quadtree.leaves[0]

scene.covariance.getLeafCovariance(leaf_1, leaf_2)
scene.covariance.getLeafWeight(leaf_1, leaf_2)
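# Hedged extension of the covariance example above: set the analytical noise
# model and persist the scene so the (expensive) full covariance matrix can be
# reused. The parameter values and the output name are illustrative
# assumptions; the config attributes (a, b, variance) mirror those used in the
# test snippets earlier in this collection.
scene.covariance.config.a = 0.008        # hypothetical model coefficient
scene.covariance.config.b = 300.2        # hypothetical model coefficient
scene.covariance.config.variance = 0.2   # hypothetical variance
scene.save('acquila_2016_covariance')    # hypothetical output container name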