Example #1
 ortho_grid = glimpse.Grid.read(ortho_path, d=grid_size)
 # Intersect bounding boxes
 # (x, y) extent of the camera view out to 50 km: (xmin, ymin, xmax, ymax)
 cam_box = img.cam.viewbox(50e3)[[0, 1, 3, 4]]
 box = glimpse.helpers.intersect_boxes(np.row_stack((
     cam_box, dem_grid.box2d, ortho_grid.box2d)))
 # Read dem and ortho
 dem = glimpse.Raster.read(dem_path, xlim=box[0::2], ylim=box[1::2], d=grid_size)
 dem.crop(zlim=(0.1, np.inf))
 # Fill the DEM in a circle around the camera position
 radius = circle_radius.get(image, circle_radius_default)
 dem.fill_circle(center=img.cam.xyz, radius=radius)
 nbands = gdal.Open(ortho_path).RasterCount
 bands = []
 for i in range(nbands):
     bands.append(glimpse.Raster.read(ortho_path, band=i + 1, d=grid_size,
         xlim=box[0::2], ylim=box[1::2]).Z)
 orthoZ = np.dstack(bands).astype(float)
 if not color:
     orthoZ = np.atleast_3d(glimpse.helpers.rgb_to_gray(orthoZ))
 # Treat zero-valued orthoimage pixels as nodata
 orthoZ[orthoZ == 0] = np.nan
 # HACK: Clip dem and ortho to same size relative to x, y min
 ij = np.minimum(dem.shape, orthoZ.shape[0:2])
 dem = dem[(dem.shape[0] - ij[0]):, :ij[1]]
 orthoZ = orthoZ[(orthoZ.shape[0] - ij[0]):, :ij[1], :]
 # Compute mask
 if viewshed_scale != 1:
     smdem = dem.copy()
     smdem.resize(viewshed_scale)
 else:
     smdem = dem
 mask = glimpse.Raster(Z=smdem.viewshed(img.cam.xyz), x=dem.xlim, y=dem.ylim)
 if viewshed_scale != 1:
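
For context, glimpse.helpers.intersect_boxes above is given rows of (xmin, ymin, xmax, ymax) boxes and returns their common extent, which is then split into xlim = box[0::2] and ylim = box[1::2]. Below is a minimal stand-alone sketch of that intersection in plain NumPy; intersect_boxes_sketch is a hypothetical name, not the glimpse implementation.

import numpy as np

def intersect_boxes_sketch(boxes):
    # Each row is an axis-aligned box (xmin, ymin, xmax, ymax).
    # The intersection keeps the largest minima and the smallest maxima.
    boxes = np.asarray(boxes, dtype=float)
    lower = boxes[:, :2].max(axis=0)
    upper = boxes[:, 2:].min(axis=0)
    if np.any(lower >= upper):
        raise ValueError('Boxes do not overlap')
    return np.concatenate((lower, upper))

box = intersect_boxes_sketch([(0, 0, 10, 10), (2, 3, 12, 8)])
print(box)  # -> [2., 3., 10., 8.]; box[0::2] is xlim, box[1::2] is ylim
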
Example #2
# Write metadata
glimpse.helpers.write_pickle(ti, os.path.join(rasters_path, 'ti.pkl'))
glimpse.helpers.write_pickle(xyi, os.path.join(rasters_path, 'xyi.pkl'))

# ---- Build strain rates ----

vx = glimpse.helpers.read_pickle(os.path.join(rasters_path, 'vx.pkl'))
vy = glimpse.helpers.read_pickle(os.path.join(rasters_path, 'vy.pkl'))
xout, yout, xin, yin = compute_principal_strains(vx, vy, template.d)

# Save as rasters
glimpse.helpers.write_pickle(xout, os.path.join(rasters_path,
                                                'extension_x.pkl'))
glimpse.helpers.write_pickle(yout, os.path.join(rasters_path,
                                                'extension_y.pkl'))
glimpse.helpers.write_pickle(xin,
                             os.path.join(rasters_path, 'compression_x.pkl'))
glimpse.helpers.write_pickle(yin,
                             os.path.join(rasters_path, 'compression_y.pkl'))

# Save as arrays
template = glimpse.Raster.read(os.path.join(rasters_path, 'template.tif'))
rows, cols = np.nonzero(template.Z)
glimpse.helpers.write_pickle(
    np.dstack((xout[rows, cols, :], yout[rows, cols, :])),
    os.path.join(arrays_path, 'extension.pkl'))
glimpse.helpers.write_pickle(
    np.dstack((xin[rows, cols, :], yin[rows, cols, :])),
    os.path.join(arrays_path, 'compression.pkl'))
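
compute_principal_strains is a project-specific helper whose body is not shown. As a rough sketch of the underlying math only (the real function evidently also returns the x and y components of the principal axes, which this simplified version does not), the principal strain rates of a 2-D velocity field are the eigenvalues of the symmetric strain-rate tensor built from the velocity gradients:

import numpy as np

def principal_strain_rates(vx, vy, dx, dy):
    # Velocity gradients on a regular grid; rows are y, columns are x.
    dvx_dy, dvx_dx = np.gradient(vx, dy, dx)
    dvy_dy, dvy_dx = np.gradient(vy, dy, dx)
    # Symmetric 2-D strain-rate tensor: [[exx, exy], [exy, eyy]]
    exx, eyy = dvx_dx, dvy_dy
    exy = 0.5 * (dvx_dy + dvy_dx)
    # Eigenvalues of the symmetric 2x2 tensor
    mean = 0.5 * (exx + eyy)
    radius = np.sqrt((0.5 * (exx - eyy)) ** 2 + exy ** 2)
    return mean + radius, mean - radius  # most extensional, most compressional
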
Example #3
 selected = np.count_nonzero(observer_mask, axis=1) > 0
 # xy (n, ), observer_mask (n, o)
 xy, obsmask, ids = track_points[selected], observer_mask[selected], track_ids[selected]
 # ---- Compute motion parameters (cartesian) ----
 n = len(xy)
 # vxyz | vxyz_sigma
 vxyz = np.ones((n, 3), dtype=float)
 vxyz_sigma = np.ones((n, 3), dtype=float)
 # (x, y): Sample from velocity grids
 vxyz[:, 0] = vx.sample(xy, order=0)
 vxyz[:, 1] = vy.sample(xy, order=0)
 vxyz_sigma[:, 0] = vx_sigma.sample(xy, order=0)
 vxyz_sigma[:, 1] = vy_sigma.sample(xy, order=0)
 # (z): Compute by integrating dz/dx and dz/dy over vx and vy
 rowcol = dem.xy_to_rowcol(xy, snap=True)
 dz = np.dstack(dem.gradient())[rowcol[:, 0], rowcol[:, 1], :]
 # sigma for dz/dx * vx + dz/dy * vy, assuming zi, zj are fully correlated
 udz = unp.uarray(dz, np.zeros_like(dz))  # treat dz as exact (zero sigma)
 uvxy = unp.uarray(vxyz[:, 0:2], vxyz_sigma[:, 0:2])
 uvz = udz[:, 0] * uvxy[:, 0] + udz[:, 1] * uvxy[:, 1]
 vxyz[:, 2] = unp.nominal_values(uvz)
 vxyz_sigma[:, 2] = unp.std_devs(uvz)
 # ---- Compute motion parameters (cylindrical) ----
 n = len(xy)
 # vrthz | vrthz_sigma
 vrthz = np.ones((n, 3), dtype=float)
 vrthz_sigma = np.ones((n, 3), dtype=float)
 # (r, theta): Sample from velocity grids
 vrthz[:, 0] = vr.sample(xy, order=0)
 vrthz[:, 1] = theta.sample(xy, order=0)
 vrthz_sigma[:, 0] = vr_sigma.sample(xy, order=0)
 vrthz_sigma[:, 1] = theta_sigma.sample(xy, order=0)
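
The cartesian block above relies on the uncertainties package (unp = uncertainties.unumpy) to propagate the velocity sigmas through vz = dz/dx * vx + dz/dy * vy. A self-contained toy example of that propagation, with made-up numbers:

import numpy as np
from uncertainties import unumpy as unp

# Hypothetical surface slopes (treated as exact) and horizontal
# velocities with 1-sigma uncertainties.
dzdx, dzdy = 0.05, -0.02
uvx = unp.uarray([2.0, 1.5], [0.3, 0.2])
uvy = unp.uarray([0.5, -0.4], [0.1, 0.1])

# Linear error propagation through vz = dz/dx * vx + dz/dy * vy
uvz = dzdx * uvx + dzdy * uvy
vz = unp.nominal_values(uvz)
vz_sigma = unp.std_devs(uvz)
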
Example #4
                            xlim=template.xlim,
                            ylim=template.ylim,
                            nan=nan)
    # NOTE: Avoiding faster grid sampling because of NaN
    vx0.append(
        x.sample(points, order=1, bounds_error=False).reshape(template.shape))
    vy0.append(
        y.sample(points, order=1, bounds_error=False).reshape(template.shape))
    datestr = re.findall(r'^([0-9]{8})', key[0])[0]
    t0.append(datetime.datetime.strptime(datestr, '%Y%m%d'))
# Remove unread keys (pop from the end so earlier indices stay valid)
for i in sorted(dropped, reverse=True):
    velocity_keys.pop(i)
# Stack results
datetimes = np.array(t0)
vx = np.dstack(vx0)
vy = np.dstack(vy0)

# ---- Filter Landsat velocities ----

lmask = np.array([key[1] == 'landsat' for key in velocity_keys])
lvx = vx[..., lmask].copy()
lvy = vy[..., lmask].copy()
# Normalize vx, vy to the unit circle
theta = np.arctan2(lvy, lvx)
theta[theta < 0] += 2 * np.pi
uy = np.sin(theta)
ux = np.cos(theta)
# Compute moving-window median orientations
mask = ~np.isnan(ux)
mux = np.zeros(ux.shape, dtype=float)
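
The snippet is cut off just as it starts computing moving-window median orientations from the unit vectors ux, uy. One plausible way to do that for a single 2-D slice, sketched with SciPy's generic_filter (window size, edge handling, and the NaN guard are arbitrary choices here, not taken from the original script):

import numpy as np
from scipy import ndimage

def median_orientation(ux, uy, size=5):
    # NaN-aware moving-window medians of the unit-vector components.
    nanmedian = lambda a: np.nanmedian(a) if np.any(np.isfinite(a)) else np.nan
    mux = ndimage.generic_filter(ux, nanmedian, size=size, mode='nearest')
    muy = ndimage.generic_filter(uy, nanmedian, size=size, mode='nearest')
    # Median flow direction in [0, 2*pi)
    mtheta = np.arctan2(muy, mux)
    mtheta[mtheta < 0] += 2 * np.pi
    return mtheta
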