def select_track_points(xy, images, polygon, dem, max_distance):
    """
    Return a track point visibility mask for a set of starting images.

    Returns:
        array: Visibility mask (n points, m images). Rows are all False for
            points outside the polygon or where the DEM is NaN.
    """
    # In DEM, in polygon, and DEM not NaN
    z = dem.sample(xy, bounds_error=False, fill_value=np.nan)
    mask = ~np.isnan(z) & glimpse.helpers.points_in_polygon(xy, polygon)
    # Visible in one or more images
    xyz = np.column_stack((xy, z))[mask]
    visible = np.tile(mask.reshape(-1, 1), reps=(1, len(images)))
    for i, img in enumerate(images):
        uv = img.cam.project(xyz, correction=True)
        # In image frame
        visible[mask, i] &= img.cam.inframe(uv)
        # In range
        distance = np.linalg.norm(xyz[:, 0:2] - img.cam.xyz[0:2], axis=1)
        visible[mask, i] &= distance < max_distance
        # In DEM viewshed
        viewshed = glimpse.Raster(
            Z=dem.viewshed(img.cam.xyz), x=dem.xlim, y=dem.ylim)
        visible[mask, i] &= viewshed.sample(xyz[:, 0:2], order=1) > 0.99
        # Not in land mask
        land_mask = glimpse.Raster(load_masks([img])[0])
        visible[mask, i] &= land_mask.sample(
            uv, order=1, bounds_error=False, fill_value=1.0) == 0
    return visible
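
# Minimal sketch (illustration only, not part of the pipeline): the
# `visible[mask, i] &= ...` pattern above narrows one image's visibility
# column in place via a fancy-indexed get-modify-set. Toy arrays below are
# assumptions; `_demo_masked_visibility_update` is a hypothetical name.
def _demo_masked_visibility_update():
    mask = np.array([True, False, True, False])  # points passing DEM/polygon
    visible = np.tile(mask.reshape(-1, 1), reps=(1, 2))  # (points, images)
    inframe = np.array([True, False])  # frame test for the two masked points
    visible[mask, 0] &= inframe  # point 0 stays visible; point 2 is dropped
    return visible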
def flatten_tracks_doug(runs):
    # Join together second forward and backward runs
    f, r = runs['fv'], runs['rv']
    means = np.column_stack((f.means[..., 3:], r.means[..., 3:]))
    sigmas = np.column_stack((f.sigmas[..., 3:], r.sigmas[..., 3:]))
    # Flatten joined runs
    # Mean: Inverse-variance weighted mean
    # Sigma: Linear combination of weighted correlated random variables
    # (approximation using the weighted mean of the variances)
    weights = sigmas**-2
    weights *= 1 / np.nansum(weights, axis=1, keepdims=True)
    allnan = np.isnan(means).all(axis=1, keepdims=True)
    means = np.nansum(weights * means, axis=1, keepdims=True)
    sigmas = np.sqrt(np.nansum(weights * sigmas**2, axis=1, keepdims=True))
    # np.nansum interprets sum of nans as 0
    means[allnan] = np.nan
    sigmas[allnan] = np.nan
    return means.squeeze(axis=1), sigmas.squeeze(axis=1)
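
# Worked example (illustration only, hypothetical name): inverse-variance
# weighting as in flatten_tracks_doug, including its stated approximation of
# sigma as the weighted mean of the variances (not the textbook
# independent-estimator formula). Two estimates, 10 +/- 1 and 14 +/- 2, get
# weights 0.8 and 0.2, so the mean is pulled toward the more certain one.
def _demo_inverse_variance_mean():
    means = np.array([[10.0, 14.0]])
    sigmas = np.array([[1.0, 2.0]])
    weights = sigmas**-2  # [1, 0.25]
    weights *= 1 / np.nansum(weights, axis=1, keepdims=True)  # [0.8, 0.2]
    mean = np.nansum(weights * means, axis=1)  # -> [10.8]
    sigma = np.sqrt(np.nansum(weights * sigmas**2, axis=1))  # -> [~1.26]
    return mean, sigma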
iranges = glimpse.helpers.cut_ranges(iranges, cuts)
# Sudden very large motions
viewdirs = np.array([img.cam.viewdir for img in images])
dtheta = np.diff(viewdirs, axis=0)
pan_tilt = np.linalg.norm(dtheta[:, 0:2], axis=1)
rotation = np.abs(dtheta[:, 2])
breaks = (pan_tilt > max_pan_tilt) | (rotation > max_rotation)
cuts = [i + 1 for i in np.nonzero(breaks)[0]]
iranges = glimpse.helpers.cut_ranges(iranges, cuts)
# NOTE: Switch to index-based ranges (from range-based)
ranges = iranges - (0, 1)
# Periods of excessive motion
# matplotlib.pyplot.figure()
for i, j in iranges:
    viewdirs = np.array([img.cam.viewdir for img in images[i:j]])
    bins = np.column_stack(
        (datetimes[i:j], datetimes[i:j] + datetime.timedelta(days=3)))
    ibins = np.searchsorted(datetimes[i:j], bins)
    pan_tilt = [
        np.linalg.norm(viewdirs[m, 0:2] - viewdirs[m:n, 0:2], axis=1).max()
        for m, n in ibins]
    rotation = [np.abs(viewdirs[m, 2] - viewdirs[m:n, 2]).max() for m, n in ibins]
    peaks = (np.array(pan_tilt) > max_pan_tilt) | (np.array(rotation) > max_rotation)
    # # (plot)
    # matplotlib.pyplot.plot(datetimes[i:j], pan_tilt, color='grey')
    # matplotlib.pyplot.plot(datetimes[i:j], rotation, color='black')
    # matplotlib.pyplot.plot(datetimes[i:j], peaks, color='red')
    # # (print)
    # print(i, j)
    # print(''.join(np.array(['.', 'o'])[peaks.astype(int)]))
    # (cut) Offset slice-relative peak indices to global image indices
    cutouts = [(i + k, i + k + 1) for k in np.nonzero(peaks)[0]]
    ranges = glimpse.helpers.cut_out_ranges(ranges, cutouts)
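
# Illustrative sketch (toy data, hypothetical name): the windowing above uses
# np.searchsorted on the sorted datetimes to turn each [t, t + 3 days) bin
# into an index range [m, n), so the comprehensions can take the maximum
# angular change within each forward-looking window.
def _demo_forward_windows():
    t0 = datetime.datetime(2005, 6, 1)
    times = np.array([t0 + datetime.timedelta(days=d) for d in (0, 1, 2, 5, 6)])
    bins = np.column_stack((times, times + datetime.timedelta(days=3)))
    ibins = np.searchsorted(times, bins)
    # First row is (0, 3): observations on days 0-2 fall in the first window
    return ibins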
rows = np.searchsorted(ids, points['ids'])
means[rows, col], sigmas[rows, col] = flatten_function(runs)
flotation[rows, col] = points['flotation']
nobservers[rows, col] = points['observer_mask'].sum(axis=1)
if mean_midtimes:
    # Midtime as mean of observation times
    midtime = origin + (runs['f'].datetimes - origin).mean()
else:
    # Midtime as middle of time span
    midtime = origin + (runs['f'].datetimes[[0, -1]] - origin).mean()
ti[col] = (
    runs['f'].datetimes[0], runs['f'].datetimes[-1], midtime, track_ids[col])

# Precompute spatial neighborhoods and masks
ncols = template.shape[1]
neighbor_ids = np.column_stack(
    (ids, ids - 1, ids + 1, ids - ncols, ids + ncols))
if diagonal_neighbors:
    neighbor_ids = np.column_stack((
        neighbor_ids,
        np.column_stack((
            ids - 1 - ncols, ids + 1 - ncols, ids - 1 + ncols, ids + 1 + ncols))))
neighbor_rows = np.searchsorted(ids, neighbor_ids)
missing = ~np.isin(neighbor_ids, ids)
neighbor_rows[missing] = 0
few_cams = (
    (nobservers[neighbor_rows, :] < min_observers) &
    (nobservers[neighbor_rows, :].max(axis=1, keepdims=True) >= min_observers))
isnan = np.isnan(means)
# Apply spatial median filter
fmeans = np.full(means.shape, np.nan, dtype=np.float32)
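
# Illustrative sketch (toy grid, hypothetical name): on a row-major grid with
# `ncols` columns, a flat index i has left/right neighbors i -/+ 1 and
# up/down neighbors i -/+ ncols. Neighbors falling outside the stored ids are
# pointed at placeholder row 0 and invalidated via the `missing` mask, as in
# the neighborhood precomputation above.
def _demo_grid_neighbors():
    ncols = 4
    ids = np.arange(12)  # 3 x 4 grid, all cells present
    neighbor_ids = np.column_stack(
        (ids, ids - 1, ids + 1, ids - ncols, ids + ncols))
    neighbor_rows = np.searchsorted(ids, neighbor_ids)
    missing = ~np.isin(neighbor_ids, ids)  # e.g. id -1 left of the first cell
    neighbor_rows[missing] = 0  # placeholder row, masked out downstream
    return neighbor_rows, missing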
vy[..., lmask] = lvy
# Plot
matplotlib.pyplot.figure()
# glimpse.Raster(np.sum(bad, axis=2), template.x, template.y).plot()
glimpse.Raster(
    np.sum(~np.isnan(vx), axis=2) > 1, template.x, template.y).plot()
# matplotlib.pyplot.colorbar()
matplotlib.pyplot.plot(cg.Glacier()[:, 0], cg.Glacier()[:, 1])

# ---- Compute weights ----
# Use sum of temporal distances to neighboring observations
# Saturate at 0.5 years because of seasonal variability
dts = np.column_stack((
    np.concatenate(([datetime.timedelta(0)], np.diff(datetimes))),
    np.concatenate((np.diff(datetimes), [datetime.timedelta(0)])))).sum(axis=1)
weights = np.array([dt.total_seconds() / (3600 * 24 * 365) for dt in dts])
weights[weights > 0.5] = 0.5
# Lower weights for observations before 2004
dyears = np.array([
    dt.total_seconds() / (3600 * 24 * 365)
    for dt in datetime.datetime(2004, 6, 18) - datetimes])
weights[dyears > 0] *= 1 - dyears[dyears > 0] / max(dyears)

# ---- Compute summary statistics (cartesian) ----
w = glimpse.helpers.tile_axis(weights, vx.shape, axis=(0, 1))
vx_mean = glimpse.helpers.weighted_nanmean(vx, weights=w, axis=2)
vy_mean = glimpse.helpers.weighted_nanmean(vy, weights=w, axis=2)
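
# Worked example (illustration only, hypothetical name): the time weighting
# above in isolation. An observation whose neighbor gaps sum to 2 days gets
# weight 2/365 of a year; one isolated by a year on either side saturates at
# 0.5 to limit seasonal aliasing.
def _demo_time_weights():
    gaps = np.array([datetime.timedelta(days=2), datetime.timedelta(days=730)])
    weights = np.array([dt.total_seconds() / (3600 * 24 * 365) for dt in gaps])
    weights[weights > 0.5] = 0.5
    return weights  # -> [~0.0055, 0.5]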