コード例 #1
0
def tide_height(t, station=9454240):
    """Return NOAA tide height(s) in meters (MSL datum) at time(s) t.

    Arguments:
        t: datetime.datetime or iterable of datetimes (assumed GMT)
        station (int): NOAA CO-OPS station id
            (default: 9454240, Valdez — kept for backward compatibility)

    Returns:
        numpy.ndarray: Tide heights (m), linearly interpolated from the
        station's hourly-height product.
    """
    if isinstance(t, datetime.datetime):
        t = [t]
    t = np.asarray(t)
    # Pad the request window so the last query time is bracketed by samples.
    dt = datetime.timedelta(hours=1.5)
    # Floor the start to the whole hour to align with the hourly product.
    t_begin = np.nanmin(t).replace(minute=0, second=0, microsecond=0)
    t_end = np.nanmax(t) + dt
    # https://tidesandcurrents.noaa.gov/api/
    params = dict(
        format='json',
        units='metric',
        time_zone='gmt',
        datum='MSL',
        product='hourly_height',
        station=station,
        begin_date=t_begin.strftime('%Y%m%d %H:%M'),
        end_date=t_end.strftime('%Y%m%d %H:%M'))
    r = requests.get('https://tidesandcurrents.noaa.gov/api/datagetter',
                     params=params)
    v = [float(item['v']) for item in r.json()['data']]
    # Samples are hourly: x-coordinates are 0, 3600, ..., 3600 * (len(v) - 1).
    return np.interp([dti.total_seconds() for dti in t - t_begin],
                     np.linspace(0, 3600 * len(v[1:]), len(v)), v)
コード例 #2
0
# Root directory of field observations.
DATA_DIR = "/home/dunbar/Research/helheim/data/observations"
# DEMs live in a subdirectory of the observation root.
DEM_DIR = os.path.join(DATA_DIR, 'dem')
# NOTE(review): not used in this excerpt — presumably a depth cutoff (m)
# consumed later in the script; confirm before removing.
MAX_DEPTH = 30e3

# ---- Prepare Observers ----

# Build one glimpse.Observer per camera directory.
observerpath = ['stardot1', 'stardot2']
observers = []
for observer in observerpath:
    path = join(DATA_DIR, observer)
    campaths = glob.glob(join(path, "*.JSON"))
    # Pair each camera JSON with its same-named .jpg image.
    images = [glimpse.Image(path=campath.replace(".JSON", ".jpg"), cam=campath)
              for campath in campaths]
    images.sort(key=lambda img: img.datetime)
    datetimes = np.array([img.datetime for img in images])
    # Make timestamps strictly increasing: push any image whose timestamp is
    # <= its predecessor's forward by 1-5 seconds.
    for n, delta in enumerate(np.diff(datetimes)):
        if delta <= datetime.timedelta(seconds=0):
            secs = datetime.timedelta(seconds=n % 5 + 1)
            images[n + 1].datetime = images[n + 1].datetime + secs
    diffs = np.array([dt.total_seconds()
                      for dt in np.diff([img.datetime for img in images])])
    # Drop the later image of any pair still <= 1 second apart.
    # Fixes two defects in the original: `np.int` was removed in NumPy 1.24,
    # and the diff *values* (not their indices) were used as pop positions.
    # Pop from the end so earlier indices remain valid.
    for i in reversed(np.where(diffs <= 1)[0]):
        images.pop(i + 1)

    # Keep only the first half of the image set.
    images = images[:int(len(images) / 2)]
    print("Image set {} \n".format(len(images)))
    obs = glimpse.Observer(list(images), cache=False)
    observers.append(obs)
#-------------------------
# Project a world coordinate into the second camera's image and plot it.
# BUG FIX: the original indexed `observer[1]` — `observer` is the loop
# variable left over from above (the string 'stardot2'); the list of
# Observer objects is `observers`.
uv = observers[1].images[0].cam.project((7361411.0, 533528.0, 180))
observers[1].images[0].cam.plot()
matplotlib.pyplot.scatter(uv[:, 0], uv[:, 1])
コード例 #3
0
 # Load per-point tracking parameters saved for this run.
 params = glimpse.helpers.read_pickle(
     os.path.join(points_path, basename + '.pkl'))
 # ---- Load DEM ----
 # dem, dem_sigma
 # Interpolate the DEM (and its uncertainty raster) to time t.
 dem, dem_sigma = dem_interpolant(t, return_sigma=True)
 # Crop DEM (for lower memory use)
 # Bounding box of the tracked points padded by dem_padding on every side;
 # box is (xmin, ymin, xmax, ymax), hence the [0::2]/[1::2] slicing below.
 box = (glimpse.helpers.bounding_box(params['xy']) +
     np.array([-1, -1, 1, 1]) * dem_padding)
 dem.crop(xlim=box[0::2], ylim=box[1::2])
 dem_sigma.crop(xlim=box[0::2], ylim=box[1::2])
 dem.crop_to_data()
 dem_sigma.crop_to_data()
 # ---- Compute motion models ----
 # Cylindrical parameterization is signaled by the presence of 'vrthz'.
 cylindrical = 'vrthz' in params
 # motion_models
 # Time unit for velocities/accelerations (per day).
 time_unit = datetime.timedelta(days=1)
 # Number of tracked points.
 m = len(params['xy'])
 if cylindrical:
     vrthz_sigmas = [compute_vrthz_sigma(
         params['vrthz_sigma'][i], params['flotation'][i]) for i in range(m)]
     arthz_sigmas = [compute_arthz_sigma(
         vrthz_sigmas[i], params['flotation'][i]) for i in range(m)]
     motion_models = [glimpse.tracker.CylindricalMotionModel(
         n=n, dem=dem, dem_sigma=dem_sigma, time_unit=time_unit,
         xy_sigma=xy_sigma, xy=params['xy'][i], vrthz=params['vrthz'][i],
         vrthz_sigma=vrthz_sigmas[i], arthz_sigma=arthz_sigmas[i])
         for i in range(m)]
 else:
     vxyz_sigmas = [compute_vxyz_sigma(
         params['vxyz_sigma'][i], params['flotation'][i]) for i in range(m)]
     axyz_sigmas = [compute_axyz_sigma(
コード例 #4
0
File: build_observers.py  Project: ezwelty/glimpse-cg
import cg
from cg import glimpse
from glimpse.imports import (os, np, datetime, matplotlib)
# Root of the Columbia Glacier data volume.
root = '/volumes/science/data/columbia'
cg.IMAGE_PATH = os.path.join(root, 'timelapse')

# Stations to include
stations = (
    'CG04', 'CG05', 'CG06', 'AK01', 'AK03', 'AK03b', 'AK01b', 'AK10', 'AK09',
    'AK09b', 'AKST03A', 'AKST03B', 'AK12', 'AKJNC', 'AK10b')
# HACK: Use same snap as in build_viewdirs.py, etc.
snap = datetime.timedelta(hours=2)
# Max rotations (instantaneous or cumulative) to allow within Observer
max_rotation = 1 # degrees about optical axis
max_pan_tilt = 2 # degrees pan or tilt
# Max temporal gap to allow within Observer
max_gap = datetime.timedelta(days=2)
# Minimum number of images to require within Observer
min_images = 2
# Minimum time span of Observer stack
min_dt = datetime.timedelta(days=1)
# Minimum number of Observers in stack
min_observers = 1
# Nominal Observer time span
nominal_bin_dt = datetime.timedelta(days=3)
# Forward shift between consecutive bins
step_dt = datetime.timedelta(days=1)

# ---- Compute station ranges ----
# station_ranges, station_images, station_datetimes
コード例 #5
0
from glimpse.imports import (sys, datetime, matplotlib, np, os)
import glob

# Root of the Columbia Glacier data volume; derived paths feed the cg module.
root = '/volumes/science-b/data/columbia'
cg.IMAGE_PATH = os.path.join(root, 'timelapse')
cg.KEYPOINT_PATH = os.path.join(root, 'timelapse-keypoints')
cg.MATCHES_PATH = os.path.join(root, 'timelapse-matches')

# ---- Set script constants ----

# Stations to process.
STATIONS = ('CG04', 'CG05', 'CG06', 'AK01', 'AK01b', 'AK03', 'AK03b', 'AK09',
            'AK09b', 'AK10', 'AK10b', 'AK12', 'AKJNC', 'AKST03A', 'AKST03B')
# Sequences (station_YYYYMMDD) to exclude from matching.
SKIP_SEQUENCES = ('AK01_20070817', 'AK01_20080616', 'AK01b_20080619',
                  'AK01b_20090824', 'AK01b_20090825', 'AK01b_20160908',
                  'AK03_20070817', 'AK12_20100820')
SNAP = datetime.timedelta(hours=2)
# Maximum time separation between matched images.
MAXDT = datetime.timedelta(days=1)
# Frame offsets at which to attempt matches (1..12, then sparse long jumps).
MATCH_SEQ = np.concatenate((np.arange(12) + 1, (100, 300, 1000, 3000)))
MAX_RATIO = 0.6
MAX_ERROR = 0.03  # fraction of image width
N_MATCHES = 50
MIN_MATCHES = 50

# ---- Functions ----


def write_matches(matcher, **kwargs):
    matcher.build_matches(maxdt=MAXDT,
                          seq=MATCH_SEQ,
                          path=cg.MATCHES_PATH,
                          overwrite=False,
コード例 #6
0
# ---- Prepare Observers ----

observerpath = ['stardot1', 'stardot2']
observers = []
for observer in observerpath:
    path = join(DATA_DIR, observer)
    campaths = glob.glob(join(path, "*.JSON"))
    # Pair each camera JSON with its same-named .jpg image.
    images = [
        glimpse.Image(path=campath.replace(".JSON", ".jpg"), cam=campath)
        for campath in campaths
    ]
    images.sort(key=lambda img: img.datetime)
    datetimes = np.array([img.datetime for img in images])
    # Make timestamps strictly increasing: push any image whose timestamp
    # is <= its predecessor's forward by 1-5 seconds.
    for n, delta in enumerate(np.diff(datetimes)):
        if delta <= datetime.timedelta(seconds=0):
            secs = datetime.timedelta(seconds=n % 5 + 1)
            images[n + 1].datetime = images[n + 1].datetime + secs
    diffs = np.array([
        dt.total_seconds()
        for dt in np.diff([img.datetime for img in images])
    ])
    # Drop the later image of any pair still <= 1 second apart.
    # Fixes two defects in the original: `np.int` was removed in NumPy 1.24,
    # and the diff *values* (not their indices) were used as pop positions.
    # Pop from the end so earlier indices remain valid.
    for i in reversed(np.where(diffs <= 1)[0]):
        images.pop(i + 1)

    # Keep only the first half of the image set.
    images = images[:int(len(images) / 2)]
    print("Image set {} \n".format(len(images)))
    obs = glimpse.Observer(list(images), cache=False)
    observers.append(obs)
#-------------------------
コード例 #7
0
# NOTE(review): lmask/lvy are defined earlier in the file — this looks like
# filling masked cells of vy from a second velocity source; confirm upstream.
vy[..., lmask] = lvy

# Plot
matplotlib.pyplot.figure()
# glimpse.Raster(np.sum(bad, axis=2), template.x, template.y).plot()
# Map of cells with a valid vx observation in more than one epoch.
glimpse.Raster(np.sum(~np.isnan(vx), axis=2) > 1, template.x,
               template.y).plot()
# matplotlib.pyplot.colorbar()
matplotlib.pyplot.plot(cg.Glacier()[:, 0], cg.Glacier()[:, 1])

# ---- Compute weights ----
# Use sum of distances to neighbors
# Saturate at 0.5 years because of seasonal variability

# For each observation: sum of the time gaps to its previous and next
# neighbor (the ends are padded with a zero gap).
dts = np.column_stack(
    (np.concatenate(([datetime.timedelta(0)], np.diff(datetimes))),
     np.concatenate(
         (np.diff(datetimes), [datetime.timedelta(0)])))).sum(axis=1)
# Convert gaps to years (365-day year) and cap at half a year.
weights = np.array([dt.total_seconds() / (3600 * 24 * 365) for dt in dts])
weights[weights > 0.5] = 0.5
# Lower weights for observations before 2004
# Years before 2004-06-18 (positive for earlier observations).
dyears = np.array([
    dt.total_seconds() / (3600 * 24 * 365)
    for dt in datetime.datetime(2004, 6, 18) - datetimes
])
# Linearly ramp pre-2004 weights toward zero at the oldest observation.
weights[dyears > 0] *= (1 - dyears[dyears > 0] / max(dyears))

# ---- Compute summary statistics (cartesian) ----

# Broadcast per-epoch weights over the spatial grid (axes 0, 1), then take
# the weighted mean over time (axis 2).
w = glimpse.helpers.tile_axis(weights, vx.shape, axis=(0, 1))
vx_mean = glimpse.helpers.weighted_nanmean(vx, weights=w, axis=2)