Example #1
0
def test_era5_fetch_waveperiod():
    """Batch-download ERA5 wave period data for the default test query."""
    source = 'era5'
    fetch_handler('waveperiod', source, **kwargs)
Example #2
0
def test_era5_fetch_wind():
    """Batch-download each ERA5 wind component for the default test query."""
    # same call order as before: u, then v, then the combined uv product
    for component in ('wind_u', 'wind_v', 'wind_uv'):
        fetch_handler(component, 'era5', **kwargs)
Example #3
0
def animate(var,
            source,
            kwargs,
            step=timedelta(hours=12),
            fps=30,
            plot_wind=False,
            debug=False):
    """render a sequence of 2D plots and assemble them into an mp4 animation

    args:
        var: variable name to plot, e.g. 'temp'
        source: data source to fetch from, e.g. 'hycom'
        kwargs: query boundaries (south/west/north/east/top/bottom/start/end)
        step: time interval between animation frames
        fps: output video frame rate
        plot_wind: overlay wind on each frame (forwarded to plot2D)
        debug: when True, delete previously rendered frames so that every
            frame is regenerated from scratch

    example:

    var='temp'
    kwargs = dict(
            start=datetime(2015, 1, 2, 12), end=datetime(2015, 12, 31),
            south=45,                   west=-68.4, 
            north=51.5,                 east=-56.5, 
            top=0,                      bottom=0
        )

    debug=False
    animate('temp', 'hycom', kwargs, step=timedelta(hours=6), fps=30, debug=debug)
    """
    # download all the data first
    fetch_handler(var, source, **kwargs)

    # prepare frames folder. in debug mode remove stale frames so they
    # will be regenerated below
    dirname = storage_cfg() + 'figures/'
    os.makedirs(dirname, exist_ok=True)
    if debug:
        for frame in next(os.walk(dirname))[2]:
            if '.png' in frame:
                os.remove(f'{dirname}{frame}')

    # generate an image frame for each step-sized window in the query range.
    # frame timestamps start at midnight of the first query day
    qry = kwargs.copy()
    cur = datetime(kwargs['start'].year, kwargs['start'].month,
                   kwargs['start'].day)
    while cur <= kwargs['end']:
        qry['start'] = cur
        qry['end'] = cur + step
        frame_name = f'{var}_{cur.isoformat()}.png'
        if not os.path.isfile(f'{dirname}{frame_name}'):
            plot2D(var, source, plot_wind=plot_wind, save=frame_name, **qry)
        cur += step

    # filename and path for output
    fname = (f'{var}_{kwargs["start"].date().isoformat()}'
             f'_{kwargs["end"].date().isoformat()}.mp4')
    savedir = f'{storage_cfg()}animated{os.path.sep}'
    os.makedirs(savedir, exist_ok=True)

    # aggregate frames within query range and append to mp4 file.
    # bugfix: only parse filenames matching this variable's pattern --
    # strptime previously raised ValueError on frames left in the folder
    # by an animation of a different variable
    logging.info(f'animating {fname}...')
    fmt = f'{var}_%Y-%m-%dT%H:%M:%S.png'

    def frame_time(f):
        # timestamp parsed from a frame filename, or None if the file
        # does not belong to this variable's frame sequence
        try:
            return datetime.strptime(f, fmt)
        except ValueError:
            return None

    frames = sorted(
        f'{dirname}{f}' for f in next(os.walk(dirname))[2]
        if (t := frame_time(f)) is not None
        and kwargs['start'] <= t <= kwargs['end'])

    with imageio.get_writer(f'{savedir}{fname}',
                            mode='I',
                            macro_block_size=4,
                            format='FFMPEG',
                            fps=fps) as writer:
        for frame in frames:
            writer.append_data(imageio.imread(frame))

    logging.info(f'saved animation to {savedir}{fname}')
    return
Example #4
0
def test_era5_fetch_windwaveswellheight():
    """Batch-download ERA5 combined wind/wave/swell height data."""
    # the handler variable key is 'waveheight', not the longer name
    # used by this test function
    source = 'era5'
    fetch_handler('waveheight', source, **kwargs)
Example #5
0
    def __init__(self,
                 load_bathymetry=0,
                 load_temp=0,
                 load_salinity=0,
                 load_wavedir=0,
                 load_waveheight=0,
                 load_waveperiod=0,
                 load_wind_uv=0,
                 load_wind_u=0,
                 load_wind_v=0,
                 load_water_uv=0,
                 load_water_u=0,
                 load_water_v=0,
                 fetch=4,
                 **kwargs):
        """Load each environment variable and build its interpolator.

        Each load_* argument may be one of:
          * callable: used directly as the data-loading callback
          * str:      data source name (e.g. 'hycom'); data is fetched
                      and loaded via the matching load_map entry
          * number:   the variable takes that constant value everywhere
          * array:    ordered [val, lat, lon] for 2D data, or
                      [val, lat, lon, depth] for 3D data

        fetch: parallelism passed to fetch_handler; False skips fetching
            (presumably when data was fetched already -- TODO confirm).
        kwargs: query boundaries (south, west, north, east, top, bottom,
            start, end); missing keys fall back to default_val.

        Results are stored in self.interps (variable name -> interpolator);
        self.boundaries and self.origin record the query region.
        """
        # fill in any missing query boundaries with defaults
        for kw in [
                k for k in ('south', 'west', 'north', 'east', 'top', 'bottom',
                            'start', 'end') if k not in kwargs.keys()
        ]:
            kwargs[kw] = default_val[kw]

        data = {}
        callbacks = []
        vartypes = [
            'bathy',
            'temp',
            'salinity',
            'wavedir',
            'waveheight',
            'waveperiod',
            'wind_uv',
            'wind_u',
            'wind_v',
            'water_uv',
            'water_u',
            'water_v',
        ]
        load_args = [
            load_bathymetry,
            load_temp,
            load_salinity,
            load_wavedir,
            load_waveheight,
            load_waveperiod,
            load_wind_uv,
            load_wind_u,
            load_wind_v,
            load_water_uv,
            load_water_u,
            load_water_v,
        ]

        # if load_args are not callable, convert it to a callable function
        for v, load_arg in zip(vartypes, load_args):
            if callable(load_arg): callbacks.append(load_arg)

            elif isinstance(load_arg, str):
                key = f'{v}_{load_arg.lower()}'
                assert key in load_map.keys(
                ), f'no map for {key} in\n{load_map=}'
                callbacks.append(load_map[key])
                if fetch is not False:
                    fetch_handler(v,
                                  load_arg.lower(),
                                  parallel=fetch,
                                  **kwargs)

            elif isinstance(load_arg, (int, float)):
                data[f'{v}_val'] = load_arg
                data[f'{v}_lat'] = kwargs['south']
                data[f'{v}_lon'] = kwargs['west']
                data[f'{v}_time'] = dt_2_epoch(kwargs['start'])
                if v in var3d: data[f'{v}_depth'] = kwargs['top']
                callbacks.append(load_callback)

            elif isinstance(load_arg, (list, tuple, np.ndarray)):
                if len(load_arg) not in (3, 4):
                    raise ValueError(
                        f'invalid array shape for load_{v}. '
                        'arrays must be ordered by [val, lat, lon] for 2D data, or '
                        '[val, lat, lon, depth] for 3D data')
                data[f'{v}_val'] = load_arg[0]
                data[f'{v}_lat'] = load_arg[1]
                data[f'{v}_lon'] = load_arg[2]
                if len(load_arg) == 4: data[f'{v}_depth'] = load_arg[3]
                callbacks.append(load_callback)

            else:
                raise TypeError(
                    f'invalid type for load_{v}. '
                    'valid types include string, float, array, and callable')

        q = Queue()

        # prepare data pipeline
        is_3D = [v in var3d for v in vartypes]
        is_arr = [not isinstance(arg, (int, float)) for arg in load_args]
        columns = [
            fcn(v=v, data=data, **kwargs)
            for fcn, v in zip(callbacks, vartypes)
        ]
        intrpmap = [(Uniform2D, Uniform3D), (Interpolator2D, Interpolator3D)]
        reshapers = [reshape_3D if v else reshape_2D for v in is_3D]
        # map interpolations to dictionary
        self.interps = {}
        # bugfix: interpolators/interpolations must be lists, not lazy map
        # objects -- the start() loop below exhausted the map iterator, so
        # the subsequent join() loop iterated an empty iterator and the
        # worker processes were never joined
        interpolators = [intrpmap[a][d] for a, d in zip(is_arr, is_3D)]
        interpolations = [
            Process(target=worker, args=(i, r, c, v, q))
            for i, r, c, v in zip(interpolators, reshapers, columns, vartypes)
        ]

        # assert that no empty arrays were returned by load function
        for col, var in zip(columns, vartypes):
            if isinstance(col, dict) or isinstance(col[0], (int, float)):
                continue
            assert len(col[0]) > 0, (
                f'no data found for {var} in region {fmt_coords(kwargs)}. '
                f'consider expanding the region')

        # compute interpolations in parallel and store in dict attribute
        if not os.environ.get('LOGLEVEL') == 'DEBUG':
            for i in interpolations:
                i.start()
            while len(self.interps.keys()) < len(vartypes):
                obj = q.get()
                self.interps[obj[0]] = obj[1]
            for i in interpolations:
                i.join()

        # debug mode: disable parallelization for nicer stack traces
        elif os.environ.get('LOGLEVEL') == 'DEBUG':
            logging.debug('OCEAN DEBUG MSG: parallelization disabled')
            for i, r, c, v in zip(interpolators, reshapers, columns, vartypes):
                logging.debug(f'interpolating {v}')
                logging.debug(f'{i = }\n{r = }\n{c = }\n{v = }')
                obj = i(**r(c))
                q.put((v, obj))

            while len(self.interps.keys()) < len(vartypes):
                obj = q.get()
                self.interps[obj[0]] = obj[1]
                logging.debug(
                    f'done {obj[0]}... {len(self.interps.keys())}/{len(vartypes)}'
                )

        q.close()

        # set ocean boundaries and interpolator origins
        self.boundaries = kwargs.copy()
        self.origin = center_point(lat=[kwargs['south'], kwargs['north']],
                                   lon=[kwargs['west'], kwargs['east']])
        for v in vartypes:
            self.interps[v].origin = self.origin

        return
Example #6
0
def test_batch_chs():
    """Batch-download CHS bathymetry for a small test region."""
    region = dict(south=45, west=-67, north=46, east=-66)
    fetch_handler('bathy', 'chs', **region)
Example #7
0
def test_batch_era5():
    """Batch-download ERA5 wave direction data for the default test query."""
    variable, source = 'wavedir', 'era5'
    fetch_handler(variable, source, **kwargs)
Example #8
0
def test_batch_hycom():
    """Batch-download HYCOM salinity data for the default test query."""
    variable, source = 'salinity', 'hycom'
    fetch_handler(variable, source, **kwargs)
Example #9
0
def test_batch_wwiii():
    """Batch-download WWIII combined wind components for the default query."""
    variable, source = 'wind_uv', 'wwiii'
    fetch_handler(variable, source, **kwargs)