Code example #1
from collections import OrderedDict
from datetime import datetime, timedelta

import fiona
import fiona.crs
import shapely
import shapely.geometry
import shapely.wkt
import xarray

import PyOFS
from PyOFS import (
    CRS_EPSG,
    DATA_DIRECTORY,
    LEAFLET_NODATA_VALUE,
    TIFF_CREATION_OPTIONS,
    get_logger,
    utilities,
)

LOGGER = get_logger('PyOFS.VIIRS')

VIIRS_START_TIME = datetime.strptime('2012-03-01 00:10:00',
                                     '%Y-%m-%d %H:%M:%S')
VIIRS_PERIOD = timedelta(days=16)

PASS_TIMES_FILENAME = DATA_DIRECTORY / 'reference' / 'viirs_pass_times.txt'
STUDY_AREA_POLYGON_FILENAME = DATA_DIRECTORY / 'reference' / 'wcofs.gpkg:study_area'

OUTPUT_CRS = fiona.crs.from_epsg(CRS_EPSG)

NRT_DELAY = timedelta(hours=2)

SOURCE_URLS = OrderedDict({
    'OpenDAP':
    OrderedDict({
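The SOURCE_URLS dictionary is truncated above. Separately, the two VIIRS constants record the mission start time and what appears to be the 16-day repeat cycle; a minimal standalone sketch of how they might be combined to enumerate cycle start times (the cycle_starts helper is hypothetical, not part of PyOFS):

from datetime import datetime, timedelta

VIIRS_START_TIME = datetime.strptime('2012-03-01 00:10:00', '%Y-%m-%d %H:%M:%S')
VIIRS_PERIOD = timedelta(days=16)


def cycle_starts(until: datetime) -> list:
    """Hypothetical helper: start of every 16-day VIIRS repeat cycle up to `until`."""
    starts = []
    current = VIIRS_START_TIME
    while current <= until:
        starts.append(current)
        current += VIIRS_PERIOD
    return starts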
Code example #2
File: abi.py Project: noaa-ocs-modeling/PyOFS
from collections import OrderedDict
from datetime import timedelta

import fiona
import fiona.crs
import shapely
import shapely.geometry
import shapely.wkt
import xarray

import PyOFS
from PyOFS import (
    CRS_EPSG,
    DATA_DIRECTORY,
    LEAFLET_NODATA_VALUE,
    TIFF_CREATION_OPTIONS,
    get_logger,
    utilities,
)

LOGGER = get_logger('PyOFS.ABI')

STUDY_AREA_POLYGON_FILENAME = DATA_DIRECTORY / 'reference' / 'wcofs.gpkg:study_area'

OUTPUT_CRS = fiona.crs.from_epsg(CRS_EPSG)

NRT_DELAY = timedelta(hours=2)

SOURCE_URLS = OrderedDict({
    'OpenDAP':
    OrderedDict({
        'NESDIS':
        'https://www.star.nesdis.noaa.gov/thredds/dodsC',
        'JPL':
        'https://podaac-opendap.jpl.nasa.gov:443/opendap/allData/ghrsst/data/GDS2/L3C',
        'NODC':
Code example #3
File: data_buoy.py Project: noaa-ocs-modeling/PyOFS
from os import PathLike
from pathlib import Path
import re

import fiona
import fiona.crs
import numpy
import requests
import shapely
import shapely.geometry
import xarray

import PyOFS
from PyOFS import CRS_EPSG, DATA_DIRECTORY, get_logger, utilities

LOGGER = get_logger('PyOFS.NDBC')

MEASUREMENT_VARIABLES = [
    'water_temperature',
    'conductivity',
    'salinity',
    'o2_saturation',
    'dissolved_oxygen',
    'chlorophyll_concentration',
    'turbidity',
    'water_ph',
    'water_eh',
]

OUTPUT_CRS = fiona.crs.from_epsg(CRS_EPSG)
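Not part of the excerpt, but a minimal sketch of how MEASUREMENT_VARIABLES and OUTPUT_CRS could feed a fiona schema when writing buoy records to a GeoPackage; the filename, layer name, and the EPSG code used here are placeholders (PyOFS defines the real CRS_EPSG):

import fiona
import fiona.crs

OUTPUT_CRS = fiona.crs.from_epsg(4326)  # placeholder EPSG code

MEASUREMENT_VARIABLES = ['water_temperature', 'salinity', 'turbidity']  # abbreviated

schema = {
    'geometry': 'Point',
    'properties': {'name': 'str', **{variable: 'float' for variable in MEASUREMENT_VARIABLES}},
}

with fiona.open('buoys.gpkg', 'w', driver='GPKG', layer='ndbc_buoys',
                schema=schema, crs=OUTPUT_CRS) as layer:
    pass  # layer.write({'geometry': ..., 'properties': ...}) per buoy record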
Code example #4
import os
from os import PathLike
from pathlib import Path
from datetime import datetime, timedelta
import boto3
from botocore.exceptions import NoCredentialsError

from PyOFS import get_logger

LOGGER = get_logger('PyOFS.azure')


def upload_to_azure(
    local_path: PathLike,
    remote_path: PathLike,
    credentials: str,
    overwrite: bool = False,
    azcopy_path: PathLike = None,
    **kwargs,
):
    if azcopy_path is not None and not isinstance(azcopy_path, Path):
        azcopy_path = Path(azcopy_path)

    LOGGER.info(f'Uploading {local_path} to {remote_path}')

    os.environ['AZCOPY_CRED_TYPE'] = 'Anonymous'
    if azcopy_path is not None:
        azcopy_dir = azcopy_path.parent
        azcopy_filename = azcopy_path.name
        os.chdir(azcopy_dir)
    else:
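The function body is truncated above. A hypothetical call, assuming `credentials` is an Azure SAS token and `remote_path` is a blob URL; every path and token below is a placeholder:

upload_to_azure(
    'output/daily_average.nc',                                            # placeholder local file
    'https://example.blob.core.windows.net/container/daily_average.nc',   # placeholder blob URL
    credentials='<SAS token>',
    overwrite=True,
    azcopy_path='/usr/local/bin/azcopy',                                  # placeholder azcopy location
)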
Code example #5
from concurrent import futures
from datetime import datetime, timedelta
import os
from os import PathLike
from pathlib import Path

import numpy
import xarray

from PyOFS import DATA_DIRECTORY, get_logger
from PyOFS.model import wcofs
from PyOFS.observation import hf_radar, viirs

LOGGER = get_logger('PyOFS.valid')

WORKSPACE_DIR = DATA_DIRECTORY / 'validation'

# UTC offset of study area
UTC_OFFSET = 8


def to_netcdf(start_time: datetime, end_time: datetime, output_dir: PathLike):
    """
    Writes HFR, VIIRS, and WCOFS data to NetCDF files at the given filenames.

    :param start_time: Start of time interval.
    :param end_time: End of time interval.
    :param output_dir: Output directory.
    """

    if not isinstance(output_dir, Path):
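The function is cut off above; a hypothetical invocation under the signature shown (the date range and output subdirectory are placeholders):

from datetime import datetime, timedelta

start_time = datetime(2020, 1, 1)
to_netcdf(start_time, start_time + timedelta(days=1), WORKSPACE_DIR / 'netcdf')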
Code example #6
File: hf_radar.py Project: noaa-ocs-modeling/PyOFS
from datetime import timedelta
from os import PathLike
from pathlib import Path
from typing import Collection

import fiona
import fiona.crs
import numpy
import rasterio
from rasterio.enums import Resampling
import scipy.interpolate
import xarray

import PyOFS
from PyOFS import CRS_EPSG, LEAFLET_NODATA_VALUE, TIFF_CREATION_OPTIONS, get_logger

LOGGER = get_logger('PyOFS.HFR')

DATA_VARIABLES = {'ssu': 'u', 'ssv': 'v', 'dopx': 'dopx', 'dopy': 'dopy'}

OUTPUT_CRS = fiona.crs.from_epsg(CRS_EPSG)

NRT_DELAY = timedelta(hours=1)

# either UCSD (University of California San Diego) or NDBC (National Data Buoy Center); NDBC has larger extent but only for the past 4 days
SOURCE_URLS = {
    'NDBC': 'https://dods.ndbc.noaa.gov/thredds/dodsC',
    'UCSD': 'http://hfrnet-tds.ucsd.edu/thredds/dodsC/HFR/USWC',
}


class HFRadarRange:
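The comment above says the NDBC endpoint has a larger extent but only serves roughly the last four days; a small hypothetical helper (not in PyOFS) that chooses between the two SOURCE_URLS entries on that basis:

from datetime import datetime, timedelta


def pick_source(start_time: datetime) -> str:
    """Hypothetical: use NDBC for recent requests, fall back to UCSD otherwise."""
    if datetime.now() - start_time <= timedelta(days=4):
        return SOURCE_URLS['NDBC']
    return SOURCE_URLS['UCSD']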
Code example #7
import fiona
import fiona.crs
import rasterio.mask
import rasterio.warp
from shapely import geometry
import xarray

import PyOFS
from PyOFS import (
    CRS_EPSG,
    DATA_DIRECTORY,
    LEAFLET_NODATA_VALUE,
    TIFF_CREATION_OPTIONS,
    get_logger,
    utilities,
)

LOGGER = get_logger('PyOFS.RTOFS')

OUTPUT_CRS = fiona.crs.from_epsg(CRS_EPSG)

COORDINATE_VARIABLES = ['time', 'lev', 'lat', 'lon']

DATASET_STRUCTURE = {
    '2ds': {
        'nowcast': {
            'prog': ['sss', 'sst', 'u_velocity', 'v_velocity'],
            'diag': ['ssh', 'ice_coverage', 'ice_thickness'],
        },
        'forecast': {
            'prog': ['sss', 'sst', 'u_velocity', 'v_velocity'],
            'diag': ['ssh', 'ice_coverage', 'ice_thickness'],
        },
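DATASET_STRUCTURE (truncated above) nests dataset type, then run ('nowcast'/'forecast'), then what looks like a file suffix ('prog'/'diag'), then variable names; a short illustrative loop that flattens whatever portion of it is defined:

for dataset_type, runs in DATASET_STRUCTURE.items():
    for run, file_suffixes in runs.items():
        for file_suffix, variables in file_suffixes.items():
            for variable in variables:
                print(f'{dataset_type}/{run}/{file_suffix}/{variable}')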
Code example #8
File: smap.py Project: noaa-ocs-modeling/PyOFS
from collections import OrderedDict

import fiona
import fiona.crs
import shapely.geometry
import shapely.wkt
import xarray

import PyOFS
from PyOFS import (
    CRS_EPSG,
    DATA_DIRECTORY,
    LEAFLET_NODATA_VALUE,
    NoDataError,
    TIFF_CREATION_OPTIONS,
    get_logger,
    utilities,
)

LOGGER = get_logger('PyOFS.SMAP')

STUDY_AREA_POLYGON_FILENAME = DATA_DIRECTORY / 'reference' / 'wcofs.gpkg:study_area'

OUTPUT_CRS = fiona.crs.from_epsg(CRS_EPSG)

SOURCE_URLS = OrderedDict({
    'OpenDAP':
    OrderedDict({
        'JPL':
        'https://thredds.jpl.nasa.gov/thredds/dodsC/ncml_aggregation/SalinityDensity/smap/aggregate__SMAP_JPL_L3_SSS_CAP_MONTHLY_V42.ncml',
    })
})


class SMAPDataset:
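Not shown in the excerpt, but the JPL entry above is an OpenDAP aggregation, so it can in principle be opened lazily with xarray (network access and the netCDF4/OpenDAP backend assumed):

import xarray

url = SOURCE_URLS['OpenDAP']['JPL']
dataset = xarray.open_dataset(url)  # lazy; values are fetched on access
print(list(dataset.data_vars))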
Code example #9
from datetime import datetime, timedelta
from os import PathLike
from pathlib import Path

from PyOFS import DATA_DIRECTORY, get_logger
from main.leaflet import write_json

LOGGER = get_logger('PyOFS.check')

observations = {'hfr': ['dir', 'mag'], 'viirs': ['sst']}
models = {
    'wcofs': ['dir', 'mag', 'sst', 'ssh', 'sss'],
    'rtofs': ['dir', 'mag', 'sst', 'ssh', 'sss'],
}
time_deltas = ['n001', 'f001', 'f002', 'f003']


def check_files(input_dir: PathLike) -> dict:
    if not isinstance(input_dir, Path):
        input_dir = Path(input_dir)

    missing_files = {}

    structure = write_json.get_directory_structure(input_dir)

    for day, filenames in structure['output']['daily_averages'].items():
        for observation, variables in observations.items():
            for variable in variables:
                if variable in ['dir', 'mag']:
                    extension = 'asc'
                else:
Code example #10
File: download.py Project: noaa-ocs-modeling/PyOFS
            avg_dir,
            fwd_dir,
            obs_dir,
            mod_dir,
    ] + list(day_directories.values()):  # experimental_dir]:
        if not directory.exists():
            os.makedirs(directory, exist_ok=True)

    # define log filename
    log_path = LOG_DIRECTORY / f'{datetime.now():%Y%m%d}_download.log'

    # check whether logfile exists
    log_exists = log_path.exists()

    logger = get_logger('download',
                        log_path,
                        file_level=logging.INFO,
                        console_level=logging.DEBUG)

    # write initial message
    logger.info('Starting FTP transfer...')

    # instantiate FTP connection
    with ftplib.FTP(TIDEPOOL_URL) as ftp_connection:
        ftp_connection.login()

        path_map = {}
        for input_path in ftp_connection.nlst(INPUT_DIRECTORY):
            filename = os.path.basename(input_path)

            if 'rtofs' in filename:
                output_path = rtofs_dir / filename
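The excerpt ends while the path map is still being built; the usual ftplib pattern for then retrieving each mapped file would look roughly like this sketch (not the repository's code, and assuming path_map maps remote paths to local Path objects):

        for input_path, output_path in path_map.items():
            if not output_path.exists():
                with open(output_path, 'wb') as output_file:
                    ftp_connection.retrbinary(f'RETR {input_path}', output_file.write)
                logger.info(f'downloaded {input_path} to {output_path}')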
Code example #11
LOG_DIRECTORY = DATA_DIRECTORY / 'log'
LOG_FILENAME = LOG_DIRECTORY / f'{datetime.now():%Y%m%d}_conversion.log'
OUTPUT_DIRECTORY = DATA_DIRECTORY / 'output'
REFERENCE_DIRECTORY = DATA_DIRECTORY / 'reference'

# offset from study area to UTC
STUDY_AREA_TIMEZONE = 'US/Pacific'
STUDY_AREA_TO_UTC = timedelta(
    hours=-datetime.now(pytz.timezone(STUDY_AREA_TIMEZONE)).utcoffset() /
    timedelta(hours=1))

# range of day deltas that models reach
MODEL_DAY_DELTAS = {'WCOFS': range(-1, 3), 'RTOFS': range(-3, 9)}

LOGGER = get_logger('PyOFS',
                    LOG_FILENAME,
                    file_level=logging.INFO,
                    console_level=logging.INFO)


def write_observation(
    output_dir: PathLike, observation_date: Union[datetime, date], observation: str
):
    """
    Writes daily average of observational data on given date.

    :param output_dir: output directory to write files
    :param observation_date: date of observation
    :param observation: observation to write
    :raise _utilities.NoDataError: if no data found
    """
Code example #12
File: utilities.py Project: noaa-ocs-modeling/PyOFS
from datetime import timedelta

import pyproj
import shapely
from shapely.geometry import shape
from shapely.ops import transform
import xarray

from PyOFS import get_logger, split_layer_filename

WGS84 = pyproj.Proj('+proj=longlat +datum=WGS84 +no_defs')
WEB_MERCATOR = pyproj.Proj(
    '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs'
)

GRAVITATIONAL_ACCELERATION = 9.80665  # meters per second squared
SIDEREAL_ROTATION_PERIOD = timedelta(hours=23, minutes=56, seconds=4.1)

LOGGER = get_logger('PyOFS.utili')


def copy_xarray(input_path: str, output_path: str) -> xarray.Dataset:
    """
    Copy given xarray observation to a local file at the given path.

    :param input_path: path to observation to copy
    :param output_path: path to output file
    :return: copied observation at given path
    """

    LOGGER.info(f'Reading observation from {input_path}')

    input_dataset = xarray.open_dataset(input_path, decode_times=False)
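The WGS84 and WEB_MERCATOR projections defined above are the kind of pair normally fed to `shapely.ops.transform`; a minimal sketch (not from the excerpt, pyproj 2+ Transformer API assumed) reprojecting a point from geographic coordinates to Web Mercator meters:

import pyproj
from shapely.geometry import Point
from shapely.ops import transform

wgs84 = pyproj.Proj('+proj=longlat +datum=WGS84 +no_defs')
web_mercator = pyproj.Proj('+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 '
                           '+x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs')

# build a forward coordinate transformation (longitude/latitude -> meters)
transformer = pyproj.Transformer.from_proj(wgs84, web_mercator, always_xy=True)
point = transform(transformer.transform, Point(-124.0, 44.6))
print(point)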
Code example #13
File: analysis.py Project: noaa-ocs-modeling/PyOFS
from datetime import datetime, timedelta
from pathlib import Path

import fiona
from matplotlib import pyplot
import numpy
import pandas
from shapely import geometry

from PyOFS import DATA_DIRECTORY, get_logger

LOGGER = get_logger('PyOFS.track')


def diffusion(polygons: [geometry.Polygon]):
    for polygon in polygons:
        centroid = polygon.centroid

        max_radius = max(
            centroid.distance(vertex)
            for vertex in (geometry.Point(point) for point in zip(*polygon.exterior.xy))
        )

        # ring width in the units of the polygon's CRS (assumed to be meters)
        radius_interval = 500

        # step outward from the centroid in concentric annuli
        for radius in range(radius_interval, int(max_radius), radius_interval):
            # annulus between this radius and the next ring outward
            analysis_area = centroid.buffer(radius + radius_interval).difference(
                centroid.buffer(radius)
            )
            polygon.intersection(analysis_area)
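A quick standalone check of the annulus construction used in the loop above (outer buffer minus inner buffer), confirming that the resulting area is the ring between the two radii:

from shapely.geometry import Point

centroid = Point(0, 0)
inner_radius, outer_radius = 500, 1000
annulus = centroid.buffer(outer_radius).difference(centroid.buffer(inner_radius))
print(annulus.area)  # ~ pi * (1000**2 - 500**2) ~ 2.36e6, slightly less due to buffer segmentation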