Example #1
    def create_netcdf(self, filename):
        """
        Create a compressed netCDF4 (.nc) file from the radial instance
        :param filename: User defined filename of radial file you want to save
        :return:
        """
        create_dir(os.path.dirname(filename))

        xds = self.to_xarray(enhance=True)

        encoding = make_encoding(xds, comp_level=4, fillvalue=np.nan)
        encoding['bearing'] = dict(zlib=False, _FillValue=None)
        encoding['range'] = dict(zlib=False, _FillValue=None)
        encoding['time'] = dict(zlib=False, _FillValue=None)

        xds.to_netcdf(filename,
                      encoding=encoding,
                      format='netCDF4',
                      engine='netcdf4',
                      unlimited_dims=['time'])
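
A minimal usage sketch, assuming a Radial class in the surrounding toolbox exposes this method (the import path and file names below are hypothetical):

from radials import Radial  # hypothetical import path

r = Radial('/path/to/RDLi_SITE_2019_01_01_0000.ruv')
r.create_netcdf('/path/to/nc/RDLi_SITE_2019_01_01_0000.nc')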
Example #2
def main(files, save_dir, average, error):
    """
    This function averages CODAR surface current data over a given time period.
    :param fname: Path to netCDF file or directory containing netCDF files. If directory, must use wildcard (/path/*.nc)
    :param save_dir: Directory to save averaged surface current netCDF file
    :param average: Average by time. Default: time.month; See: http://xarray.pydata.org/en/latest/time-series.html#datetime-components
    :param g_u: Error in Eastward seawater velocity (u). Default: .6
    :param g_v: Error in Northward seawater velocity (v). Default: .6
    """
    # open netcdf file or files
    ds = xr.open_mfdataset(files, combine='by_coords')

    # filter u and v by the given error thresholds; anything above is removed
    ds = filter_by_error(ds, error[0], error[1])

    # group dataset by the given time period and average each group
    new_ds = ds.groupby(average).apply(custom_mean)

    # Create save directory if it doesn't exist
    create_dir(save_dir)

    # Output dataset containing grouped averages to netcdf
    new_ds.to_netcdf(os.path.join(save_dir, 'codar_monthly_average.nc'))
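
A minimal usage sketch, assuming the helpers above (filter_by_error, custom_mean, create_dir) are importable alongside this function (paths hypothetical):

# average all files for a deployment by month, masking u/v errors above 0.6
main('/path/to/totals/*.nc', '/path/to/monthly_averages', 'time.month', (0.6, 0.6))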
Example #3
def main(grid,
         mat_file,
         save_dir,
         user_attributes,
         flags=None,
         domain=None,
         method='oi'):
    """
    Convert MAT files created using the hfrProgs MATLAB toolbox into CF-1.6/NCEI Grid 2.0 compliant netCDF4 files
    :param grid: CSV file containing lon,lat grid information
    :param mat_file: Filepath to MAT file containing HFRProgs
    :param save_dir: Directory to save netCDF files to
    :param user_attributes: User defined dataset attributes for netCDF global attribute. Required for CF/NCEI compliance
    :param flags: Dictionary of variable names mapped to thresholds; grid cells where the variable exceeds its threshold are masked
    :param domain: Domain name used in the output filename. If empty, the MAT file's DomainName is used (falling back to 'MARA')
    :param method: 'oi' or 'lsq'. OI is optimal interpolation. LSQ is unweighted least squares
    """
    fname = os.path.basename(mat_file)
    try:
        # load .mat file
        data = loadmat(mat_file, squeeze_me=True, struct_as_record=False)
        logging.debug('{} - MAT file successfully loaded '.format(fname))
    except Exception as err:
        logging.error('{} - {}. MAT file could not be loaded.'.format(
            fname, err))
        return

    if not domain:
        domain = data['TUV'].DomainName
        if not domain:
            domain = 'MARA'

    time = timestamp_from_lluv_filename(mat_file)

    time_index = pd.date_range(
        time.strftime('%Y-%m-%d %H:%M:%S'),
        periods=1)  # create pandas datetimeindex from time
    time_string = time.strftime(
        '%Y%m%dT%H%M%SZ')  # create timestring from time

    file_name = 'RU_{}_{}.nc'.format(domain, time_string)
    file_and_path = os.path.join(save_dir, file_name)

    try:
        logging.debug('{} - Saving file data to variables'.format(fname))
        # load longitude and latitude data associated with variables
        lonlat = data['TUV'].LonLat.astype(np.float32)

        # create variables for eastward and northward velocities
        u = data['TUV'].U.astype(np.float32)
        v = data['TUV'].V.astype(np.float32)
        u_units = data['TUV'].UUnits
        v_units = data['TUV'].VUnits

        maxspd = data['TUV'].OtherMetadata.cleanTotals.maxspd

        if method == 'oi':
            # create variables for associated error values
            u_err = data['TUV'].ErrorEstimates.Uerr.astype(np.float32)
            v_err = data['TUV'].ErrorEstimates.Verr.astype(np.float32)
            uv_covariance = data['TUV'].ErrorEstimates.UVCovariance

            # Data Processing Information
            num_rads = data[
                'TUV'].OtherMatrixVars.makeTotalsOI_TotalsNumRads.astype(int)
            min_rads = data[
                'TUV'].OtherMetadata.makeTotalsOI.parameters.MinNumRads
            min_sites = data[
                'TUV'].OtherMetadata.makeTotalsOI.parameters.MinNumSites
            mdlvar = data['TUV'].OtherMetadata.makeTotalsOI.parameters.mdlvar
            errvar = data['TUV'].OtherMetadata.makeTotalsOI.parameters.errvar
            sx = data['TUV'].OtherMetadata.makeTotalsOI.parameters.sx
            sy = data['TUV'].OtherMetadata.makeTotalsOI.parameters.sy
            temporal_threshold = data[
                'TUV'].OtherMetadata.makeTotalsOI.parameters.tempthresh
            processing_parameters = [
                maxspd, min_sites, min_rads, temporal_threshold, sx, sy,
                mdlvar, errvar
            ]
            processing_parameters_info = '1) Maximum Total Speed Threshold (cm s-1)\n'
            processing_parameters_info += '2) Minimum number of radial sites\n'
            processing_parameters_info += '3) Minimum number of radial vectors\n'
            processing_parameters_info += '4) Temporal search window for radial solutions (Fraction of a day)\n'
            processing_parameters_info += '5) Decorrelation scales in the north direction\n'
            processing_parameters_info += '6) Decorrelation scales in the east direction\n'
            processing_parameters_info += '7) Signal variance of the surface current fields (cm2 s-2)\n'
            processing_parameters_info += '8) Data error variance of the input radial velocities (cm2 s-2)\n'

        elif method == 'lsq':
            # create variables for associated error values
            u_err = data['TUV'].ErrorEstimates[1].Uerr.astype(np.float32)
            v_err = data['TUV'].ErrorEstimates[1].Verr.astype(np.float32)
            uv_covariance = data['TUV'].ErrorEstimates[1].UVCovariance.astype(
                np.float32)

            # Data Processing Information
            num_rads = data[
                'TUV'].OtherMatrixVars.makeTotals_TotalsNumRads.astype(int)
            min_rads = data[
                'TUV'].OtherMetadata.makeTotals.parameters.MinNumRads
            min_sites = data[
                'TUV'].OtherMetadata.makeTotals.parameters.MinNumSites
            spatial_threshold = data[
                'TUV'].OtherMetadata.makeTotals.parameters.spatthresh
            temporal_threshold = data[
                'TUV'].OtherMetadata.makeTotals.parameters.tempthresh
            processing_parameters = [
                maxspd, min_sites, min_rads, temporal_threshold,
                spatial_threshold
            ]
            processing_parameters_info = '1) Maximum Total Speed Threshold (cm s-1)\n'
            processing_parameters_info += '2) Minimum number of radial sites.\n'
            processing_parameters_info += '3) Minimum number of radial vectors.\n'
            processing_parameters_info += '4) Temporal search window for radial solutions (Fractions of a day)\n'
            processing_parameters_info += '5) Spatial search radius for radial solutions (km)\n'
    except AttributeError as err:
        logging.error(
            '{} - {}. MAT file missing variable needed to create netCDF4 file'.
            format(fname, err))
        return

    # Create a grid to shape 1d data
    lon = np.unique(grid['lon'].values.astype(np.float32))
    lat = np.unique(grid['lat'].values.astype(np.float32))
    x, y = np.meshgrid(lon, lat)

    # Create a dictionary of variables that we want to grid
    data_dict = dict(
        u=u,
        v=v,
        u_err=u_err,
        v_err=v_err,
        uv_covariance=uv_covariance,
        num_radials=num_rads,
    )

    logging.debug('{} - Gridding data to 2d grid'.format(fname))

    # convert 1d data into 2d gridded form. data_dict must be a dictionary.
    x_ind, y_ind = gridded_index(x, y, lonlat[:, 0], lonlat[:, 1])

    for key in data_dict:
        temp_data = np.full(x.shape, np.nan)
        temp_data[(y_ind, x_ind)] = data_dict[key]

        # prepend two singleton dimensions for time and z (depth)
        temp_data = np.expand_dims(np.expand_dims(temp_data, axis=0), axis=0)
        data_dict[key] = temp_data

    logging.debug('{} - Loading data into xarray dataset'.format(fname))

    # initialize xarray dataset. Add variables. Add coordinates
    ds = xr.Dataset()
    coords = ('time', 'z', 'lat', 'lon')
    ds['u'] = (coords, np.float32(data_dict['u']))
    ds['v'] = (coords, np.float32(data_dict['v']))
    ds['u_err'] = (coords, np.float32(data_dict['u_err']))
    ds['v_err'] = (coords, np.float32(data_dict['v_err']))
    ds['uv_covariance'] = (coords, np.float32(data_dict['uv_covariance']))
    ds['num_radials'] = (coords, data_dict['num_radials'])

    ds.coords['lon'] = lon
    ds.coords['lat'] = lat
    ds.coords['z'] = np.array([np.float32(0)])
    ds.coords['time'] = time_index

    if flags:
        for k, v in flags.items():
            ds = ds.where(ds[k] <= v)

    ds['processing_parameters'] = (('parameters',), processing_parameters)

    # Grab min and max time in dataset for entry into global attributes for cf compliance
    time_start = ds['time'].min().data
    time_end = ds['time'].max().data

    global_attributes = configs.netcdf_global_attributes(
        user_attributes, time_start, time_end)

    global_attributes['geospatial_lat_min'] = lat.min()
    global_attributes['geospatial_lat_max'] = lat.max()
    global_attributes['geospatial_lon_min'] = lon.min()
    global_attributes['geospatial_lon_max'] = lon.max()
    if method == 'oi':
        global_attributes['method'] = 'Optimal Interpolation'
    elif method == 'lsq':
        global_attributes['method'] = 'Unweighted Least Squares'

    logging.debug('{} - Assigning global attributes to dataset'.format(fname))
    ds = ds.assign_attrs(global_attributes)

    logging.debug(
        '{} - Assigning local attributes to each variable in dataset'.format(
            fname))
    # set time attribute
    ds['time'].attrs['standard_name'] = 'time'

    # Set lon attributes
    ds['lon'].attrs['long_name'] = 'Longitude'
    ds['lon'].attrs['standard_name'] = 'longitude'
    ds['lon'].attrs['short_name'] = 'lon'
    ds['lon'].attrs['units'] = 'degrees_east'
    ds['lon'].attrs['axis'] = 'X'
    ds['lon'].attrs['valid_min'] = np.float32(-180.0)
    ds['lon'].attrs['valid_max'] = np.float32(180.0)

    # Set lat attributes
    ds['lat'].attrs['long_name'] = 'Latitude'
    ds['lat'].attrs['standard_name'] = 'latitude'
    ds['lat'].attrs['short_name'] = 'lat'
    ds['lat'].attrs['units'] = 'degrees_north'
    ds['lat'].attrs['axis'] = 'Y'
    ds['lat'].attrs['valid_min'] = np.float32(-90.0)
    ds['lat'].attrs['valid_max'] = np.float32(90.0)

    # Set depth attributes
    ds['z'].attrs['long_name'] = 'Average Depth of Sensor'
    ds['z'].attrs['standard_name'] = 'depth'
    ds['z'].attrs['comment'] = 'Derived from mean value of depth variable'
    ds['z'].attrs['units'] = 'm'
    ds['z'].attrs['axis'] = 'Z'
    ds['z'].attrs['positive'] = 'down'

    # Set u attributes
    ds['u'].attrs['long_name'] = 'Eastward Surface Current (cm/s)'
    ds['u'].attrs['standard_name'] = 'surface_eastward_sea_water_velocity'
    ds['u'].attrs['short_name'] = 'u'
    ds['u'].attrs['units'] = u_units
    ds['u'].attrs['valid_min'] = np.float32(-300)
    ds['u'].attrs['valid_max'] = np.float32(300)
    ds['u'].attrs['coordinates'] = 'lon lat'
    ds['u'].attrs['grid_mapping'] = 'crs'

    # Set v attributes
    ds['v'].attrs['long_name'] = 'Northward Surface Current (cm/s)'
    ds['v'].attrs['standard_name'] = 'surface_northward_sea_water_velocity'
    ds['v'].attrs['short_name'] = 'v'
    ds['v'].attrs['units'] = v_units
    ds['v'].attrs['valid_min'] = np.float32(-300)
    ds['v'].attrs['valid_max'] = np.float32(300)
    ds['v'].attrs['coordinates'] = 'lon lat'
    ds['v'].attrs['grid_mapping'] = 'crs'

    # Set u_err attributes
    ds['u_err'].attrs['units'] = '1'
    ds['u_err'].attrs['valid_min'] = np.float32(0)
    ds['u_err'].attrs['valid_max'] = np.float32(1)
    ds['u_err'].attrs['coordinates'] = 'lon lat'
    ds['u_err'].attrs['grid_mapping'] = 'crs'

    # Set v_err attributes
    ds['v_err'].attrs['units'] = '1'
    ds['v_err'].attrs['valid_min'] = np.float32(0)
    ds['v_err'].attrs['valid_max'] = np.float32(1)
    ds['v_err'].attrs['coordinates'] = 'lon lat'
    ds['v_err'].attrs['grid_mapping'] = 'crs'

    if method == 'lsq':
        ds['u_err'].attrs[
            'long_name'] = 'Associated GDOP mapping error value associated with eastward velocity component'
        ds['v_err'].attrs[
            'long_name'] = 'Associated GDOP mapping error value associated with northward velocity component'
        ds['u_err'].attrs[
            'comment'] = 'velocity measurements with error values over 1.5 are of questionable quality'
        ds['v_err'].attrs[
            'comment'] = 'velocity measurements with error values over 1.5 are of questionable quality'
    elif method == 'oi':
        ds['u_err'].attrs[
            'long_name'] = 'Normalized uncertainty error associated with eastward velocity component'
        ds['v_err'].attrs[
            'long_name'] = 'Normalized uncertainty error associated with northward velocity component'
        ds['u_err'].attrs[
            'comment'] = 'velocity measurements with error values over 0.6 are of questionable quality'
        ds['v_err'].attrs[
            'comment'] = 'velocity measurements with error values over 0.6 are of questionable quality'

    # Set uv_covariance attributes
    ds['uv_covariance'].attrs[
        'long_name'] = 'Covariance of eastward and northward surface current components'
    ds['uv_covariance'].attrs['units'] = '1'
    ds['uv_covariance'].attrs['comment'] = 'directional information of u and v'
    ds['uv_covariance'].attrs['coordinates'] = 'lon lat'
    ds['uv_covariance'].attrs['grid_mapping'] = 'crs'

    # Set num_radials attributes
    ds['num_radials'].attrs[
        'long_name'] = 'Number of radial measurements used to calculate each totals velocity'
    ds['num_radials'].attrs[
        'comment'] = 'totals are not calculated with fewer than 3 contributing radial measurements from 2 sites'
    ds['num_radials'].attrs['coordinates'] = 'lon lat'
    ds['num_radials'].attrs['grid_mapping'] = 'crs'

    # Set processing_parameters attributes
    ds['processing_parameters'].attrs[
        'long_name'] = 'General and method specific processing parameter information'
    ds['processing_parameters'].attrs['comment'] = processing_parameters_info
    # ds['processing_parameters'].attrs['coordinates'] = 'parameters'

    # encoded_sites = data['TUV'].OtherMatrixVars.makeTotalsOI_TotalsSiteCode

    # # load site ids that are set in our mysqldb
    # query_obj = Session.query(tables.Sites)
    # site_encoding = pd.read_sql(query_obj.statement, query_obj.session.bind)
    #
    # # convert site codes into binary numbers
    # binary_positions_mat = data['conf'].Radials.Sites.shape[0]
    #
    # decoded_sites_mat = np.tile(0, (encoded_sites.shape[0], binary_positions_mat))
    #
    # for i, v in enumerate(encoded_sites):
    #     decoded_sites_mat[i] = np.array(map(int, np.binary_repr(v, width=binary_positions_mat)))
    #
    # decoded_sites_mat = np.fliplr(decoded_sites_mat)
    #
    # decoded_sites_new = np.tile(0, (encoded_sites.shape[0], np.max(site_encoding['id'])))
    #
    # for site in data['RTUV']:
    #     print site.SiteName + ' ' + str(np.log2(site.SiteCode))
    #     ind_mat = np.log2(site.SiteCode)
    #     ind_real = site_encoding['id'].loc[site_encoding['site'] == site.SiteName].values[0]
    #     decoded_sites_new[:, ind_real] = decoded_sites_mat[:, int(ind_mat)]
    #
    # decoded_sites_new = np.fliplr(decoded_sites_new)
    # new_encoded_sites = [bool2int(x[::-1]) for x in decoded_sites_new]
    # flag_masks = [2 ** int(x) for x in site_encoding['id'].tolist()]
    # flag_meanings = ' '.join(site_encoding['site'].tolist())

    # ds['site_code_flags'].attrs['long_name'] = 'Bitwise AND representation of site contributions to a radial point'
    # # ds['site_code_flags'].attrs['_FillValue'] = int(0)
    # ds['site_code_flags'].attrs['flag_masks'] = 'b '.join(map(str, flag_masks))
    # ds['site_code_flags'].attrs['flag_meanings'] = flag_meanings
    # ds['site_code_flags'].attrs['comment'] = 'Values are binary sums. Must be converted to binary representation to interpret flag_masks and flag_meanings'

    logging.debug(
        '{} - Setting variable encoding and fill values for netCDF4 output'.
        format(fname))

    # encode variables for export to netcdf
    encoding = make_encoding(ds)
    encoding['lon'] = dict(zlib=False, _FillValue=None)
    encoding['lat'] = dict(zlib=False, _FillValue=None)
    encoding['z'] = dict(zlib=False, _FillValue=None)

    # add container variables that contain no data
    kwargs = dict(crs=None, instrument=None)
    ds = ds.assign(**kwargs)

    # Set crs attributes
    ds['crs'].attrs['grid_mapping_name'] = 'latitude_longitude'
    ds['crs'].attrs['inverse_flattening'] = 298.257223563
    ds['crs'].attrs['long_name'] = 'Coordinate Reference System'
    ds['crs'].attrs['semi_major_axis'] = 6378137.0
    ds['crs'].attrs['epsg_code'] = 'EPSG:4326'
    ds['crs'].attrs['comment'] = 'http://www.opengis.net/def/crs/EPSG/0/4326'

    ds['instrument'].attrs['long_name'] = 'CODAR SeaSonde High Frequency Radar'
    ds['instrument'].attrs[
        'sensor_type'] = 'Direction-finding high frequency radar antenna'
    ds['instrument'].attrs['make_model'] = 'CODAR SeaSonde'
    ds['instrument'].attrs['serial_number'] = 1

    # Create save directory if it doesn't exist.
    create_dir(save_dir)

    logging.debug('{} - Saving dataset to netCDF4 file: {}'.format(
        fname, file_and_path))
    ds.to_netcdf(file_and_path,
                 encoding=encoding,
                 format='netCDF4',
                 engine='netcdf4',
                 unlimited_dims=['time'])
    logging.info('{} - netCDF4 file successfully created: {}'.format(
        fname, file_and_path))
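
A minimal usage sketch, assuming the grid CSV holds lon,lat columns and user_attributes is the attribute dictionary expected by configs.netcdf_global_attributes (all paths and values hypothetical):

import pandas as pd

grid = pd.read_csv('/path/to/grid.csv')  # must contain 'lon' and 'lat' columns
main(grid,
     '/path/to/TOTL_MARA_2019_01_01_0000.mat',
     '/path/to/nc/output',
     user_attributes=dict(title='MARA surface currents'),  # hypothetical attributes
     flags=dict(u_err=0.6, v_err=0.6),  # mask cells where u_err or v_err exceeds 0.6
     method='oi')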
Example #4
    files = sorted(
        [f.path for f in os.scandir(site) if f.name.endswith(types)])

    # if the list of files is not empty
    if files:
        # iterate through each file in the list
        for filename in files:
            # grab the filename from the path/filename combo
            basename = os.path.basename(filename)
            timestamp = timestamp_from_lluv_filename(basename)

            # create directory name in the format yyyy_mm
            new_dir = os.path.join(site, timestamp.strftime('%Y_%m'))

            # try to create the directory.
            create_dir(new_dir)

            if base_date > timestamp:  # if file is older than 30 days
                if os.path.isfile(os.path.join(
                        new_dir, basename)):  # file archived already?
                    # if it is, remove the file from the main folder of the site directory
                    logging.info(
                        '{} is older than 30 days and already archived in {}. Deleting from source directory.'
                        .format(basename, new_dir))
                    os.remove(filename)
                else:
                    # move it to the subdirectory, yyyy_mm
                    logging.info(
                        '{} is older than 30 days and is not archived. Moving to {}'
                        .format(basename, new_dir))
                    shutil.move(filename, new_dir)
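
This snippet runs inside a larger archiving routine; a sketch of the context it assumes, with site, types, and base_date taken from the code above (values hypothetical):

import datetime as dt

site = '/data/radials/SITE'    # directory of LLUV files to archive
types = ('.ruv', '.wls')       # suffix tuple passed to f.name.endswith()
base_date = dt.datetime.utcnow() - dt.timedelta(days=30)  # the "older than 30 days" cutoff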
Example #5
# Gridlines and grid labels
gl = ax.gridlines(draw_labels=True,
                  linewidth=1,
                  color='black',
                  alpha=0.5,
                  linestyle='--')
gl.xlabels_top = gl.ylabels_right = False
gl.xlabel_style = {'size': 15, 'color': 'gray'}
gl.ylabel_style = {'size': 15, 'color': 'gray'}
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER

# Axes properties and features
ax.set_extent([-76.5, -68.5, 35, 42.75])
ax.add_feature(LAND, zorder=0, edgecolor='black')
ax.add_feature(cfeature.LAKES)
ax.add_feature(cfeature.BORDERS)
ax.add_feature(state_lines, edgecolor='black')

fig_size = plt.rcParams["figure.figsize"]
fig_size[0] = 12
fig_size[1] = 8.5
plt.rcParams["figure.figsize"] = fig_size

sname = '{}.png'.format(os.path.basename(f))
save_name = os.path.join(save_dir, sname)
create_dir(save_dir)

plt.savefig(save_name, dpi=resoluton)
plt.close('all')
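
This fragment assumes an existing cartopy GeoAxes plus a few module-level names; a minimal setup sketch under those assumptions (the Natural Earth parameters and paths are illustrative):

import cartopy.crs as ccrs
import cartopy.feature as cfeature
from cartopy.feature import LAND
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
import matplotlib.pyplot as plt

# state/province borders are not a built-in shortcut, so build the feature by hand
state_lines = cfeature.NaturalEarthFeature(category='cultural',
                                           name='admin_1_states_provinces_lines',
                                           scale='50m',
                                           facecolor='none')

fig, ax = plt.subplots(subplot_kw=dict(projection=ccrs.PlateCarree()))
f = '/path/to/input_file.nc'   # source file; its basename becomes the PNG name
save_dir = '/path/to/plots'
resoluton = 150                # plot resolution in DPI (spelling as used in the snippet)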
Example #6
    def create_ruv(self, filename):
        """
        Create a CODAR Radial (.ruv) file from radial instance
        :param filename: User defined filename of radial file you want to save
        :return:
        """
        create_dir(os.path.dirname(filename))

        with open(filename, 'w') as f:
            # Write header
            for metadata_key, metadata_value in self.metadata.items():
                if 'ProcessedTimeStamp' in metadata_key:
                    break
                else:
                    f.write('%{}: {}\n'.format(metadata_key, metadata_value))

            # Write data tables. Anything beyond the first table is commented out.
            for table in self._tables.keys():
                for table_key, table_value in self._tables[table].items():
                    if table_key != 'data':
                        if table_key == 'TableType' and table == '1':
                            if 'QCTest' in self.metadata:
                                f.write('%QCReference: Quality control reference: IOOS QARTOD HF Radar ver 1.0 May 2016\n')
                                f.write('%QCFlagDefinitions: 1=pass 2=not_evaluated 3=suspect 4=fail 9=missing_data\n')
                                f.write('%QCTestFormat: "test_name [qc_thresholds]: test_result"\n')

                                for test in self.metadata['QCTest']:
                                    f.write('%QCTest: {}\n'.format(test))
                            f.write('%{}: {}\n'.format(table_key, table_value))
                        elif table_key == 'TableColumns':
                            f.write('%TableColumns: {}\n'.format(len(self._tables[table]['data'].columns)))
                        elif table_key == 'TableStart':
                            f.write('%{}: {}\n'.format(table_key, table_value))
                            for line in self._tables[table]['_TableHeader']:
                                f.write(line)
                        elif table_key == '_TableHeader':
                            pass
                        else:
                            f.write('%{}: {}\n'.format(table_key, table_value))

                if 'datetime' in self._tables[table]['data'].keys():
                    self._tables[table]['data'] = self._tables[table]['data'].drop(['datetime'], axis=1)

                if table == '1':
                    # Fill NaN with 999.000, the standard fill value for CODAR LLUV files
                    self.data = self.data.fillna(999.000)
                    self.data.to_string(f, index=False, justify='center', header=False)
                else:
                    f.write('%%{}\n'.format(self._tables[table]['TableColumnTypes']))
                    self._tables[table]['data'].insert(0, '%%', '%')
                    self._tables[table]['data'] = self._tables[table]['data'].fillna(999.000)
                    self._tables[table]['data'].to_string(f, index=False, justify='center', header=False)

                if int(table) > 1:
                    f.write('\n%TableEnd: {}\n'.format(table))
                else:
                    f.write('\n%TableEnd: \n')
                f.write('%%\n')

            # Write footer containing processing information
            f.write('%ProcessedTimeStamp: {}\n'.format(self.metadata['ProcessedTimeStamp']))
            for tool in self.metadata['ProcessingTool']:
                f.write('%ProcessingTool: {}\n'.format(tool))
                # f.write('%{}: {}\n'.format(footer_key, footer_value))
            f.write('%End:')
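
For reference, an illustrative sketch of the LLUV layout this writer produces; every line corresponds to an f.write above, and the angle-bracket values are placeholders, not verbatim output:

%<metadata key>: <value>          (one line per header metadata item)
%TableType: <table type>
%TableColumns: <number of data columns>
%TableStart:
<column header lines, then data rows with NaN written as 999.000>
%TableEnd:
%%
%ProcessedTimeStamp: <timestamp>
%ProcessingTool: <tool name and version>
%End: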
Example #7
def plot_common(time,
                lon,
                lat,
                u,
                v,
                *,
                output_file=None,
                meshgrid=True,
                sub=2,
                velocity_min=None,
                velocity_max=None,
                markers=None,
                title='HF Radar'):
    """
    :param markers: a list of 3-element tuples/lists containing [lon, lat, marker kwargs]
                    to be passed into ax.plot()
                    eg. [
                            [-74.6, 38.5, dict(marker='o', markersize=8, color='r')],
                            [-70.1, 35.2, dict(marker='o', markersize=8, color='b')]
                        ]
    """
    markers = markers or []

    u = ma.masked_invalid(u)
    v = ma.masked_invalid(v)

    angle, speed = uv2spdir(u, v)
    us, vs = spdir2uv(np.ones_like(speed), angle, deg=True)

    if meshgrid is True:
        lons, lats = np.meshgrid(lon, lat)
    else:
        lons, lats = lon, lat

    velocity_min = velocity_min or 0
    velocity_max = velocity_max or np.nanmax(speed) or 15

    speed_clipped = np.clip(speed[::sub, ::sub], velocity_min,
                            velocity_max).squeeze()

    fig, ax = plt.subplots(figsize=(11, 8),
                           subplot_kw=dict(projection=ccrs.PlateCarree()))

    # Plot title
    plt.title('{}\n{}'.format(title, time))

    # plot arrows over pcolor
    h = ax.quiver(lons[::sub, ::sub],
                  lats[::sub, ::sub],
                  us[::sub, ::sub],
                  vs[::sub, ::sub],
                  speed_clipped,
                  cmap='jet',
                  scale=60)

    divider = make_axes_locatable(ax)
    cax = divider.new_horizontal(size='5%', pad=0.05, axes_class=plt.Axes)
    fig.add_axes(cax)

    # generate colorbar
    ticks = np.linspace(velocity_min, velocity_max, 5)
    cb = plt.colorbar(h, cax=cax, ticks=ticks)
    cb.ax.set_yticklabels([f'{s:.2f}' for s in ticks])
    cb.set_label('cm/s')

    for m in markers:
        ax.plot(m[0], m[1], **m[2])

    # Gridlines and grid labels
    gl = ax.gridlines(draw_labels=True,
                      linewidth=1,
                      color='black',
                      alpha=0.5,
                      linestyle='--')
    gl.xlabels_top = gl.ylabels_right = False
    gl.xlabel_style = {'size': 10, 'color': 'gray'}
    gl.ylabel_style = {'size': 10, 'color': 'gray'}
    gl.xformatter = LONGITUDE_FORMATTER
    gl.yformatter = LATITUDE_FORMATTER

    # Axes properties and features
    ax.set_extent([lon.min() - 1, lon.max() + 1, lat.min() - 1, lat.max() + 1])
    ax.add_feature(LAND, zorder=0, edgecolor='black')
    ax.add_feature(cfeature.LAKES)
    ax.add_feature(cfeature.BORDERS)
    ax.add_feature(state_lines, edgecolor='black')

    fig_size = plt.rcParams["figure.figsize"]
    fig_size[0] = 12
    fig_size[1] = 8.5
    plt.rcParams["figure.figsize"] = fig_size

    if output_file is not None:
        create_dir(str(Path(output_file).parent))
        resolution = 150  # plot resolution in DPI
        plt.savefig(output_file, dpi=resolution)
        plt.close('all')
    else:
        return plt
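
A minimal usage sketch, assuming a gridded totals file like those produced in Example #3 (path hypothetical):

import xarray as xr

ds = xr.open_dataset('RU_MARA_20190101T000000Z.nc').squeeze()  # drop singleton time/z dims
plot_common(ds['time'].values, ds['lon'].values, ds['lat'].values,
            ds['u'].values, ds['v'].values,
            output_file='/path/to/plots/totals.png',
            markers=[[-74.6, 38.5, dict(marker='o', markersize=8, color='r')]])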
Example #8
def main(wave_file, save_dir, wave_min=0.2, wave_max=5):
    """

    :param wave_file: Path to wave file
    :param save_dir: Path to save directory for generated NetCDF4 files
    :param wave_min: Minimum wave height to include in netCDF file
    :param wave_max: Maximum wave height to include in netCDF file
    :return:
    """
    w = Waves(wave_file, multi_dimensional=True)

    # Mask wave heights outside the (wave_min, wave_max) range
    w.data = w.data.where((wave_min < w.data['wave_height'])
                          & (w.data['wave_height'] < wave_max))

    # Grab min and max time in dataset for entry into global attributes for cf compliance
    try:
        time_start = w.data['time'].min().data
    except Exception:
        # no usable time data in this file; nothing to convert
        return
    time_end = w.data['time'].max().data

    lonlat = [float(x) for x in w.data.Origin.split()]

    length = len(w.data['time'])
    w.data['lon'] = xr.DataArray(np.full(length, lonlat[0]), dims=('time'))
    w.data['lat'] = xr.DataArray(np.full(length, lonlat[1]), dims=('time'))

    # Assign global attributes for CF compliant time series files
    global_attr = netcdf_global_attributes(required_attributes, time_start,
                                           time_end)
    ds = w.data.assign_attrs(global_attr)

    # set time attribute
    ds['time'].attrs['standard_name'] = 'time'
    ds['time'].attrs['long_name'] = 'Universal Time Coordinated (UTC) Time'

    # Set wave_height attributes
    ds['wave_height'].attrs['long_name'] = 'wave model height in meters'
    ds['wave_height'].attrs[
        'standard_name'] = 'sea_surface_wave_significant_height'
    ds['wave_height'].attrs['units'] = 'm'
    ds['wave_height'].attrs[
        'comment'] = 'wave model height in meters for every one of three waves'
    ds['wave_height'].attrs['valid_min'] = np.double(0)
    ds['wave_height'].attrs['valid_max'] = np.double(100)
    ds['wave_height'].attrs['coordinates'] = 'time'
    ds['wave_height'].attrs['grid_mapping'] = 'crs'
    ds['wave_height'].attrs['coverage_content_type'] = 'physicalMeasurement'

    # Set wave_period attributes
    ds['wave_period'].attrs['long_name'] = 'wave spectra period in seconds'
    ds['wave_period'].attrs['standard_name'] = 'sea_surface_wave_mean_period'
    ds['wave_period'].attrs['units'] = 's'
    ds['wave_period'].attrs['comment'] = 'wave spectra period in seconds'
    ds['wave_period'].attrs['valid_min'] = np.double(0)
    ds['wave_period'].attrs['valid_max'] = np.double(100)
    ds['wave_period'].attrs['coordinates'] = 'time'
    ds['wave_period'].attrs['grid_mapping'] = 'crs'
    ds['wave_period'].attrs['coverage_content_type'] = 'physicalMeasurement'

    # Set wave_bearing attributes
    ds['wave_bearing'].attrs['long_name'] = 'wave from direction in degrees'
    ds['wave_bearing'].attrs[
        'standard_name'] = 'sea_surface_wave_from_direction'
    ds['wave_bearing'].attrs['units'] = 'degrees'
    ds['wave_bearing'].attrs['comment'] = 'wave from direction in degrees'
    ds['wave_bearing'].attrs['valid_min'] = np.double(0)
    ds['wave_bearing'].attrs['valid_max'] = np.double(360)
    ds['wave_bearing'].attrs['coordinates'] = 'time'
    ds['wave_bearing'].attrs['grid_mapping'] = 'crs'
    ds['wave_bearing'].attrs['coverage_content_type'] = 'physicalMeasurement'

    # Set wind_bearing attributes
    ds['wind_bearing'].attrs['long_name'] = 'wind from direction in degrees'
    ds['wind_bearing'].attrs[
        'standard_name'] = 'sea_surface_wind_wave_from_direction'
    ds['wind_bearing'].attrs['units'] = 'degrees'
    ds['wind_bearing'].attrs['comment'] = 'wind from direction in degrees'
    ds['wind_bearing'].attrs['valid_min'] = np.double(0)
    ds['wind_bearing'].attrs['valid_max'] = np.double(360)
    ds['wind_bearing'].attrs['coordinates'] = 'time'
    ds['wind_bearing'].attrs['grid_mapping'] = 'crs'
    ds['wind_bearing'].attrs['coverage_content_type'] = 'physicalMeasurement'

    # Set lon attributes
    ds['lon'].attrs['long_name'] = 'Longitude'
    ds['lon'].attrs['standard_name'] = 'longitude'
    ds['lon'].attrs['short_name'] = 'lon'
    ds['lon'].attrs['units'] = 'degrees_east'
    ds['lon'].attrs['axis'] = 'X'
    ds['lon'].attrs['valid_min'] = np.double(-180.0)
    ds['lon'].attrs['valid_max'] = np.double(180.0)
    ds['lon'].attrs['grid_mapping'] = 'crs'

    # Set lat attributes
    ds['lat'].attrs['long_name'] = 'Latitude'
    ds['lat'].attrs['standard_name'] = 'latitude'
    ds['lat'].attrs['short_name'] = 'lat'
    ds['lat'].attrs['units'] = 'degrees_north'
    ds['lat'].attrs['axis'] = 'Y'
    ds['lat'].attrs['valid_min'] = np.double(-90.0)
    ds['lat'].attrs['valid_max'] = np.double(90.0)
    ds['lat'].attrs['grid_mapping'] = 'crs'

    encoding = make_encoding(ds)

    # add container variables that contain no data
    kwargs = dict(crs=None, instrument=None)
    ds = ds.assign(**kwargs)

    # Set crs attributes
    ds['crs'].attrs['grid_mapping_name'] = 'latitude_longitude'
    ds['crs'].attrs['inverse_flattening'] = 298.257223563
    ds['crs'].attrs['long_name'] = 'Coordinate Reference System'
    ds['crs'].attrs['semi_major_axis'] = 6378137.0
    ds['crs'].attrs['epsg_code'] = 'EPSG:4326'
    ds['crs'].attrs['comment'] = 'http://www.opengis.net/def/crs/EPSG/0/4326'

    ds['instrument'].attrs['long_name'] = 'CODAR SeaSonde High Frequency Radar'
    ds['instrument'].attrs[
        'sensor_type'] = 'Direction-finding high frequency radar antenna'
    ds['instrument'].attrs['make_model'] = 'CODAR SeaSonde'
    ds['instrument'].attrs['serial_number'] = 1

    create_dir(save_dir)
    nc_file = '{}.nc'.format(
        os.path.join(save_dir,
                     os.path.basename(wave_file).split('.')[0]))

    # Convert files to netcdf
    ds.to_netcdf(nc_file,
                 encoding=encoding,
                 format='netCDF4',
                 engine='netcdf4',
                 unlimited_dims=['time'])
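
A minimal usage sketch, assuming the Waves class and required_attributes come from the surrounding package (path hypothetical):

main('/path/to/wave_file.wls', '/path/to/nc/waves', wave_min=0.2, wave_max=5)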