Example #1
def read_idrisi_data(fname, field_name, fill_value=-99.):
    """
    Reads DEM data from an IDRISI .rst file

    Parameters
    ----------
    fname : str
        name of the file to read
    field_name : str
        name of the variable to read
    fill_value : float
        The fill value

    Returns
    -------
    dem_data : dictionary
        dictionary with the data and metadata

    """
    if not _GDAL_AVAILABLE:
        warn("gdal is required to use read_idrisi_data but is not installed")
        return None

    # read the data
    try:
        raster = gdal.Open(fname)
        raster_array = raster.ReadAsArray()
        raster_array = np.ma.masked_equal(raster_array, fill_value)

        metadata = read_idrisi_metadata(fname)

        if metadata is None:
            return None

        field_dict = get_metadata(field_name)
        field_dict['data'] = np.transpose(raster_array)[:, ::-1]
        field_dict['units'] = metadata['value units']

        x = get_metadata('x')
        y = get_metadata('y')
        x['data'] = (np.arange(raster.RasterXSize) * metadata['resolution'] +
                     metadata['resolution'] / 2. + metadata['min. X'])

        y['data'] = (np.arange(raster.RasterYSize) * metadata['resolution'] +
                     metadata['resolution'] / 2. + metadata['min. Y'])

        dem_data = {
            'metadata': metadata,
            'x': x,
            'y': y,
            field_name: field_dict
        }

        return dem_data
    except EnvironmentError as ee:
        warn(str(ee))
        warn('Unable to read file ' + fname)
        return None
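
A minimal usage sketch (the file name below is hypothetical; gdal and the
IDRISI metadata reader must be available):

dem = read_idrisi_data('dem_lema.rst', 'terrain_altitude')
if dem is not None:
    print(dem['metadata']['value units'])
    print(dem['terrain_altitude']['data'].shape)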
Example #2
def predict_labels(myradar, model, filt=True):

    from pyart.config import get_metadata

    # feature columns expected by the model
    fields = ['DBZH', 'ZDR', 'RHOHV', 'WRADH', 'KDP', 'ALT']

    pred_data = np.zeros((myradar.fields['DBZH']['data'][:].flatten().size,
                          len(fields)))
    orig_shape = np.shape(myradar.fields['DBZH']['data'][:])

    for f, field in enumerate(fields):
        if field == 'ALT':
            pred_data[:, f] = myradar.gate_z['data'][:].flatten()
        else:
            pred_data[:, f] = myradar.fields[field]['data'][:].flatten()

    # do the classification here, assume 1D label output from the model
    labels = np.reshape(model.predict(pred_data), orig_shape)

    # get_metadata expects a field name; unknown names yield an empty dict
    GMM_field = get_metadata('GMM_n' + str(model.n_components))
    GMM_field['data'] = labels
    GMM_field['units'] = 'NA'
    GMM_field['standard_name'] = 'GMM_n' + str(model.n_components)
    GMM_field['long_name'] = ('Labels as predicted by Gaussian Mixture '
                              'Model where k = ' + str(model.n_components))
    return GMM_field
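
A hedged usage sketch, assuming a scikit-learn GaussianMixture fitted on
the same six feature columns (the training array is hypothetical):

from sklearn.mixture import GaussianMixture

model = GaussianMixture(n_components=4).fit(training_features)  # hypothetical data
gmm_field = predict_labels(myradar, model)
myradar.add_field('GMM_n4', gmm_field)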
Example #3
def get_iso0_field(hzt_data, hzt_ind, z_radar, field_name='height_over_iso0'):
    """
    Get the height over iso0 data corresponding to each radar gate
    using a precomputed nearest-neighbour lookup table

    Parameters
    ----------
    hzt_data : dict
        dictionary containing the HZT data and metadata
    hzt_ind : dict
        dictionary containing a field of HZT indices and metadata
    z_radar : ndarray
        gates altitude [m MSL]
    field_name : str
        names of HZT parameters (default height_over_iso0)

    Returns
    -------
    iso0_field : dict
        dictionary with the height over iso0 field and metadata

    """
    nrays, ngates = np.shape(hzt_ind['data'])
    values = hzt_data['HZT']['data'][:, :].flatten()
    field_dict = get_metadata(field_name)
    field_dict['data'] = z_radar - values[hzt_ind['data'].flatten()].reshape(
        nrays, ngates).astype(float)

    return field_dict
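
A synthetic check of the lookup (all arrays below are made up; the field
name must exist in the Py-ART/Pyrad metadata configuration):

import numpy as np

hzt_data = {'HZT': {'data': np.array([[3000., 3100.], [3200., 3300.]])}}
hzt_ind = {'data': np.array([[0, 3]])}   # one ray, two gates
z_radar = np.array([[3500., 4000.]])
iso0 = get_iso0_field(hzt_data, hzt_ind, z_radar)
# iso0['data'] -> [[500., 700.]]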
Example #4
def dem2radar_data(radar, dem_data, slice_xy=True, field_name='visibility'):
    """
    get the DEM value corresponding to each radar gate using nearest
    neighbour interpolation

    Parameters
    ----------
    radar : Radar
        the radar object containing the information on the position of the
        radar gates
    dem_data : dict
        dictionary containing the DEM data
    slice_xy : boolean
        if true the horizontal plane of the DEM field is cut to the
        dimensions of the radar field
    field_name : str
        name of the DEM field to convert (default 'visibility')

    Returns
    -------
    dem_field : dict
        dictionary with the DEM field and metadata

    """
    # debugging
    # start_time = time.time()

    x_radar, y_radar, _ = _put_radar_in_swiss_coord(radar)

    (x_dem, y_dem, ind_xmin, ind_ymin, ind_xmax,
     ind_ymax) = (_prepare_for_interpolation(x_radar,
                                             y_radar,
                                             dem_data,
                                             slice_xy=slice_xy))

    if field_name not in dem_data:
        warn('DEM field ' + field_name + ' data not available')
        return None

    values = dem_data[field_name]['data'][ind_xmin:ind_xmax + 1,
                                          ind_ymin:ind_ymax + 1].flatten()
    # find interpolation function
    tree_options = {'compact_nodes': False, 'balanced_tree': False}
    interp_func = NearestNDInterpolator((x_dem, y_dem),
                                        values,
                                        tree_options=tree_options)

    del values

    # interpolate
    data_interp = interp_func((x_radar, y_radar))

    # put field
    field_dict = get_metadata(field_name)
    field_dict['data'] = data_interp.astype(float)

    del data_interp

    return field_dict
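
A hedged usage sketch (the radar file is hypothetical and dem_data would
come from a DEM reader such as read_idrisi_data above):

import pyart

radar = pyart.io.read('radar_volume.nc')  # hypothetical file
vis = dem2radar_data(radar, dem_data, field_name='visibility')
if vis is not None:
    radar.add_field('visibility', vis)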
Example #5
def _make_constant_refl_radar(fill=5.0):
    """ Create radar with constant reflectivity. """

    radar = sample_objects.make_empty_ppi_radar(101, 360, 1)
    refl_dict = get_metadata('reflectivity')
    refl_dict['data'] = np.full((radar.nrays, radar.ngates), fill)
    radar.add_field(get_field_name('reflectivity'), refl_dict)
    return radar
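
For reference, pyart.config.get_metadata returns a plain dict of default
metadata for a known field name; with the default Py-ART configuration:

from pyart.config import get_metadata

refl = get_metadata('reflectivity')
print(refl['units'])          # 'dBZ'
print(refl['standard_name'])  # 'equivalent_reflectivity_factor'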
Example #6
File: common.py Project: kejingjing/roapy
def _populate_legacy_axes(radar, domain):
    """ Populate legacy grid axes data and metadata. """

    # Populate coordinate information
    x_disp = get_metadata('x')
    x_disp['data'] = domain.x.astype(np.float32)

    y_disp = get_metadata('y')
    y_disp['data'] = domain.y.astype(np.float32)

    z_disp = get_metadata('z')
    z_disp['data'] = domain.z.astype(np.float32)

    # Populate grid origin information
    alt = get_metadata('origin_altitude')
    alt['data'] = np.atleast_1d(domain.alt_0).astype(np.float32)

    lat = get_metadata('origin_latitude')
    lat['data'] = np.atleast_1d(domain.lat_0).astype(np.float32)

    lon = get_metadata('origin_longitude')
    lon['data'] = np.atleast_1d(domain.lon_0).astype(np.float32)

    # Populate grid time information
    time = get_metadata('grid_time')
    time['data'] = np.atleast_1d(radar.time['data'].min()).astype(np.float64)
    time['units'] = radar.time['units']

    time_start = get_metadata('grid_time_start')
    time_start['data'] = np.atleast_1d(
        radar.time['data'].min()).astype(np.float64)
    time_start['units'] = radar.time['units']

    time_end = get_metadata('grid_time_end')
    time_end['data'] = np.atleast_1d(
        radar.time['data'].max()).astype(np.float64)
    time_end['units'] = radar.time['units']

    return {
        'time': time,
        'time_start': time_start,
        'time_end': time_end,
        'x_disp': x_disp,
        'y_disp': y_disp,
        'z_disp': z_disp,
        'alt': alt,
        'lat': lat,
        'lon': lon,
        }
Example #7
def hzt2radar_coord(radar, hzt_coord, slice_xy=True, field_name=None):
    """
    Given the radar coordinates find the nearest HZT pixel

    Parameters
    ----------
    radar : Radar
        the radar object containing the information on the position of the
        radar gates
    hzt_coord : dict
        dictionary containing the HZT coordinates
    slice_xy : boolean
        if true the horizontal plane of the HZT field is cut to the
        dimensions of the radar field
    field_name : str
        name of the field

    Returns
    -------
    hzt_ind_field : dict
        dictionary containing a field of HZT indices and metadata

    """
    # parse the field parameters
    if field_name is None:
        field_name = get_field_name('hzt_index')

    x_radar, y_radar, _ = _put_radar_in_swiss_coord(radar)

    x_hzt, y_hzt, ind_xmin, ind_ymin, ind_xmax, _ = (
        _prepare_for_interpolation(x_radar,
                                   y_radar,
                                   hzt_coord,
                                   slice_xy=slice_xy))

    tree = cKDTree(np.transpose((y_hzt, x_hzt)))
    _, ind_vec = tree.query(np.transpose(
        (y_radar.flatten(), x_radar.flatten())),
                            k=1)

    # put the index in the original HZT coordinates
    nx_hzt = len(hzt_coord['x']['data'])

    nx = ind_xmax - ind_xmin + 1

    ind_y = (ind_vec // nx).astype(int) + ind_ymin
    ind_x = (ind_vec % nx).astype(int) + ind_xmin
    ind_hzt = (ind_x + nx_hzt * ind_y).astype(int)

    hzt_ind_field = get_metadata(field_name)
    hzt_ind_field['data'] = ind_hzt.reshape(radar.nrays, radar.ngates)

    return hzt_ind_field
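
The flat-index arithmetic above assumes the HZT grid is flattened row-major
with x varying fastest; a quick self-contained check (the grid width is
illustrative):

import numpy as np

nx_hzt = 710
ind_x, ind_y = np.array([3]), np.array([2])
ind_flat = ind_x + nx_hzt * ind_y      # array([1423])
assert (ind_flat % nx_hzt == ind_x).all()
assert (ind_flat // nx_hzt == ind_y).all()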
Example #8
def hzt2radar_data(radar,
                   hzt_coord,
                   hzt_data,
                   slice_xy=True,
                   field_name='height_over_iso0'):
    """
    get the HZT value corresponding to each radar gate using nearest
    neighbour interpolation

    Parameters
    ----------
    radar : Radar
        the radar object containing the information on the position of the
        radar gates
    hzt_coord : dict
        dictionary containing the HZT coordinates
    hzt_data : dict
        dictionary containing the HZT data
    slice_xy : boolean
        if true the horizontal plane of the HZT field is cut to the
        dimensions of the radar field
    field_name : str
        name of HZT fields to convert (default height_over_iso0)

    Returns
    -------
    hzt_field : dict
        dictionary with the HZT field and metadata

    """
    x_radar, y_radar, z_radar = _put_radar_in_swiss_coord(radar)

    x_hzt, y_hzt, ind_xmin, ind_ymin, ind_xmax, ind_ymax = (
        _prepare_for_interpolation(x_radar,
                                   y_radar,
                                   hzt_coord,
                                   slice_xy=slice_xy))

    values = hzt_data['HZT']['data'][ind_ymin:ind_ymax + 1,
                                     ind_xmin:ind_xmax + 1].flatten()
    # find interpolation function
    interp_func = NearestNDInterpolator((y_hzt, x_hzt), values)

    # interpolate
    data_interp = interp_func((y_radar, x_radar))

    # put field
    field_dict = get_metadata(field_name)
    field_dict['data'] = (z_radar - data_interp).astype(float)

    return field_dict
Example #9
File: common.py Project: kejingjing/roapy
def populate_field(data, inds, shape, field, weights=None, mask=None,
                   fill_value=None):
    """
    Create mapped radar field data dictionary.

    Parameters
    ----------
    data : ndarray
        Input radar data.
    inds : ndarray
        Indices corresponding to the k-nearest neighbours.
    shape : list-like
        Shape of analysis grid.
    field : str
        Field name.
    weights : ndarray, optional
        Distance-dependent weights applied to k-nearest neighbours. Use default
        None for nearest neighbor scheme. Must have same shape as inds.
    mask : ndarray, optional
        Masking will be applied where mask is True. Must have same shape as
        flattened grid.
    fill_value : float, optional
        Value indicating missing or bad data in input data. If None, default
        value in configuration file is used.

    Returns
    -------
    field_dict : dict
        Field dictionary containing data and metadata.

    """

    if fill_value is None:
        fill_value = get_fillvalue()

    if weights is None:
        fq = data[inds]
    else:
        fq = np.ma.average(data[inds], weights=weights, axis=1)

    if mask is not None:
        fq = np.ma.masked_where(mask, fq, copy=False)
    fq.set_fill_value(fill_value)

    # Populate field dictionary
    field_dict = get_metadata(field)
    field_dict['data'] = fq.reshape(shape).astype(np.float32)
    if np.ma.is_masked(fq):
        field_dict['_FillValue'] = fq.fill_value

    return field_dict
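
A synthetic sketch of the weighted k-nearest-neighbour averaging (four grid
points, k=2 neighbours, equal weights; all arrays below are made up):

import numpy as np

data = np.ma.arange(10.)
inds = np.array([[0, 1], [2, 3], [4, 5], [6, 7]])
weights = np.ones(inds.shape)
mask = np.array([False, False, True, False])
fd = populate_field(data, inds, (1, 2, 2), 'reflectivity',
                    weights=weights, mask=mask)
# fd['data'] -> [[[0.5, 2.5], [--, 6.5]]]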
Example #10
def main():
    """
    """
    file_base = '/store/msrad/radar/pyrad_products/rad4alp_hydro_PHA/'
    time_dir_list = ['2017-06-29']
    trt_cell_id = '2017062913000174'

    datatype_list = ['dBZc', 'ZDRc', 'RhoHVc', 'KDPc', 'TEMP', 'hydro']
    dataset_list = [
        'reflectivity', 'ZDRc', 'RhoHVc', 'KDPc', 'temperature', 'hydroclass'
    ]

    print("====== Plot time-hist started: %s" %
          datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"))
    atexit.register(_print_end_msg, "====== Plot time-hist finished: ")

    for time_dir in time_dir_list:
        for i, datatype in enumerate(datatype_list):
            dataset = dataset_list[i]
            file_path = file_base + time_dir + '/' + dataset + '_trt_traj/HISTOGRAM/'
            flist = glob.glob(file_path + '*_' + trt_cell_id +
                              '_histogram_*_' + datatype + '.csv')

            if not flist:
                warn('No histogram files found in ' + file_path +
                     ' for TRT cell ' + trt_cell_id)
                continue

            tbin_edges, bin_edges, data_ma = read_histogram_ts(flist, datatype)

            basepath_out = os.path.dirname(flist[0])
            fname = (basepath_out + '/' + trt_cell_id + '_trt_HISTOGRAM_' +
                     datatype + '.png')
            field_name = get_fieldname_pyart(datatype)
            field_dict = get_metadata(field_name)
            titl = 'TRT cell ' + trt_cell_id + '\n' + get_field_name(
                field_dict, field_name)

            _plot_time_range(tbin_edges,
                             bin_edges,
                             data_ma,
                             'frequency_of_occurrence', [fname],
                             titl=titl,
                             ylabel=get_colobar_label(field_dict, field_name),
                             vmin=0.,
                             vmax=np.max(data_ma),
                             figsize=[10, 8],
                             dpi=72)

            print("----- plot to '%s'" % fname)
Example #11
File: pyodim.py Project: vlouf/pyodim
def field_metadata(quantity_name: str) -> Dict:
    """
    Populate metadata for common fields using the Py-ART get_metadata()
    function (optional).

    Parameter:
    ==========
    quantity_name: str
        ODIM H5 quantity attribute name.

    Returns:
    ========
    attrs: dict
        Metadata dictionary.
    """
    try:
        from pyart.config import get_metadata
    except Exception:
        return {}
    ODIM_H5_FIELD_NAMES = {
        "TH": "total_power",  # uncorrected reflectivity, horizontal
        "TV": "total_power",  # uncorrected reflectivity, vertical
        "DBZH": "reflectivity",  # corrected reflectivity, horizontal
        "DBZH_CLEAN": "reflectivity",  # corrected reflectivity, horizontal
        "DBZV": "reflectivity",  # corrected reflectivity, vertical
        "ZDR": "differential_reflectivity",  # differential reflectivity
        "RHOHV": "cross_correlation_ratio",
        "LDR": "linear_polarization_ratio",
        "PHIDP": "differential_phase",
        "KDP": "specific_differential_phase",
        "SQI": "normalized_coherent_power",
        "SNR": "signal_to_noise_ratio",
        "SNRH": "signal_to_noise_ratio",
        "VRAD": "velocity",  # radial velocity, marked for deprecation in ODIM HDF5 2.2
        "VRADH": "velocity",  # radial velocity, horizontal polarisation
        "VRADDH": "corrected_velocity",  # radial velocity, horizontal polarisation
        "VRADV": "velocity",  # radial velocity, vertical polarisation
        "WRAD": "spectrum_width",
        "QIND": "quality_index",
    }

    try:
        fname = ODIM_H5_FIELD_NAMES[quantity_name]
        attrs = get_metadata(fname)
        attrs.pop("coordinates", None)
    except KeyError:
        return {}

    return attrs
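
Usage is a plain lookup, and unknown quantities fall through to an empty
dict:

attrs = field_metadata("DBZH")
print(attrs.get("units"))     # 'dBZ' when Py-ART is installed
print(field_metadata("FOO"))  # {}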
Example #12
def get_cosmo_fields(cosmo_data,
                     cosmo_ind,
                     time_index=0,
                     field_names=['temperature']):
    """
    Get the COSMO data corresponding to each radar gate
    using a precomputed look up table of the nearest neighbour

    Parameters
    ----------
    cosmo_data : dict
        dictionary containing the COSMO data and metadata
    cosmo_ind : dict
        dictionary containing a field of COSMO indices and metadata
    time_index : int
        index of the forecasted data
    field_names : list of str
        names of COSMO parameters (default ['temperature'])

    Returns
    -------
    cosmo_fields : list of dict
        list of dictionaries with the COSMO fields and metadata

    """
    nrays, ngates = np.shape(cosmo_ind['data'])
    cosmo_fields = []
    for field in field_names:
        if field not in cosmo_data:
            warn('COSMO field ' + field + ' data not available')
        else:
            values = cosmo_data[field]['data'][time_index, :, :, :].flatten()

            # put field
            field_dict = get_metadata(field)
            field_dict['data'] = values[cosmo_ind['data'].flatten()].reshape(
                nrays, ngates).astype(float)
            cosmo_fields.append({field: field_dict})

    if not cosmo_fields:
        warn('COSMO data not available')
        return None

    return cosmo_fields
Example #13
def get_field_unit(datatype):
    """
    Return unit of datatype.

    Parameters
    ----------
    datatype : str
        The data type

    Returns
    -------
    unit : str
        The unit

    """
    field_name = get_fieldname_pyart(datatype)
    field_dic = get_metadata(field_name)

    return field_dic['units']
Example #14
def get_field_name(datatype):
    """
    Return long name of datatype.

    Parameters
    ----------
    datatype : str
        The data type

    Returns
    -------
    name : str
        The name

    """
    field_name = get_fieldname_pyart(datatype)
    field_dic = get_metadata(field_name)
    name = field_dic['long_name'].replace('_', ' ')
    name = name[0].upper() + name[1:]

    return name
Example #15
def generate_field_name_str(datatype):
    """
    Generates a field name in an easy-to-read format.

    Parameters
    ----------
    datatype : str
        The data type

    Returns
    -------
    field_str : str
        The field name

    """
    field_name = get_fieldname_pyart(datatype)
    field_dic = get_metadata(field_name)
    field_str = field_dic['standard_name'].replace('_', ' ')
    field_str = field_str[0].upper() + field_str[1:]
    field_str += ' (' + field_dic['units'] + ')'

    return field_str
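
The three helpers above compose the same metadata lookup; assuming the
default Py-ART configuration and a datatype that maps to 'reflectivity',
one would expect output along these lines:

print(get_field_unit('dBZ'))           # 'dBZ'
print(get_field_name('dBZ'))           # 'Reflectivity'
print(generate_field_name_str('dBZ'))  # 'Equivalent reflectivity factor (dBZ)'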
Example #16
File: mapper.py Project: kejingjing/roapy
def grid_radar_nearest_neighbour(
        radar, domain, fields=None, gatefilter=None, max_range=None,
        leafsize=10, legacy=False, proc=1, dist_field=None, time_field=None,
        gqi_field=None, range_field=None, azimuth_field=None,
        elevation_field=None, debug=False, verbose=False):
    """
    Map volumetric radar data to a rectilinear grid using nearest neighbour.

    Parameters
    ----------
    radar : pyart.core.Radar
        Radar containing the fields to be mapped.
    domain : Domain
        Grid domain.
    fields : sequence of str, optional
        Radar fields to be mapped. If None, all available radar fields are
        mapped.
    gatefilter : pyart.filters.GateFilter, optional
        GateFilter used to determine the grid quality index. If None, no grid
        quality index field is returned.

    Optional parameters
    -------------------
    max_range : float, optional
        Grid points further than `max_range` from radar are excluded from
        mapping. If None, the maximum range of the radar is used.
    leafsize : int, optional
        The number of points at which the search algorithm switches over to
        brute-force. For nearest neighbour schemes this parameter will not
        significantly change processing time.
    legacy : bool, optional
        True to return a legacy Py-ART Grid. Note that the legacy Grid is
        planned for removal altogether in future Py-ART releases.
    proc : int, optional
        Number of processes to use when querying the k-d tree.
    debug : bool, optional
        True to print debugging information, False to suppress.
    verbose : bool, optional
        True to print relevant information, False to suppress.

    Return
    ------
    grid : pyart.core.Grid
        Grid containing the mapped volumetric radar data.

    """

    # Parse field names
    if dist_field is None:
        dist_field = get_field_name('nearest_neighbor_distance')
    if time_field is None:
        time_field = get_field_name('nearest_neighbor_time')
    if gqi_field is None:
        gqi_field = get_field_name('grid_quality_index')
    if range_field is None:
        range_field = get_field_name('range')
    if azimuth_field is None:
        azimuth_field = get_field_name('azimuth')
    if elevation_field is None:
        elevation_field = get_field_name('elevation')

    # Parse fields to map
    if fields is None:
        fields = radar.fields.keys()
    if isinstance(fields, str):
        fields = [fields]
    fields = [field for field in fields if field in radar.fields]

    # Calculate radar offset relative to grid origin
    domain.compute_radar_offset_from_origin(radar, debug=debug)

    # Compute Cartesian coordinates of radar gates and apply origin offset
    zg, yg, xg = transform.equivalent_earth_model(
        radar, offset=domain.radar_offset, debug=debug, verbose=verbose)

    # Create k-d tree for radar gate locations
    # Depending on the number of radar gates this can be resource intensive
    # but nonetheless should take on the order of 1 second to create
    if verbose:
        print('Creating k-d tree instance for radar gate locations')

    tree_radar = cKDTree(
        list(zip(zg, yg, xg)), leafsize=leafsize, compact_nodes=False,
        balanced_tree=False, copy_data=False)

    if debug:
        print('tree_radar.n = {}'.format(tree_radar.n))  # n radar gates
        print('tree_radar.m = {}'.format(tree_radar.m))  # m dimensions

    # Parse grid coordinates
    za, ya, xa = domain.coordinates
    if debug:
        print('Number of x grid points: {}'.format(domain.nx))
        print('Number of y grid points: {}'.format(domain.ny))
        print('Number of z grid points: {}'.format(domain.nz))

    # Query the radar gate k-d tree for nearest radar gates
    # This step consumes a majority of the processing time
    if verbose:
        print('Querying radar k-d tree for nearest radar gates')

    dists, idx = tree_radar.query(
        list(zip(za, ya, xa)), k=1, p=2.0, eps=0.0,
        distance_upper_bound=np.inf, n_jobs=proc)

    if debug:
        print('Distance array shape: {}'.format(dists.shape))
        print('Minimum gate-grid distance: {:.2f} m'.format(dists.min()))
        print('Maximum gate-grid distance: {:.2f} m'.format(dists.max()))
        print('Index array shape: {}'.format(idx.shape))
        print('Minimum index: {}'.format(idx.min()))
        print('Maximum index: {}'.format(idx.max()))

    # Parse maximum range
    if max_range is None:
        max_range = radar.range['data'].max()

    # Compute radar pointing directions in grid
    _range, azimuth, elevation = transform.radar_pointing_directions(
        domain, debug=debug, verbose=verbose)
    is_far = _range > max_range

    if debug:
        n = is_far.sum()
        print('Number of grid points too far from radar: {}'.format(n))

    map_fields = {}
    for field in fields:
        if verbose:
            print('Mapping radar field: {}'.format(field))

        # Parse nearest radar data
        # Mask grid points too far from radar
        fq = radar.fields[field]['data'].flatten()[idx]
        fq = np.ma.masked_where(is_far, fq, copy=False)

        # Populate mapped radar field dictionary
        map_fields[field] = get_metadata(field)
        map_fields[field]['data'] = fq.reshape(domain.shape).astype(np.float32)
        if np.ma.is_masked(fq):
            map_fields[field]['_FillValue'] = fq.fill_value

    # Add grid quality index field
    if gatefilter is not None:

        # Parse nearest gate filter data
        # Set grid quality index to zero for grid points too far from radar
        gqi = gatefilter.gate_included.flatten()[idx]
        gqi[is_far] = 0.0

        # Populate mapped grid quality index dictionary
        map_fields[gqi_field] = get_metadata(gqi_field)
        map_fields[gqi_field]['data'] = gqi.reshape(
            domain.shape).astype(np.float32)

    # Add nearest neighbour distance field
    map_fields[dist_field] = get_metadata(dist_field)
    map_fields[dist_field]['data'] = dists.reshape(
        domain.shape).astype(np.float32)

    # Add nearest neighbor time field
    time = radar.time['data'][:, np.newaxis].repeat(
        radar.ngates, axis=1).flatten()[idx]
    map_fields[time_field] = get_metadata(time_field)
    map_fields[time_field]['data'] = time.reshape(
        domain.shape).astype(np.float32)
    map_fields[time_field]['units'] = radar.time['units']

    # Add radar range field
    map_fields[range_field] = get_metadata(range_field)
    map_fields[range_field]['data'] = _range.reshape(
        domain.shape).astype(np.float32)

    # Add radar azimuth pointing direction field
    map_fields[azimuth_field] = get_metadata(azimuth_field)
    map_fields[azimuth_field]['data'] = azimuth.reshape(
        domain.shape).astype(np.float32)

    # Add radar elevation pointing direction field
    map_fields[elevation_field] = get_metadata(elevation_field)
    map_fields[elevation_field]['data'] = elevation.reshape(
        domain.shape).astype(np.float32)

    # Populate grid metadata
    metadata = common._populate_metadata(radar, weight=None)

    if legacy:
        axes = common._populate_legacy_axes(radar, domain)
        grid = Grid.from_legacy_parameters(map_fields, axes, metadata)
    else:
        grid = None  # non-legacy Grid construction is not implemented here

    return grid
Example #17
File: mapper.py Project: kejingjing/roapy
def grid_radar(radar, domain, weight=None, fields=None, gatefilter=None,
               toa=17000.0, max_range=None, legacy=False, fill_value=None,
               dist_field=None, weight_field=None, time_field=None,
               gqi_field=None, range_field=None, azimuth_field=None,
               elevation_field=None, debug=False, verbose=False):
    """
    Map volumetric radar data to a rectilinear grid. This routine uses a k-d
    tree space-partitioning data structure for the efficient searching of the
    k-nearest neighbours.

    Parameters
    ----------
    radar : pyart.core.Radar
        Radar containing the fields to be mapped.
    domain : Domain
        Grid domain.
    weight : Weight, optional
        Weight defining the radar data objective analysis parameters and
        available kd-tree information. If None, a one-pass isotropic
        distance-dependent Barnes weight with a constant smoothing parameter
        is used.
    fields : sequence of str, optional
        Radar fields to be mapped. If None, all available radar fields are
        mapped.
    gatefilter : pyart.filters.GateFilter, optional
        GateFilter used to determine the grid quality index. If None, no grid
        quality index field is returned.

    Optional parameters
    -------------------
    toa : float, optional
        Top of the atmosphere in meters. Radar gates above this altitude are
        ignored. Lower heights will increase processing time but may also
        produce poor results if the height is similar to the top level of the
        grid.
    max_range : float, optional
        Grid points further than `max_range` from radar are excluded from
        mapping. If None, the maximum range of the radar is used.
    legacy : bool, optional
        True to return a legacy Py-ART Grid. Note that the legacy Grid is
        planned for removal altogether in future Py-ART releases.
    debug : bool, optional
        True to print debugging information, False to suppress.
    verbose : bool, optional
        True to print relevant information, False to suppress.

    Return
    ------
    grid : pyart.core.Grid
        Grid containing the mapped volumetric radar data.

    """

    # Parse fill value
    if fill_value is None:
        fill_value = get_fillvalue()

    # Parse field names
    if dist_field is None:
        dist_field = get_field_name('nearest_neighbor_distance')
    if weight_field is None:
        weight_field = get_field_name('nearest_neighbor_weight')
    if time_field is None:
        time_field = get_field_name('nearest_neighbor_time')
    if gqi_field is None:
        gqi_field = get_field_name('grid_quality_index')
    if range_field is None:
        range_field = get_field_name('range')
    if azimuth_field is None:
        azimuth_field = get_field_name('azimuth')
    if elevation_field is None:
        elevation_field = get_field_name('elevation')

    # Parse fields to map
    if fields is None:
        fields = radar.fields.keys()
    elif isinstance(fields, str):
        fields = [fields]
    fields = [field for field in fields if field in radar.fields]

    # Parse radar data objective analysis weight
    if weight is None:
        weight = Weight(radar)

    # Parse maximum range
    if max_range is None:
        max_range = radar.range['data'].max()

    # Calculate radar offset relative to the analysis grid origin
    domain.compute_radar_offset_from_origin(radar, debug=debug)

    # Compute Cartesian coordinates of radar gates relative to specified origin
    # Add reference gate locations and current gate locations to weight object
    # which will help determine if the kd-tree needs to be requeried or not
    z_g, y_g, x_g = transform.equivalent_earth_model(
        radar, offset=domain.radar_offset, debug=debug, verbose=verbose)
    weight._add_gate_reference([z_g, y_g, x_g], replace_existing=False)
    weight._add_gate_coordinates([z_g, y_g, x_g])

    if debug:
        print('Number of radar gates before pruning: {}'.format(z_g.size))

    # Do not consider radar gates that are above the "top of the atmosphere"
    is_below_toa = z_g <= toa

    if debug:
        N = is_below_toa.sum()
        print('Number of radar gates below TOA: {}'.format(N))

    # Slice radar coordinates below the TOA
    z_g = z_g[is_below_toa]
    y_g = y_g[is_below_toa]
    x_g = x_g[is_below_toa]

    # Slice radar data fields below the TOA but preserve original radar data
    radar_data = {}
    for field in fields:
        data = radar.fields[field]['data'].copy().flatten()
        radar_data[field] = data[is_below_toa]

    # Parse coordinates of analysis grid
    z_a, y_a, x_a = domain.z, domain.y, domain.x
    nz, ny, nx = domain.nz, domain.ny, domain.nx

    if debug:
        print('Number of x grid points: {}'.format(nx))
        print('Number of y grid points: {}'.format(ny))
        print('Number of z grid points: {}'.format(nz))

    # Create analysis domain coordinates mesh
    z_a, y_a, x_a = np.meshgrid(z_a, y_a, x_a, indexing='ij')
    z_a, y_a, x_a = z_a.flatten(), y_a.flatten(), x_a.flatten()

    if debug:
        print('Grid 1-D array shape: {}'.format(z_a.shape))

    # Query the radar gate k-d tree for the k-nearest analysis grid points.
    # Also compute the distance-dependent weights
    # This is the step that consumes the most processing time, but it can be
    # skipped if results from a similar radar volume have already computed and
    # stored in the weight object
    if weight.requery(verbose=verbose):

        # Create k-d tree object from radar gate locations
        # Depending on the number of radar gates this can be resource intensive
        # but nonetheless should take on the order of 1 second to create
        weight.create_radar_tree(
            list(zip(z_g, y_g, x_g)), replace_existing=True, debug=debug,
            verbose=verbose)

        _, _ = weight.query_tree(
            list(zip(z_a, y_a, x_a)), store=True, debug=debug, verbose=verbose)

        # Compute distance-dependent weights
        _ = weight.compute_weights(weight.dists, store=True, verbose=verbose)

        # Reset reference radar gate coordinates
        weight._reset_gate_reference()

    # Missing neighbors are indicated with an index set to tree.n
    # This condition will not be met for the nearest neighbor scheme, but
    # it can be met for the Cressman and Barnes schemes if the cutoff radius
    # is not large enough
    is_bad_index = weight.inds == weight.radar_tree.n

    if debug:
        N = is_bad_index.sum()
        print('Number of invalid indices: {}'.format(N))

    # Grid points which are further than the specified maximum range away from
    # the radar should not contribute
    z_r, y_r, x_r = domain.radar_offset
    _range = np.sqrt((z_a - z_r)**2 + (y_a - y_r)**2 + (x_a - x_r)**2)
    is_far = _range > max_range

    if debug:
        N = is_far.sum()
        print('Number of analysis points too far from radar: {}'.format(N))

    # Populate grid fields
    map_fields = {}
    for field in fields:
        if verbose:
            print('Mapping radar field: {}'.format(field))

        map_fields[field] = common.populate_field(
            radar_data[field], weight.inds, (nz, ny, nx), field,
            weights=weight.wq, mask=is_far, fill_value=None)

    # Add grid quality index field
    if gatefilter is not None:

        # Compute distance-dependent weighted average of k-nearest neighbors
        # for included gates
        sqi = gatefilter.gate_included.flatten()[is_below_toa]
        gqi = np.average(sqi[weight.inds], weights=weight.wq, axis=1)
        gqi[is_far] = 0.0
        map_fields[gqi_field] = get_metadata(gqi_field)
        map_fields[gqi_field]['data'] = gqi.reshape(
            nz, ny, nx).astype(np.float32)

    # Add nearest neighbor distance field
    map_fields[dist_field] = get_metadata(dist_field)
    map_fields[dist_field]['data'] = weight.dists[:, 0].reshape(
        nz, ny, nx).astype(np.float32)

    # Add nearest neighbor weight field
    map_fields[weight_field] = get_metadata(weight_field)
    map_fields[weight_field]['data'] = weight.wq[:, 0].reshape(
        nz, ny, nx).astype(np.float32)

    # Add nearest neighbor time field
    time = radar.time['data'][:, np.newaxis].repeat(
        radar.ngates, axis=1).flatten()[is_below_toa][weight.inds]
    map_fields[time_field] = get_metadata(time_field)
    map_fields[time_field]['data'] = time[:, 0].reshape(
        nz, ny, nx).astype(np.float32)
    map_fields[time_field]['units'] = radar.time['units']

    # Populate grid metadata
    metadata = common._populate_metadata(radar, weight=weight)

    if legacy:
        axes = common._populate_legacy_axes(radar, domain)
        grid = Grid.from_legacy_parameters(map_fields, axes, metadata)
    else:
        grid = None

    return grid
Example #18
def load_mrms_ppi(fdict, **kwargs):
    """
    Read multiple field sweeps from MRMS NetCDF radar files.
    
    Input parameters
    ----------------
    fdict : (list)  --> list of dicts [{file: file1, ncvar: "Reflectivity", pvar: "reflectivity"},
                                       {file: file2, ncvar: "Velocity",     pvar: "corrected_velocity"}]

       file :     (str) --> name of netCDF MRMS file to read from
       ncvar :    (str) --> name of variable to read from that file
       pvar :     (str) --> mapped name of ncvar into Py-ART
    
    Returns
    -------
    radar : Radar  --> Py-ART Radar object
    
    TODO: For a given set of tilts, all tilts are truncated to the smallest
          number of gates among them. The data structure is not quite correct.

    """

    _debug = 0

    # Loop over files to find the dimensions of the data.

    n_gates = [1192]  # choose this to be the maximum number of gates we ever need
    n_rays = []
    n_elev = []
    gates = []

    for n, d in enumerate(fdict):

        try:
            ncfile = ncdf.Dataset(d['file'])
        except IOError:
            print('LOAD_PPI cannot open netCDF file: ', d['file'])
            break

        n_gates.append(len(ncfile.dimensions['Gate']))
        n_rays.append(len(ncfile.dimensions['Azimuth']))
        n_elev.append(ncfile.Elevation)

        ncfile.close()  # important to do this.

    _mygate = min(n_gates)

    if _debug > 0:
        print(n_gates)
        print('LOAD_PPI --> Number of files to read: %d' % len(fdict))
        print('LOAD_PPI --> _mygate: %d' % _mygate)

    # if we get this far, go ahead get to creating the radar object

    # this does not do anything yet except uses the object to create other objects
    # the default configuration is cfradial - for later.

    filemetadata = FileMetadata('cfradial')

    # create all the objects needed below

    _latitude = filemetadata('latitude')
    _longitude = filemetadata('longitude')
    _altitude = filemetadata('altitude')
    _metadata = filemetadata('metadata')
    _sweep_start_ray_index = filemetadata('sweep_start_ray_index')
    _sweep_end_ray_index = filemetadata('sweep_end_ray_index')
    _sweep_number = filemetadata('sweep_number')

    _sweep_mode = filemetadata('sweep_mode')
    _fixed_angle = filemetadata('fixed_angle')
    _time = filemetadata('time')
    _elevation = filemetadata('elevation')
    _azimuth = filemetadata('azimuth')
    _range = filemetadata('range')

    _scan_type = 'other'

    _fields = {}  # dict to hold data

    # loop through files..

    for n, d in enumerate(fdict):

        ncfile = ncdf.Dataset(d['file'])
        pvar = d['pvar']
        ncvar = d['ncvar']

        gwidth = ncfile.variables['GateWidth'][:].mean()

        if n == 0:  # do these things once

            start_time = datetime.datetime.utcfromtimestamp(ncfile.Time)
            _time['data'] = np.array([ncfile.FractionalTime])
            _time['units'] = make_time_unit_str(start_time)

            _latitude['data'] = np.array(ncfile.Latitude)
            _longitude['data'] = np.array(ncfile.Longitude)
            _altitude['data'] = np.array([ncfile.Height], 'float64')

            _range['data'] = (ncfile.RangeToFirstGate +
                              ncfile.variables['GateWidth'][0] *
                              (np.arange(_mygate - 1) + 0.5))
            _sweep_mode['data'] = np.array(['ppi'])
            _azimuth['data'] = np.array(ncfile.variables['Azimuth'][:])
            _fixed_angle['data'] = np.array(n_elev)
            _elevation['data'] = np.array(n_rays[n] * [n_elev[n]])

            _sweep_number['data'] = np.arange(len(fdict), dtype='int32')
            _sweep_start_ray_index['data'] = np.cumsum(
                np.append([0], n_rays[:-1]).astype('int32'))
            _sweep_end_ray_index['data'] = np.cumsum(n_rays).astype(
                'int32') - 1

            # copy meta data once

            metadata_mapping = {
                'vcp-value': 'vcp-value',
                'radarName-value': 'instrument_name',
            }

            for netcdf_attr, metadata_key in metadata_mapping.items():
                if netcdf_attr in ncfile.ncattrs():
                    print(metadata_key, ncfile.getncattr(netcdf_attr))
                    _metadata[metadata_key] = ncfile.getncattr(netcdf_attr)

        # Okay do the big stuff.

        _dict = get_metadata(pvar)
        _dict['data'] = np.ma.array(ncfile.variables[ncvar][:, 0:_mygate - 1])

        if 'MissingData' in ncfile.ncattrs():
            _dict['data'][_dict['data'] == ncfile.MissingData] = np.ma.masked
        if 'RangeFolded' in ncfile.ncattrs():
            _dict['data'][_dict['data'] == ncfile.RangeFolded] = np.ma.masked

        _dict['units'] = ncfile.getncattr('Unit-value')

        if _debug > 299:
            print(ncfile.variables[ncvar][:, 0:_mygate - 1].shape)
            print(_dict['data'].shape)

        _fields[pvar] = _dict

    # With elevation and azimuth in the radar object, lets recalculate
    # gate latitude, longitude and altitude,

    if _debug > 0:
        print('LOAD_PPI:  Volume mean time:  ', start_time)

    if _debug > 100:
        print('LOAD_PPI: final field dictionary: \n', _fields)
        print('LOAD_PPI: ngates.shape: ', _range['data'].shape)
        print('LOAD_PPI: nrays.shape: ', _azimuth['data'].shape)
        print('LOAD_PPI: sweep_start/stop: ', _sweep_start_ray_index['data'],
              _sweep_end_ray_index['data'])
        print('LOAD_PPI: sweeps: ', _sweep_number['data'])

    return Radar(_time, _range, _fields, _metadata, _scan_type,
                 _latitude, _longitude, _altitude,
                 _sweep_number, _sweep_mode, _fixed_angle,
                 _sweep_start_ray_index, _sweep_end_ray_index,
                 _azimuth, _elevation, instrument_parameters=None)
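
A hedged usage sketch (file and variable names are hypothetical; each dict
pairs one MRMS NetCDF file with the Py-ART field name it maps to):

fdict = [
    {'file': 'mrms_tilt01_refl.nc', 'ncvar': 'Reflectivity',
     'pvar': 'reflectivity'},
    {'file': 'mrms_tilt01_vel.nc', 'ncvar': 'Velocity',
     'pvar': 'velocity'},
]
radar = load_mrms_ppi(fdict)
print(list(radar.fields.keys()))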
Example #19
def main():
    """
    """
    file_base = '/store/msrad/radar/pyrad_products/rad4alp_hydro_PHA/'
    time_dir_list = ['2017-06-29']
    trt_cell_id = '2017062913000174'
    hres = 250

    datatype_list = ['dBZc', 'ZDRc', 'RhoHVc', 'KDPc', 'TEMP', 'hydro']
    dataset_list = [
        'reflectivity', 'ZDRc', 'RhoHVc', 'KDPc', 'temperature', 'hydroclass'
    ]

    print("====== Plot time-height started: %s" %
          datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"))
    atexit.register(_print_end_msg, "====== Plot time-height finished: ")

    for time_dir in time_dir_list:
        for i, datatype in enumerate(datatype_list):

            labels = ['50.0-percentile', '25.0-percentile', '75.0-percentile']
            if datatype == 'RhoHVc':
                labels = [
                    '80.0-percentile', '65.0-percentile', '95.0-percentile'
                ]
            elif datatype == 'hydro':
                labels = [
                    'Mode', '2nd most common', '3rd most common',
                    '% points mode', '% points 2nd most common',
                    '% points 3rd most common'
                ]

            dataset = dataset_list[i]
            file_path = file_base + time_dir + '/' + dataset + '_trt_traj/PROFILE/'
            flist = glob.glob(file_path + '*_' + trt_cell_id +
                              '_rhi_profile_*_' + datatype + '_hres' +
                              str(hres) + '.csv')

            if not flist:
                warn('No profile files found in ' + file_path +
                     ' for TRT cell ' + trt_cell_id + ' with resolution ' +
                     str(hres))
                continue

            tbin_edges, hbin_edges, data_ma = read_profile_ts(flist,
                                                              labels,
                                                              hres=hres)

            basepath_out = os.path.dirname(flist[0])
            fname = (basepath_out + '/' + trt_cell_id + '_trt_TIME_HEIGHT_' +
                     datatype + '_hres' + str(hres) + '.png')
            field_name = get_fieldname_pyart(datatype)
            field_dict = get_metadata(field_name)
            titl = 'TRT cell ' + trt_cell_id + '\n' + get_field_name(
                field_dict, field_name)

            vmin = vmax = None
            if datatype == 'RhoHVc':
                vmin = 0.95
                vmax = 1.00
            _plot_time_range(tbin_edges,
                             hbin_edges,
                             data_ma,
                             field_name, [fname],
                             titl=titl,
                             figsize=[10, 8],
                             vmin=vmin,
                             vmax=vmax,
                             dpi=72)

            print("----- plot to '%s'" % fname)
Example #20
def dem2radar_data(radar, dem_data, slice_xy=True, field_name='visibility'):
    """
    get the DEM value corresponding to each radar gate using nearest
    neighbour interpolation

    Parameters
    ----------
    radar : Radar
        the radar object containing the information on the position of the
        radar gates
    dem_data : dict
        dictionary containing the DEM data
    slice_xy : boolean
        if true the horizontal plane of the DEM field is cut to the
        dimensions of the radar field
    field_name : str
        name of the DEM field to convert (default 'visibility')

    Returns
    -------
    dem_field : dict
        dictionary with the DEM field and metadata

    """
    # debugging
    # start_time = time.time()

    x_radar, y_radar, _ = _put_radar_in_swiss_coord(radar)

    (x_dem, y_dem, ind_xmin, ind_ymin, ind_xmax,
     ind_ymax) = (_prepare_for_interpolation(x_radar,
                                             y_radar,
                                             dem_data,
                                             slice_xy=slice_xy))

    if field_name not in dem_data:
        warn('DEM field ' + field_name + ' data not available')
        return None

    values = dem_data[field_name]['data'][ind_xmin:ind_xmax + 1,
                                          ind_ymin:ind_ymax + 1]

    # Note: RegularGridInterpolator is ~10x faster than NearestNDInterpolator
    # and has the advantage of not extrapolating outside of the grid domain

    # replace masked values with nans
    values = np.ma.filled(values, np.nan)
    interp_func = RegularGridInterpolator((x_dem, y_dem),
                                          values,
                                          bounds_error=False)

    # interpolate
    data_interp = interp_func((x_radar, y_radar))

    del values
    # restore mask
    data_interp = np.ma.masked_invalid(data_interp)

    # put field
    field_dict = get_metadata(field_name)
    field_dict['data'] = data_interp.astype(float)

    del data_interp

    return field_dict
Example #21
def main():
    """
    """
    # parse the arguments
    parser = argparse.ArgumentParser(
        description='Entry to Pyrad processing framework')

    # positional arguments
    parser.add_argument(
        'proc_cfgfile', type=str, help='name of main configuration file')
    parser.add_argument(
        'starttime', type=str,
        help=('starting time of the data to be processed. '
              'Format YYYYMMDDhhmmss'))
    parser.add_argument(
        'endtime', type=str,
        help='end time of the data to be processed. Format YYYYMMDDhhmmss')

    # keyword arguments
    parser.add_argument(
        '--cfgpath', type=str,
        default=os.path.expanduser('~')+'/pyrad/config/processing/',
        help='configuration file path')

    parser.add_argument(
        '--storepath', type=str,
        default='/store/msrad/radar/pyrad_products/rad4alp_birds_PHA/',
        help='Base data storing path')

    parser.add_argument(
        '--hres', type=int, default=200, help='Height resolution [m]')

    args = parser.parse_args()

    print("====== PYRAD data processing started: %s" %
          datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"))
    atexit.register(_print_end_msg,
                    "====== PYRAD data processing finished: ")

    print('config path: '+args.cfgpath)
    print('config file: '+args.proc_cfgfile)
    print('start time: '+args.starttime)
    print('end time: '+args.endtime)

    proc_starttime = datetime.datetime.strptime(
        args.starttime, '%Y%m%d%H%M%S')
    proc_endtime = datetime.datetime.strptime(
        args.endtime, '%Y%m%d%H%M%S')
    cfgfile_proc = args.cfgpath+args.proc_cfgfile

    pyrad_main(cfgfile_proc, starttime=proc_starttime, endtime=proc_endtime)

    # Plot time-height
    file_base = args.storepath
    hres = args.hres

    datatype_list = [
        'dBZc', 'eta_h', 'bird_density', 'WIND_SPEED', 'WIND_DIRECTION',
        'wind_vel_h_u', 'wind_vel_h_v', 'wind_vel_v']

    startdate = proc_starttime.replace(hour=0, minute=0, second=0, microsecond=0)
    enddate = proc_endtime.replace(hour=0, minute=0, second=0, microsecond=0)
    ndays = int((enddate-startdate).days)+1
    for datatype in datatype_list:
        flist = []
        for i in range(ndays):
            time_dir = (
                proc_starttime+datetime.timedelta(days=i)).strftime('%Y-%m-%d')

            filepath = (
                file_base+time_dir+'/VAD/PROFILE_WIND/' +
                '*_wind_profile_VAD_WIND_hres'+str(hres)+'.csv')
            labels = [
                'u_wind', 'std_u_wind', 'np_u_wind',
                'v_wind', 'std_v_wind', 'np_v_wind',
                'w_wind', 'std_w_wind', 'np_w_wind',
                'mag_h_wind', 'dir_h_wind']
            label_nr = 0
            if datatype == 'dBZc':
                filepath = (
                    file_base+time_dir+'/velFilter/PROFILE_dBZc/' +
                    '*_rhi_profile_*_dBZc_hres'+str(hres)+'.csv')
                labels = [
                    '50.0-percentile', '25.0-percentile', '75.0-percentile']

                # dBZ mean data
                # filepath = (
                #     file_base+time_dir+'/velFilter/PROFILE_dBZc_mean/' +
                #     '*_rhi_profile_*_dBZc_hres'+str(hres)+'.csv')
                # labels = [
                #     'Mean', 'Min', 'Max']

                # dBZ linear mean data
                # filepath = (
                #     file_base+time_dir+'/velFilter/PROFILE_dBZc_linear_mean/' +
                #     '*_rhi_profile_*_dBZc_hres'+str(hres)+'.csv')
                # labels = [
                #     'Mean', 'Min', 'Max']

                # dBZ before filtering with fitted velocity
                # filepath = (
                #     file_base+time_dir+'/echoFilter/PROFILE_dBZc/' +
                #     '*_rhi_profile_*_dBZc_hres'+str(hres)+'.csv')
                # labels = [
                #     '50.0-percentile', '25.0-percentile', '75.0-percentile']
                #
                # dBZ before filtering with fitted velocity. Linear mean
                # filepath = (
                #     file_base+time_dir+'/echoFilter/PROFILE_dBZc_linear_mean/' +
                #     '*_rhi_profile_*_dBZc_hres'+str(hres)+'.csv')
                # labels = [
                #     'Mean', 'Min', 'Max']
            elif datatype == 'eta_h':
                filepath = (
                    file_base+time_dir+'/vol_refl/PROFILE/' +
                    '*_rhi_profile_*_eta_h_hres'+str(hres)+'.csv')
                labels = [
                    '50.0-percentile', '25.0-percentile', '75.0-percentile']

                # mean data
                # filepath = (
                #     file_base+time_dir+'/vol_refl/PROFILE_mean/' +
                #     '*_rhi_profile_*_eta_h_hres'+str(hres)+'.csv')
                # labels = [
                #     'Mean', 'Min', 'Max']
            elif datatype == 'bird_density':
                filepath = (
                    file_base+time_dir+'/bird_density/PROFILE/' +
                    '*_rhi_profile_*_bird_density_hres'+str(hres)+'.csv')
                labels = [
                    '50.0-percentile', '25.0-percentile', '75.0-percentile']

                # mean data
                # filepath = (
                #     file_base+time_dir+'/bird_density/PROFILE_mean/' +
                #     '*_rhi_profile_*_bird_density_hres'+str(hres)+'.csv')
                # labels = [
                #     'Mean', 'Min', 'Max']
            elif datatype == 'WIND_SPEED':
                label_nr = 9
            elif datatype == 'WIND_DIRECTION':
                label_nr = 10
            elif datatype == 'wind_vel_h_u':
                label_nr = 0
            elif datatype == 'wind_vel_h_v':
                label_nr = 3
            elif datatype == 'wind_vel_v':
                label_nr = 6

            flist_aux = glob.glob(filepath)
            if not flist_aux:
                warn('No profile files found in '+filepath)
                continue
            flist.extend(flist_aux)

        if not flist:
            warn('No profile files found')
            continue
        flist.sort()

        field_name = get_fieldname_pyart(datatype)
        field_dict = get_metadata(field_name)
        titl = 'bird retrieval '+args.starttime+'\n'+get_field_name(
            field_dict, field_name)

        tbin_edges, hbin_edges, np_ma, data_ma, t_start = read_profile_ts(
            flist, labels, hres=hres, label_nr=label_nr)

        basepath_out = os.path.dirname(flist[0])
        fname = (
            basepath_out+'/'+args.starttime+'_TIME_HEIGHT_' +
            datatype+'_hres'+str(hres)+'.png')

        vmin = vmax = None
        _plot_time_range(
            tbin_edges, hbin_edges/1000., data_ma, field_name, [fname],
            titl=titl, figsize=[10, 8], vmin=vmin, vmax=vmax, dpi=72)

        print("----- plot to '%s'" % fname)

        # Plot number of points
        field_dict = get_metadata('number_of_samples')
        titl = 'bird retrieval '+args.starttime+'\n'+get_field_name(
            field_dict, 'number_of_samples')

        fname = (
            basepath_out+'/'+args.starttime+'_TIME_HEIGHT_' +
            datatype+'nsamples_hres'+str(hres)+'.png')

        vmin = vmax = None
        _plot_time_range(
            tbin_edges, hbin_edges/1000., np_ma, 'number_of_samples', [fname],
            titl=titl, figsize=[10, 8], vmin=vmin, vmax=vmax, dpi=72)

        print("----- plot to '%s'" % fname)
Example #22
def main():
    """
    """
    # parse the arguments
    parser = argparse.ArgumentParser(
        description='Entry to Pyrad processing framework')

    # positional arguments
    parser.add_argument('days',
                        nargs='+',
                        type=str,
                        help='Dates to process. Format YYYYMMDD')

    # keyword arguments
    parser.add_argument(
        '--basepath',
        type=str,
        default='/store/msrad/radar/pyrad_products/rad4alp_hydro_PHA/',
        help='name of folder containing the radar data')

    parser.add_argument(
        '--datatypes',
        type=str,
        default='hydro,KDPc,dBZc,RhoHVc,TEMP,ZDRc',
        help='Name of the polarimetric moments to process. Comma separated')

    parser.add_argument('--steps',
                        type=str,
                        default='None,0.05,0.5,0.001,1.,0.1',
                        help='Step of the histogram for each data type')

    args = parser.parse_args()

    print("====== LMA trajectories radar data processing started: %s" %
          datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"))
    atexit.register(
        _print_end_msg,
        "====== LMA trajectories radar data processing finished: ")

    day_vec = []
    for day in args.days:
        day_vec.append(datetime.datetime.strptime(day, '%Y%m%d'))

    datatype_vec = args.datatypes.split(',')
    steps = args.steps.split(',')

    if np.size(datatype_vec) != np.size(steps):
        warn(
            str(np.size(datatype_vec)) + ' datatypes but ' +
            str(np.size(steps)) + ' steps. Their number must be equal')
        return

    step_list = []
    for step in steps:
        if step == 'None':
            step_list.append(None)
        else:
            step_list.append(float(step))

    for j, datatype in enumerate(datatype_vec):
        step = step_list[j]

        field_name = get_fieldname_pyart(datatype)
        field_dict = get_metadata(field_name)

        labelx = get_colobar_label(field_dict, field_name)

        values_list = []
        values_first_list = []
        flash_cnt = 0
        source_cnt = 0
        for day in day_vec:
            day_dir = day.strftime('%Y-%m-%d')
            day_str = day.strftime('%Y%m%d')

            fname_test = (args.basepath + day_dir + '/*_traj/AT_FLASH/' +
                          day_str + '*_allflash_ts_trajlightning_' + datatype +
                          '.csv')
            fname_list = glob.glob(fname_test)
            if not fname_list:
                warn('No file found in ' + fname_test)
                continue

            fname = fname_list[0]

            basepath_out = os.path.dirname(fname)
            fname_first_source = (basepath_out + '/' + day_str +
                                  '_firstsource_ts_trajlightning_' + datatype +
                                  '.png')

            fname_all_sources = (basepath_out + '/' + day_str +
                                 '_allsources_ts_trajlightning_' + datatype +
                                 '.png')

            print('\nReading file ' + fname)
            time_flash, flashnr, _, val_at_flash, _, _, _, _ = (
                read_lightning_traj(fname))

            print('N sources: ' + str(flashnr.size))
            source_cnt += flashnr.size

            # Plot all sources histogram
            bins, values = compute_histogram(val_at_flash,
                                             field_name,
                                             step=step)
            print('Valid values: ' + str(values.size))

            values_list.extend(values)

            plot_histogram(bins,
                           values, [fname_all_sources],
                           labelx=labelx,
                           titl=("Trajectory Histogram %s" %
                                 time_flash[0].strftime("%Y-%m-%d")))

            print("----- plot to '%s'" % fname_all_sources)

            # Get and plot first sources histogram
            flashnr_first, unique_ind = np.unique(flashnr, return_index=True)

            print('N flashes: ' + str(flashnr_first.size))
            flash_cnt += flashnr_first.size

            val_first = val_at_flash[unique_ind]
            time_flash_first = time_flash[unique_ind]

            bins, values = compute_histogram(val_first, field_name, step=step)

            values_first_list.extend(values)

            plot_histogram(bins,
                           values, [fname_first_source],
                           labelx=labelx,
                           titl=("Trajectory Histogram First Source %s" %
                                 time_flash_first[0].strftime("%Y-%m-%d")))

            print("----- plot to '%s'" % fname_first_source)

        print('N sources total: ' + str(source_cnt))
        print('N flashes total: ' + str(flash_cnt))

        values_list = np.asarray(values_list)
        values_first_list = np.asarray(values_first_list)

        print('Valid values total: ' + str(values_list.size))
        print('Valid flashes total: ' + str(values_first_list.size))

        # Plot all sources histogram
        fname_all_sources = (args.basepath + 'allsources_ts_trajlightning_' +
                             datatype + '.png')
        plot_histogram(bins,
                       values_list, [fname_all_sources],
                       labelx=labelx,
                       titl="Trajectory Histogram All Sources")

        print("----- plot to '%s'" % fname_all_sources)

        # store histogram
        fname_all_sources = (args.basepath + 'allsources_ts_trajlightning_' +
                             datatype + '.csv')
        hist_values, _ = np.histogram(values_list, bins=bins)
        write_histogram(bins, hist_values, fname_all_sources)
        print('Written ' + fname_all_sources)

        # Plot first source histogram
        fname_first_source = (args.basepath + 'firstsource_ts_trajlightning_' +
                              datatype + '.png')
        plot_histogram(bins,
                       values_first_list, [fname_first_source],
                       labelx=labelx,
                       titl="Trajectory Histogram First Source")

        print("----- plot to '%s'" % fname_first_source)

        # store histogram
        fname_first_source = (args.basepath + 'firstsource_ts_trajlightning_' +
                              datatype + '.csv')
        hist_values_first, _ = np.histogram(values_first_list, bins=bins)
        write_histogram(bins, hist_values_first, fname_first_source)
        print('Written ' + fname_first_source)
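
The first-source selection in the loop above relies on np.unique returning, via return_index=True, the index of the first occurrence of each flash number. A minimal self-contained sketch with synthetic data (all values are made up):

import numpy as np

# Synthetic flash numbers and the radar value at each LMA source.
flashnr = np.array([7, 7, 7, 9, 9, 12])
val_at_flash = np.array([35.0, 38.5, 30.2, 41.0, 39.9, 28.4])

# return_index=True yields the index of the first occurrence of each
# flash number, i.e. the first detected source of every flash.
flashnr_first, unique_ind = np.unique(flashnr, return_index=True)
val_first = val_at_flash[unique_ind]

print(flashnr_first)  # [ 7  9 12]
print(val_first)      # [35.  41.  28.4]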
Example #23
def read_dem(fname,
             field_name='terrain_altitude',
             fill_value=None,
             projparams=None):
    """
    Generic DEM reader that infers the proper format-specific reader from
    the filename extension.

    Parameters
    ----------
    fname : str
        name of the file to read
    field_name : str
        name of the variable to read
    fill_value : float
        The fill value; if not provided it will be inferred from the
        metadata if possible
    projparams : str or int
        projection parameters as used by gdal: either a Proj4 string
        (see epsg.io for a list) or an EPSG number. If not provided the
        projection will be inferred from the file or, for ASCII files,
        LV1903 will be used

    Returns
    -------
    dem_data : dictionary
        dictionary with the data and metadata

    """
    extension = pathlib.Path(fname).suffix

    if isinstance(projparams, int):  # retrieve Proj4 string from EPSG number
        proj = osr.SpatialReference()
        proj.ImportFromEPSG(projparams)
        projparams = proj.ExportToProj4()

    if projparams is not None:
        projparams = _proj4_str_to_dict(projparams)

    if extension in ['.tif', '.tiff', '.gtif']:
        metadata, rasterarray = read_geotiff_data(fname, fill_value)
    elif extension in ['.asc', '.dem', '.txt']:
        metadata, rasterarray = read_ascii_data(fname, fill_value)
    elif extension in ['.rst']:
        metadata, rasterarray = read_idrisi_data(fname, fill_value)
    else:
        warn('Unable to read file {}, extension must be .tif .tiff .gtif, '
             '.asc .dem .txt or .rst'.format(fname))
        return None

    field_dict = get_metadata(field_name)
    field_dict['data'] = rasterarray[::-1, :][None, :, :]
    field_dict['units'] = metadata['value units']

    x = get_metadata('x')
    y = get_metadata('y')
    z = get_metadata('z')

    orig_lat = get_metadata('origin_latitude')
    orig_lon = get_metadata('origin_longitude')
    orig_alt = get_metadata('origin_altitude')

    x['data'] = (np.arange(metadata['columns']) * metadata['resolution'] +
                 metadata['resolution'] / 2. + metadata['min. X'])

    y['data'] = (np.arange(metadata['rows']) * metadata['resolution'] +
                 metadata['resolution'] / 2. + metadata['min. Y'])

    z['data'] = np.array([0])

    orig_lat['data'] = [y['data'][0]]
    orig_lon['data'] = [x['data'][0]]
    orig_alt['data'] = [0]

    if projparams is None:
        projparams = _get_lv1903_wkt()

    time = get_metadata('grid_time')
    time['data'] = np.array([0.0])
    time['units'] = 'seconds since 2000-01-01T00:00:00Z'

    # The use of CRS().to_dict() is required to use GridMapDisplay of Pyart
    # which expects a dict for the projection attribute of the grid
    dem_data = pyart.core.Grid(time, {field_name: field_dict},
                               metadata,
                               orig_lat,
                               orig_lon,
                               orig_alt,
                               x,
                               y,
                               z,
                               projection=projparams)

    return dem_data
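
The x/y coordinate vectors above are cell centres: each axis starts half a resolution step in from the minimum corner of the raster. A small sketch with made-up metadata values:

import numpy as np

# Assumed metadata, as read_dem would parse from the DEM header.
metadata = {'columns': 4, 'rows': 3, 'resolution': 100.0,
            'min. X': 500000.0, 'min. Y': 100000.0}

# Cell-centre coordinates: corner + half a cell, stepping by the resolution.
x = (np.arange(metadata['columns']) * metadata['resolution'] +
     metadata['resolution'] / 2. + metadata['min. X'])
y = (np.arange(metadata['rows']) * metadata['resolution'] +
     metadata['resolution'] / 2. + metadata['min. Y'])

print(x)  # [500050. 500150. 500250. 500350.]
print(y)  # [100050. 100150. 100250.]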
Example #24
def iso2radar_data(radar,
                   iso0_data,
                   time_info,
                   iso0_statistic='avg_by_dist',
                   field_name='height_over_iso0'):
    """
    get the iso0 value corresponding to each radar gate

    Parameters
    ----------
    radar : Radar
        the radar object containing the information on the position of the
        radar gates
    iso0_data : dict
        dictionary containing the iso0 data and metadata from the model
    time_info : datetime object
        reference time of current radar volume
    iso0_statistic : str
        The statistic used to weight the iso0 points. Can be avg_by_dist,
        avg, min, max
    field_name : str
        name of the HZT field to convert (default height_over_iso0)

    Returns
    -------
    field_dict : dict
        dictionary containing the iso0 data in radar coordinates and the
        metadata

    """
    # get the relevant time indices for interpolation in time
    time_index = np.argmin(abs(iso0_data['fcst_time'] - time_info))
    if time_info > iso0_data['fcst_time'][time_index]:
        time_index_future = time_index + 1
        time_index_past = time_index
    else:
        time_index_future = time_index
        time_index_past = time_index - 1

    # interpolate the iso0 ref in time
    if time_index_past == -1:
        # no interpolation: use data from time_index_future
        iso0_ref = get_iso0_ref(radar,
                                iso0_data,
                                time_index_future,
                                statistic=iso0_statistic)
    elif time_index_future >= iso0_data['fcst_time'].size:
        # no interpolation: use data from time_index_past
        iso0_ref = get_iso0_ref(radar,
                                iso0_data,
                                time_index_past,
                                statistic=iso0_statistic)
    else:
        # interpolate between two time steps
        iso0_ref_past = get_iso0_ref(radar,
                                     iso0_data,
                                     time_index_past,
                                     statistic=iso0_statistic)
        iso0_ref_future = get_iso0_ref(radar,
                                       iso0_data,
                                       time_index_future,
                                       statistic=iso0_statistic)

        # put time in seconds from past forecast
        time_info_s = (
            time_info -
            iso0_data['fcst_time'][time_index_past]).total_seconds()
        fcst_time_s = (
            iso0_data['fcst_time'][time_index_future] -
            iso0_data['fcst_time'][time_index_past]).total_seconds()
        iso0_ref = np.interp(time_info_s, [0, fcst_time_s],
                             [iso0_ref_past, iso0_ref_future])

    print('iso0_ref:', iso0_ref)

    # put field
    field_dict = get_metadata(field_name)
    field_dict['data'] = radar.gate_altitude['data'] - iso0_ref

    return field_dict
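
The time interpolation above boils down to a one-dimensional np.interp between the two bracketing forecast steps. A self-contained sketch with assumed timestamps and iso0 heights:

import datetime
import numpy as np

# Assumed forecast times bracketing the radar volume time.
fcst_time = np.array([datetime.datetime(2020, 6, 1, 12),
                      datetime.datetime(2020, 6, 1, 13)])
time_info = datetime.datetime(2020, 6, 1, 12, 20)

iso0_ref_past, iso0_ref_future = 3200.0, 3500.0  # assumed iso0 heights [m]

# Express the volume time in seconds from the past forecast step.
time_info_s = (time_info - fcst_time[0]).total_seconds()
fcst_time_s = (fcst_time[1] - fcst_time[0]).total_seconds()

iso0_ref = np.interp(time_info_s, [0, fcst_time_s],
                     [iso0_ref_past, iso0_ref_future])
print(iso0_ref)  # 3300.0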
Example #25
def main():
    """
    """

    # parse the arguments
    parser = argparse.ArgumentParser(
        description='Entry to Pyrad processing framework')

    # positional arguments
    parser.add_argument('proc_cfgfile',
                        type=str,
                        help='name of main configuration file')

    parser.add_argument('days',
                        nargs='+',
                        type=str,
                        help='Dates to process. Format YYYY-MM-DD')

    # keyword arguments
    parser.add_argument('--trtbase',
                        type=str,
                        default='/store/msrad/radar/trt/',
                        help='name of folder containing the TRT cell data')

    parser.add_argument(
        '--radarbase',
        type=str,
        default='/store/msrad/radar/pyrad_products/rad4alp_hydro_PHA/',
        help='name of folder containing the radar data')

    parser.add_argument('--cfgpath',
                        type=str,
                        default=os.path.expanduser('~') +
                        '/pyrad/config/processing/',
                        help='configuration file path')

    parser.add_argument(
        '--datatypes',
        type=str,
        default='hydro,KDPc,dBZc,RhoHVc,TEMP,ZDRc',
        help='Name of the polarimetric moments to process. Comma separated')

    parser.add_argument(
        '--datasets',
        type=str,
        default='hydroclass,KDPc,reflectivity,RhoHVc,temperature,ZDRc',
        help='Name of the directory containing the datasets')

    parser.add_argument('--hres',
                        type=float,
                        default=250.,
                        help='Height resolution')

    args = parser.parse_args()

    print("====== PYRAD TRT data processing started: %s" %
          datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"))
    atexit.register(_print_end_msg,
                    "====== PYRAD TRT data processing finished: ")

    print('config path: ' + args.cfgpath)
    print('config file: ' + args.proc_cfgfile)
    print('trt path: ' + args.trtbase)
    print('radar data path: ' + args.radarbase)

    cfgfile_proc = args.cfgpath + args.proc_cfgfile
    trajtype = 'trt'

    time_dir_list = args.days
    datatype_list = args.datatypes.split(',')
    dataset_list = args.datasets.split(',')

    if np.size(datatype_list) != np.size(dataset_list):
        warn(
            str(np.size(datatype_list)) + ' datatypes but ' +
            str(np.size(dataset_list)) +
            ' dataset directories. Their number must be equal')
        return

    # Find all TRT files in directory
    trt_list = []
    for time_dir in time_dir_list:
        trt_list.extend(
            glob.glob(args.trtbase + time_dir + '/TRTC_cell_plots/All/*.trt'))
        trt_list.extend(
            glob.glob(args.trtbase + time_dir + '/TRTC_cell_plots/Some/*.trt'))

    # Pyrad data processing
    trt_cell_id_list = []
    trt_file_list = []
    for fname in trt_list:
        print('processing TRT cell file ' + fname)
        try:
            infostr = os.path.basename(fname).split('.')[0]
            pyrad_main(cfgfile_proc,
                       trajfile=fname,
                       infostr=infostr,
                       trajtype=trajtype)
            trt_cell_id_list.append(infostr)
            trt_file_list.append(fname)
        except ValueError as ee:
            warn(str(ee))

    # plot time series and get altitude of graupel column
    if 'hydro' in datatype_list:
        cell_ID_list = np.asarray([], dtype=int)
        time_list = np.asarray([], dtype=datetime.datetime)
        lon_list = np.asarray([], dtype=float)
        lat_list = np.asarray([], dtype=float)
        area_list = np.asarray([], dtype=float)
        rank_list = np.asarray([], dtype=float)
        rm_hmin_list = np.ma.asarray([], dtype=float)
        rm_hmax_list = np.ma.asarray([], dtype=float)

    for i, trt_cell_id in enumerate(trt_cell_id_list):
        print('\n\nPost-processing cell: ' + trt_cell_id)
        dt_str = trt_cell_id[0:12]
        dt_cell = datetime.datetime.strptime(dt_str, "%Y%m%d%H%M")
        time_dir = dt_cell.strftime("%Y-%m-%d")
        for j, datatype in enumerate(datatype_list):
            dataset = dataset_list[j]
            file_base2 = args.radarbase + time_dir + '/' + dataset + '_trt_traj/'

            field_name = get_fieldname_pyart(datatype)
            field_dict = get_metadata(field_name)
            titl = 'TRT cell ' + trt_cell_id + '\n' + get_field_name(
                field_dict, field_name)

            # plot time-height
            flist = glob.glob(file_base2 + 'PROFILE/*_' + trt_cell_id +
                              '_rhi_profile_*_' + datatype + '_hres' +
                              str(int(args.hres)) + '.csv')
            if not flist:
                warn('No profile files found in ' + file_base2 +
                     'PROFILE/ for TRT cell ' + trt_cell_id +
                     ' with resolution ' + str(args.hres))
            else:
                labels = [
                    '50.0-percentile', '25.0-percentile', '75.0-percentile'
                ]
                if datatype == 'RhoHVc':
                    labels = [
                        '80.0-percentile', '65.0-percentile', '95.0-percentile'
                    ]
                elif datatype == 'hydro':
                    labels = [
                        'Mode', '2nd most common', '3rd most common',
                        '% points mode', '% points 2nd most common',
                        '% points 3rd most common'
                    ]
                elif datatype == 'entropy' or 'prop' in datatype:
                    labels = ['Mean', 'Min', 'Max']

                tbin_edges, hbin_edges, _, data_ma, start_time = (
                    read_profile_ts(flist, labels, hres=args.hres))

                basepath_out = os.path.dirname(flist[0])
                fname = (basepath_out + '/' + trt_cell_id +
                         '_trt_TIME_HEIGHT_' + datatype + '_hres' +
                         str(args.hres) + '.png')

                vmin = vmax = None
                if datatype == 'RhoHVc':
                    vmin = 0.95
                    vmax = 1.00

                xlabel = ('time (s from ' +
                          start_time.strftime("%Y-%m-%d %H:%M:%S") + ')')
                _plot_time_range(tbin_edges,
                                 hbin_edges,
                                 data_ma,
                                 field_name, [fname],
                                 titl=titl,
                                 xlabel=xlabel,
                                 ylabel='height (m MSL)',
                                 figsize=[10, 8],
                                 vmin=vmin,
                                 vmax=vmax,
                                 dpi=72)

                print("----- plot to '%s'" % fname)

                # Get min and max altitude of graupel/hail area
                if datatype == 'hydro':
                    (traj_ID, yyyymmddHHMM, lon, lat, _, _, _, area, _, _, _,
                     RANKr, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _,
                     _) = read_trt_traj_data(trt_file_list[i])

                    hmin, hmax = get_graupel_column(tbin_edges, hbin_edges,
                                                    data_ma, start_time,
                                                    yyyymmddHHMM)

                    cell_ID_list = np.append(cell_ID_list, traj_ID)
                    time_list = np.append(time_list, yyyymmddHHMM)
                    lon_list = np.append(lon_list, lon)
                    lat_list = np.append(lat_list, lat)
                    area_list = np.append(area_list, area)
                    rank_list = np.append(rank_list, RANKr)
                    rm_hmin_list = np.ma.append(rm_hmin_list, hmin)
                    rm_hmax_list = np.ma.append(rm_hmax_list, hmax)

            # plot time-hist
            flist = glob.glob(file_base2 + 'HISTOGRAM/*_' + trt_cell_id +
                              '_histogram_*_' + datatype + '.csv')

            if not flist:
                warn('No histogram files found in ' + file_base2 +
                     'HISTOGRAM/ for TRT cell ' + trt_cell_id)
            else:
                tbin_edges, bin_edges, data_ma, start_time = read_histogram_ts(
                    flist, datatype)

                basepath_out = os.path.dirname(flist[0])
                fname = (basepath_out + '/' + trt_cell_id + '_trt_HISTOGRAM_' +
                         datatype + '.png')

                data_ma[data_ma == 0.] = np.ma.masked
                xlabel = ('time (s from ' +
                          start_time.strftime("%Y-%m-%d %H:%M:%S") + ')')
                _plot_time_range(tbin_edges,
                                 bin_edges,
                                 data_ma,
                                 'frequency_of_occurrence', [fname],
                                 titl=titl,
                                 xlabel=xlabel,
                                 ylabel=get_colobar_label(
                                     field_dict, field_name),
                                 vmin=0.,
                                 vmax=np.max(data_ma),
                                 figsize=[10, 8],
                                 dpi=72)

                print("----- plot to '%s'" % fname)

            # plot quantiles
            flist = glob.glob(file_base2 + 'QUANTILES/*_' + trt_cell_id +
                              '_quantiles_*_' + datatype + '.csv')

            if not flist:
                warn('No quantiles files found in ' + file_base2 +
                     'QUANTILES/ for TRT cell ' + trt_cell_id)
                continue

            tbin_edges, qbin_edges, data_ma, start_time = read_quantiles_ts(
                flist, step=5., qmin=0., qmax=100.)

            basepath_out = os.path.dirname(flist[0])
            fname = (basepath_out + '/' + trt_cell_id + '_trt_QUANTILES_' +
                     datatype + '.png')

            vmin = vmax = None
            if datatype == 'RhoHVc':
                vmin = 0.95
                vmax = 1.00
            xlabel = ('time (s from ' +
                      start_time.strftime("%Y-%m-%d %H:%M:%S") + ')')
            _plot_time_range(tbin_edges,
                             qbin_edges,
                             data_ma,
                             field_name, [fname],
                             titl=titl,
                             xlabel=xlabel,
                             ylabel='Quantile',
                             vmin=vmin,
                             vmax=vmax,
                             figsize=[10, 8],
                             dpi=72)

            print("----- plot to '%s'" % fname)

    if 'hydro' in datatype_list:
        fname = args.trtbase + 'cell_rimmed_particles_column.csv'
        write_trt_cell_lightning(cell_ID_list, time_list, lon_list, lat_list,
                                 area_list, rank_list, rm_hmin_list,
                                 rm_hmax_list, fname)

        print("----- written to '%s'" % fname)
Example #26
def main():
    """
    """

    # parse the arguments
    parser = argparse.ArgumentParser(
        description='Entry to Pyrad processing framework')

    # keyword arguments
    parser.add_argument('--database',
                        type=str,
                        default='/store/msrad/radar/pyrad_products/',
                        help='base path to the radar data')

    parser.add_argument(
        '--datadirs',
        type=str,
        default=(
            'mals_sha_windmills_point_psr_filtered_WM1_20200304-20200311,'
            'mals_sha_windmills_point_psr_filtered_WM1_20200312-20200315,'
            'mals_sha_windmills_point_psr_filtered_WM1_20200316-20200320,'
            'mals_sha_windmills_point_psr_filtered_WM1_20200321-20200325'),
        help='directories containing data')

    parser.add_argument(
        '--datatypes',
        type=str,
        default='dBuZ,dBuZv,rcs_h,rcs_v,ZDRu,RhoHVu,uPhiDPu,Vu,Wu',
        help='Data types. Comma separated')

    args = parser.parse_args()

    print("====== PYRAD windmill data processing started: %s" %
          datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"))
    atexit.register(_print_end_msg,
                    "====== PYRAD windmill data processing finished: ")

    datadirs = args.datadirs.split(',')
    datatypes = args.datatypes.split(',')

    # Read periods of processing
    for datatype in datatypes:
        first_read = False
        for datadir in datadirs:
            # Read data time series files
            flist = glob.glob(args.database + datadir + '/' + datatype +
                              '_TS/TS/ts_POINT_MEASUREMENT_hist_' + datatype +
                              '.csv')
            if not flist:
                continue

            hist_aux, bin_edges_aux = read_histogram(flist[0])
            if not first_read:
                hist = hist_aux
                bin_edges = bin_edges_aux
                first_read = True
                continue

            hist += hist_aux

        if not first_read:
            warn('No files found for ' + datatype)
            continue

        basepath = os.path.dirname(flist[0]) + '/'

        # Histogram plots
        field_name = get_fieldname_pyart(datatype)
        field_dict = get_metadata(field_name)

        fname = args.database + 'ts_POINT_MEASUREMENT_hist_' + datatype + '.png'

        bin_centers = bin_edges[:-1] + ((bin_edges[1] - bin_edges[0]) / 2.)
        fname = plot_histogram2(bin_centers,
                                hist, [fname],
                                labelx=get_colobar_label(
                                    field_dict, field_name),
                                titl=datatype)
        print('Plotted ' + ' '.join(fname))

        fname = args.database + 'ts_POINT_MEASUREMENT_hist_' + datatype + '.csv'
        fname = write_histogram(bin_edges, hist, fname)
        print('Written ' + fname)
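
Summing the per-period histograms above is only valid because every file is assumed to share the same bin edges; plotting then needs bin centres rather than edges. A minimal sketch of both steps:

import numpy as np

# Two per-period histograms on identical bin edges (assumed).
bin_edges = np.arange(0., 60., 10.)   # [ 0. 10. 20. 30. 40. 50.]
hist_a = np.array([1, 4, 6, 3, 0])
hist_b = np.array([0, 2, 5, 5, 1])

hist = hist_a + hist_b                # aggregated counts

# Bin centres for plotting: shift the left edges by half a (uniform) bin.
bin_centers = bin_edges[:-1] + (bin_edges[1] - bin_edges[0]) / 2.
print(bin_centers)  # [ 5. 15. 25. 35. 45.]
print(hist)         # [ 1  6 11  8  1]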
Example #27
def main():
    """
    """

    # parse the arguments
    parser = argparse.ArgumentParser(
        description='Entry to Pyrad processing framework')

    # keyword arguments
    parser.add_argument('--database',
                        type=str,
                        default='/store/msrad/radar/pyrad_products/',
                        help='base path to the radar data')

    parser.add_argument(
        '--datadirs',
        type=str,
        default=(
            'mals_sha_windmills_point_psr_filtered_WM1_20200304-20200311,'
            'mals_sha_windmills_point_psr_filtered_WM1_20200312-20200315,'
            'mals_sha_windmills_point_psr_filtered_WM1_20200316-20200320,'
            'mals_sha_windmills_point_psr_filtered_WM1_20200321-20200325'),
        help='directories containing data')

    parser.add_argument(
        '--datatypes',
        type=str,
        default='dBuZ,dBuZv,rcs_h,rcs_v,uPhiDPu,RhoHVu,ZDRu,Vu,Wu',
        help='Data types. Comma separated')

    parser.add_argument(
        '--orientations',
        type=str,
        default=
        '0,10,20,30,40,50,60,70,80,90,100,110,120,130,140,150,160,170,180,190,200,210,220,230,240,250,260,270,280,290,300,310,320,330,340,350',
        help='Orientation with respect to the radar')

    parser.add_argument('--span', type=float, default=10., help='Span')

    parser.add_argument('--vel_limit',
                        type=float,
                        default=0.,
                        help='Velocity limit')

    args = parser.parse_args()

    print("====== PYRAD windmill data processing started: %s" %
          datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"))
    atexit.register(_print_end_msg,
                    "====== PYRAD windmill data processing finished: ")

    datadirs = args.datadirs.split(',')
    datatypes = args.datatypes.split(',')

    orientations = np.asarray(args.orientations.split(','), dtype=float)
    speeds = [
        'speed_GT' + str(args.vel_limit), 'speed_LE' + str(args.vel_limit)
    ]

    scan_type = 'ppi'

    for ori in orientations:
        for speed in speeds:
            for datatype in datatypes:
                first_read = False
                for datadir in datadirs:
                    # Read data time series files
                    flist = glob.glob(args.database + datadir + '/' +
                                      datatype + '_TS/TS/' + datatype +
                                      '_span' + str(args.span) + '_ori' +
                                      str(ori) + '_' + speed + '_hist.csv')

                    if not flist:
                        continue

                    hist_aux, bin_edges_aux = read_histogram(flist[0])
                    if not first_read:
                        hist = hist_aux
                        bin_edges = bin_edges_aux
                        first_read = True
                        continue

                    hist += hist_aux

                if not first_read:
                    warn('No files for orientation ' + str(ori) + ' and ' +
                         speed)
                    continue

                # Histogram plots
                field_name = get_fieldname_pyart(datatype)
                field_dict = get_metadata(field_name)

                fname = (args.database + datatype + '_span' + str(args.span) +
                         '_ori' + str(ori) + '_' + speed + '_hist.png')

                titl = (datatype + ' span ' + str(args.span) + ' ori ' +
                        str(ori) + ' ' + speed)

                bin_centers = bin_edges[:-1] + (
                    (bin_edges[1] - bin_edges[0]) / 2.)
                fname = plot_histogram2(bin_centers,
                                        hist, [fname],
                                        labelx=get_colobar_label(
                                            field_dict, field_name),
                                        titl=titl)
                print('Plotted ' + ' '.join(fname))

                fname = (args.database + datatype + '_span' + str(args.span) +
                         '_ori' + str(ori) + '_' + speed + '_hist.csv')
                fname = write_histogram(bin_edges, hist, fname)
                print('Written ' + fname)
Example #28
def cosmo2radar_coord(radar, cosmo_coord, slice_xy=True, slice_z=False,
                      field_name=None):
    """
    Given the radar coordinates find the nearest COSMO model pixel

    Parameters
    ----------
    radar : Radar
        the radar object containing the information on the position of the
        radar gates
    cosmo_coord : dict
        dictionary containing the COSMO coordinates
    slice_xy : boolean
        if true the horizontal plane of the COSMO field is cut to the
        dimensions of the radar field
    slice_z : boolean
        if true the vertical plane of the COSMO field is cut to the dimensions
        of the radar field
    field_name : str
        name of the field

    Returns
    -------
    cosmo_ind_field : dict
        dictionary containing a field of COSMO indices and metadata

    """
    # debugging
    # start_time = time.time()

    # parse the field parameters
    if field_name is None:
        field_name = get_field_name('cosmo_index')

    x_radar, y_radar, z_radar = _put_radar_in_swiss_coord(radar)

    (x_cosmo, y_cosmo, z_cosmo, ind_xmin, ind_ymin, ind_zmin, ind_xmax,
     ind_ymax, _) = _prepare_for_interpolation(
         x_radar, y_radar, z_radar, cosmo_coord, slice_xy=slice_xy,
         slice_z=slice_z)

    print('Generating tree')
    # default scipy compact_nodes and balanced_tree = True
    tree = cKDTree(
        np.transpose((z_cosmo, y_cosmo, x_cosmo)), compact_nodes=False,
        balanced_tree=False)
    print('Tree generated')
    _, ind_vec = tree.query(np.transpose(
        (z_radar.flatten(), y_radar.flatten(), x_radar.flatten())), k=1)

    # put the index in the original cosmo coordinates
    nx_cosmo = len(cosmo_coord['x']['data'])
    ny_cosmo = len(cosmo_coord['y']['data'])

    nx = ind_xmax-ind_xmin+1
    ny = ind_ymax-ind_ymin+1

    ind_z = (ind_vec/(nx*ny)).astype(int)+ind_zmin
    ind_y = ((ind_vec-nx*ny*ind_z)/nx).astype(int)+ind_ymin
    ind_x = ((ind_vec-nx*ny*ind_z) % nx).astype(int)+ind_xmin
    ind_cosmo = (ind_x+nx_cosmo*ind_y+nx_cosmo*ny_cosmo*ind_z).astype(int)

    cosmo_ind_field = get_metadata(field_name)
    cosmo_ind_field['data'] = ind_cosmo.reshape(radar.nrays, radar.ngates)

    # debugging
    # print(" generating COSMO indices takes %s seconds " %
    #      (time.time() - start_time))

    return cosmo_ind_field
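
The index arithmetic above unpacks the flat kd-tree index on the sliced (z, y, x) sub-grid and repacks it as a flat index on the full COSMO grid. A small self-contained check of that arithmetic (dimensions and offsets are made up; slice_z=False implies ind_zmin=0):

import numpy as np

# Sub-grid dimensions after horizontal slicing (slice_z=False -> ind_zmin=0).
nx, ny, nz = 4, 3, 2
ind_xmin, ind_ymin, ind_zmin = 5, 2, 0
nx_cosmo, ny_cosmo = 10, 8          # full COSMO grid dimensions

ind_vec = np.arange(nx * ny * nz)   # every flat kd-tree index of the sub-grid

# Same arithmetic as above: unpack (z, y, x) and add the slice offsets.
ind_z = (ind_vec / (nx * ny)).astype(int) + ind_zmin
ind_y = ((ind_vec - nx * ny * ind_z) / nx).astype(int) + ind_ymin
ind_x = ((ind_vec - nx * ny * ind_z) % nx).astype(int) + ind_xmin

# Repack as a flat index on the full (z, y, x) C-ordered grid.
ind_cosmo = (ind_x + nx_cosmo * ind_y + nx_cosmo * ny_cosmo * ind_z).astype(int)

# Spot check: sub-grid point (iz=1, iy=2, ix=3) maps to the full grid.
k = 1 * (nx * ny) + 2 * nx + 3
expected = (3 + ind_xmin) + nx_cosmo * (2 + ind_ymin) + nx_cosmo * ny_cosmo * 1
assert ind_cosmo[k] == expected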
Example #29
def cosmo2radar_data(radar, cosmo_coord, cosmo_data, time_index=0,
                     slice_xy=True, slice_z=False,
                     field_names=['temperature'], dtype=np.float32):
    """
    get the COSMO value corresponding to each radar gate using nearest
    neighbour interpolation

    Parameters
    ----------
    radar : Radar
        the radar object containing the information on the position of the
        radar gates
    cosmo_coord : dict
        dictionary containing the COSMO coordinates
    cosmo_data : dict
        dictionary containing the COSMO data
    time_index : int
        index of the forecasted data
    slice_xy : boolean
        if true the horizontal plane of the COSMO field is cut to the
        dimensions of the radar field
    slice_z : boolean
        if true the vertical plane of the COSMO field is cut to the dimensions
        of the radar field
    field_names : list of str
        names of the COSMO fields to convert (default ['temperature'])
    dtype : numpy data type object
        the data type of the output data

    Returns
    -------
    cosmo_fields : list of dict
        list of dictionary with the COSMO fields and metadata

    """
    # debugging
    # start_time = time.time()

    x_radar, y_radar, z_radar = _put_radar_in_swiss_coord(radar)

    (x_cosmo, y_cosmo, z_cosmo, ind_xmin, ind_ymin, ind_zmin, ind_xmax,
     ind_ymax, ind_zmax) = _prepare_for_interpolation(
         x_radar, y_radar, z_radar, cosmo_coord, slice_xy=slice_xy,
         slice_z=slice_z)

    cosmo_fields = []
    for field in field_names:
        if field not in cosmo_data:
            warn('COSMO field '+field+' data not available')
        else:
            values = cosmo_data[field]['data'][
                time_index, ind_zmin:ind_zmax+1, ind_ymin:ind_ymax+1,
                ind_xmin:ind_xmax+1].flatten()
            # find interpolation function
            tree_options = {
                'compact_nodes': False,
                'balanced_tree': False
            }
            interp_func = NearestNDInterpolator(
                (z_cosmo, y_cosmo, x_cosmo), values,
                tree_options=tree_options)

            del values

            # interpolate
            data_interp = interp_func((z_radar, y_radar, x_radar))

            # put field
            field_dict = get_metadata(field)
            field_dict['data'] = data_interp.astype(dtype)
            cosmo_fields.append({field: field_dict})

            del data_interp

    if not cosmo_fields:
        warn('COSMO data not available')
        return None

    return cosmo_fields
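
The per-field interpolation above is scipy's NearestNDInterpolator: a cKDTree built once on the model grid points, then queried at every radar gate. A minimal sketch with a four-point grid (all coordinates and values made up):

import numpy as np
from scipy.interpolate import NearestNDInterpolator

# Assumed model grid points (z, y, x) and a value at each point.
z = np.array([0., 0., 1000., 1000.])
y = np.array([0., 5000., 0., 5000.])
x = np.array([0., 0., 5000., 5000.])
values = np.array([10., 12., 4., 6.])

interp_func = NearestNDInterpolator(
    (z, y, x), values,
    tree_options={'compact_nodes': False, 'balanced_tree': False})

# Query at an arbitrary gate position; the value of the nearest grid
# point (here z=0, y=5000, x=0) is returned.
print(interp_func((100., 4000., 500.)))  # 12.0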
Example #30
def grib2radar_data(radar,
                    iso0_data,
                    time_info,
                    time_interp=True,
                    field_name='height_over_iso0'):
    """
    get the iso0 value corresponding to each radar gate

    Parameters
    ----------
    radar : Radar
        the radar object containing the information on the position of the
        radar gates
    iso0_data : dict
        dictionary containing the iso0 data and metadata from the model
    time_info : datetime object
        reference time of current radar volume
    time_interp : bool
        if True the iso0 field is interpolated in time between the two
        forecasts bracketing the radar volume time
    field_name : str
        name of the HZT field to convert (default height_over_iso0)

    Returns
    -------
    field_dict : dict
        dictionary containing the iso0 data in radar coordinates and the
        metadata

    """
    time_index = np.argmin(abs(iso0_data['fcst_time'] - time_info))

    if time_interp:
        # get the relevant time indices for interpolation in time
        if time_info > iso0_data['fcst_time'][time_index]:
            time_index_future = time_index + 1
            time_index_past = time_index
        else:
            time_index_future = time_index
            time_index_past = time_index - 1

        # interpolate the iso0 ref in time
        if time_index_past == -1:
            # no interpolation: use data from time_index_future
            iso0_ref = iso0_data['values'][time_index_future, :, :]
        elif time_index_future >= iso0_data['fcst_time'].size:
            # no interpolation: use data from time_index_past
            iso0_ref = iso0_data['values'][time_index_past, :, :]
        else:
            # put time in seconds from past forecast
            time_info_s = (
                time_info -
                iso0_data['fcst_time'][time_index_past]).total_seconds()
            fcst_time_s = (
                iso0_data['fcst_time'][time_index_future] -
                iso0_data['fcst_time'][time_index_past]).total_seconds()

            # interpolate between two time steps
            fcst_time = np.array([0, fcst_time_s])
            values = iso0_data['values'][time_index_past:time_index_future +
                                         1, :, :]
            f = interp1d(fcst_time, values, axis=0, assume_sorted=True)

            iso0_ref = f(time_info_s)
    else:
        iso0_ref = iso0_data['values'][time_index, :, :]

    x_iso, y_iso = geographic_to_cartesian_aeqd(iso0_data['lons'],
                                                iso0_data['lats'],
                                                radar.longitude['data'][0],
                                                radar.latitude['data'][0])

    # find interpolation function
    interp_func = NearestNDInterpolator(
        list(zip(x_iso.flatten(), y_iso.flatten())), iso0_ref.flatten())

    # interpolate
    data_interp = interp_func(
        (radar.gate_x['data'].flatten(), radar.gate_y['data'].flatten()))
    data_interp = data_interp.reshape((radar.nrays, radar.ngates))

    # put field
    field_dict = get_metadata(field_name)
    field_dict['data'] = radar.gate_altitude['data'] - data_interp

    return field_dict
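
With time_interp=True and both bracketing forecasts available, whole 2-D iso0 fields are interpolated in time with scipy's interp1d along axis 0. A self-contained sketch of that step:

import numpy as np
from scipy.interpolate import interp1d

# Two assumed iso0 height fields [m MSL] at t=0 s and t=3600 s.
values = np.stack([np.full((2, 2), 3000.), np.full((2, 2), 3600.)])
fcst_time = np.array([0., 3600.])

# Linear interpolation of the full field along the time axis.
f = interp1d(fcst_time, values, axis=0, assume_sorted=True)
print(f(1800.))  # [[3300. 3300.]
                 #  [3300. 3300.]]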