Example #1
def process_radar(filename, domain, weight, outdir, gatefilter=None,
                  debug=False, verbose=False):
    """
    Grid a single radar file onto the given domain and write the result as
    an MMCG NetCDF file.
    """

    if verbose:
        print('Processing file: {}'.format(os.path.basename(filename)))

    # Read radar data
    radar = read(filename, exclude_fields=EXCLUDE_FIELDS)

    # Create gatefilter from significant detection unless one was supplied
    if gatefilter is None:
        gatefilter = GateFilter(radar)
        gatefilter.exclude_below(SD_FIELD, 1, op='or', inclusive=False)

    if debug:
        print('Number of sweeps: {}'.format(radar.nsweeps))

    # Grid radar data
    grid = grid_radar(
        radar, domain, weight=weight, fields=FIELDS, gatefilter=gatefilter,
        toa=TOA, max_range=MAX_RANGE, gqi_field=None, legacy=True,
        debug=debug, verbose=verbose)

    # Add new metadata
    _add_metadata(grid, filename)

    # ARM file name protocols
    date_stamp = datetimes_from_radar(radar).min().strftime('%Y%m%d.%H%M%S')
    fname = 'nexradwsr88d{}{}.{}.{}.cdf'.format(QF, FN, DL, date_stamp)

    # Write MMCG NetCDF file
    grid_io.write_grid(
        os.path.join(outdir, fname), grid, format=FORMAT,
        write_proj_coord_sys=False, proj_coord_sys=None,
        arm_time_variables=True, write_point_x_y_z=False,
        write_point_lon_lat_alt=False)

    return
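A minimal driver sketch for the routine above, assuming the module-level
constants (EXCLUDE_FIELDS, FIELDS, and so on) plus a gridding `domain` and
`weight` are already defined; the directory paths are hypothetical.

import os

inpdir = '/data/nexrad'  # hypothetical input directory
outdir = '/data/mmcg'    # hypothetical output directory

# Grid every radar file in the input directory
for fname in sorted(os.listdir(inpdir)):
    process_radar(os.path.join(inpdir, fname), domain, weight, outdir,
                  verbose=True)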
Example #2
def _loop_over_dict(json_file, pickle_file, inpdir=None, outdir=None, verbose=False, debug=False):
    """
    """

    # Parse files from JSON
    with open(json_file, "r") as fid:
        files = json.load(fid)

    if inpdir is not None:
        files = [os.path.join(inpdir, f) for f in files]

    # Loop over all files
    for f in files:

        # Read radar data
        radar = read(f, exclude_fields=EXCLUDE_FIELDS)

        if VCP_SWEEPS is not None and radar.nsweeps != VCP_SWEEPS:
            continue
        if VCP_RAYS is not None and radar.nrays != VCP_RAYS:
            continue

        if verbose:
            print "Processing file %s" % os.path.basename(f)

        # Determine significant detection of the radar
        gatefilter = noise.velocity_coherency(
            radar,
            gatefilter=None,
            num_bins=BINS_VDOP_COHER,
            limits=LIMITS_VDOP_COHER,
            texture_window=(3, 3),
            texture_sample=5,
            min_sigma=None,
            max_sigma=None,
            nyquist=None,
            rays_wrap_around=False,
            remove_salt=REMOVE_SALT,
            salt_window=SALT_WINDOW,
            salt_sample=SALT_SAMPLE,
            fill_value=None,
            verbose=verbose,
        )
        gatefilter = noise.spectrum_width_coherency(
            radar,
            gatefilter=gatefilter,
            num_bins=BINS_SW_COHER,
            limits=LIMITS_SW_COHER,
            texture_window=(3, 3),
            texture_sample=5,
            min_sigma=None,
            max_sigma=None,
            rays_wrap_around=False,
            remove_salt=REMOVE_SALT,
            salt_window=SALT_WINDOW,
            salt_sample=SALT_SAMPLE,
            fill_value=None,
            verbose=verbose,
        )
        gatefilter = noise.significant_detection(
            radar,
            gatefilter=gatefilter,
            remove_salt=REMOVE_SALT,
            salt_window=SALT_WINDOW,
            salt_sample=SALT_SAMPLE,
            min_ncp=MIN_NCP,
            detect_field=None,
            verbose=verbose,
        )

        # Compute histogram counts for each field
        geo.height_histogram_from_radar(
            radar,
            HIST_DICT,
            gatefilter=gatefilter,
            min_ncp=MIN_NCP,
            min_sweep=MIN_SWEEP,
            max_sweep=MAX_SWEEP,
            min_range=MIN_RANGE,
            max_range=MAX_RANGE,
            fill_value=None,
            ncp_field=NCP_FIELD,
            verbose=verbose,
            debug=debug,
        )

    # Parse bin edges and histogram counts
    bin_edges = HIST_DICT["bin edges"]
    counts = HIST_DICT["histogram counts"]

    # Compute normalized histogram and probability density
    # Add these to the histogram dictionary
    counts_norm = counts.astype(np.float64) / counts.max()
    pdf = counts_norm / np.sum(counts_norm * np.diff(bin_edges))
    HIST_DICT["normalized histogram"] = counts_norm
    HIST_DICT["probability density"] = pdf

    # Include other parameters in the histogram dictionary
    HIST_DICT["radar files"] = files
    HIST_DICT["min sweep"] = MIN_SWEEP
    HIST_DICT["max sweep"] = MAX_SWEEP
    HIST_DICT["min range"] = MIN_RANGE
    HIST_DICT["max range"] = MAX_RANGE
    HIST_DICT["sweeps in VCP"] = VCP_SWEEPS
    HIST_DICT["rays in VCP"] = VCP_RAYS
    HIST_DICT["min NCP"] = MIN_NCP

    # Pickle histogram data
    geo._pickle_histograms(HIST_DICT, pickle_file, outdir=outdir)

    return
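The normalization at the end deserves a note: dividing the peak-normalized
counts by sum(counts_norm * diff(bin_edges)) turns them into a proper
probability density, i.e. one that integrates to unity over the bins. A
self-contained check with synthetic counts:

import numpy as np

# Synthetic histogram: 5 bins with uneven widths
bin_edges = np.array([0.0, 1.0, 2.0, 4.0, 7.0, 10.0])
counts = np.array([10.0, 40.0, 30.0, 15.0, 5.0])

# Same normalization as in _loop_over_dict above
counts_norm = counts / counts.max()
pdf = counts_norm / np.sum(counts_norm * np.diff(bin_edges))

# The density integrates to one over the bins
print(np.sum(pdf * np.diff(bin_edges)))  # -> 1.0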
Example #3
    if args.verbose:
        print('MAX_RANGE -> {} km'.format(MAX_RANGE))

    # Parse files to plot
    files = [os.path.join(args.inpdir, f) for f in
             sorted(os.listdir(args.inpdir)) if args.stamp in f]

    if args.verbose:
        print('Number of files to plot: {}'.format(len(files)))

    # Loop over all files
    for filename in files:

        if args.verbose:
            print('Plotting file: {}'.format(os.path.basename(filename)))

        # Read radar data
        radar = read(filename, exclude_fields=EXCLUDE_FIELDS)

        start = time.time()

        # Call desired plotting function
        multipanel(radar, args.outdir, dpi=args.dpi, debug=args.debug,
                   verbose=args.verbose)

        # Record elapsed time
        if args.verbose:
            elapsed = time.time() - start
            print('Elapsed time to save plot: {:.0f} sec'.format(elapsed))
Example #4
def _loop_over_dict(
        json_file, pickle_file, inpdir=None, outdir=None, verbose=False,
        debug=False):
    """
    """

    # Parse files from JSON
    with open(json_file, 'r') as fid:
        files = json.load(fid)

    if inpdir is not None:
        files = [os.path.join(inpdir, f) for f in files]

    # Loop over all files
    for f in files:

        # Read radar data
        radar = read(f, exclude_fields=EXCLUDE_FIELDS)

        if VCP_SWEEPS is not None and radar.nsweeps != VCP_SWEEPS:
            continue
        if VCP_RAYS is not None and radar.nrays != VCP_RAYS:
            continue

        if verbose:
            print('Processing file %s' % os.path.basename(f))

        # Determine significant detection of the radar
        gatefilter = noise.velocity_coherency(
            radar, gatefilter=None, num_bins=BINS_VDOP_COHER,
            limits=LIMITS_VDOP_COHER, texture_window=(3, 3),
            texture_sample=5, min_sigma=None, max_sigma=None, nyquist=None,
            rays_wrap_around=False, remove_salt=REMOVE_SALT,
            salt_window=SALT_WINDOW, salt_sample=SALT_SAMPLE, fill_value=None,
            verbose=verbose)
        gatefilter = noise.spectrum_width_coherency(
            radar, gatefilter=gatefilter, num_bins=BINS_SW_COHER,
            limits=LIMITS_SW_COHER, texture_window=(3, 3), texture_sample=5,
            min_sigma=None, max_sigma=None, rays_wrap_around=False,
            remove_salt=REMOVE_SALT, salt_window=SALT_WINDOW,
            salt_sample=SALT_SAMPLE, fill_value=None, verbose=verbose)
        gatefilter = noise.significant_detection(
            radar, gatefilter=gatefilter, remove_salt=REMOVE_SALT,
            salt_window=SALT_WINDOW, salt_sample=SALT_SAMPLE, min_ncp=MIN_NCP,
            detect_field=None, verbose=verbose)

        # Compute histogram counts for each field
        geo.height_histogram_from_radar(
            radar, HIST_DICT, gatefilter=gatefilter, min_ncp=MIN_NCP,
            min_sweep=MIN_SWEEP, max_sweep=MAX_SWEEP, min_range=MIN_RANGE,
            max_range=MAX_RANGE, fill_value=None, ncp_field=NCP_FIELD,
            verbose=verbose, debug=debug)

    # Parse bin edges and histogram counts
    bin_edges = HIST_DICT['bin edges']
    counts = HIST_DICT['histogram counts']

    # Compute normalized histogram and probability density
    # Add these to the histogram dictionary
    counts_norm = counts.astype(np.float64) / counts.max()
    pdf = counts_norm / np.sum(counts_norm * np.diff(bin_edges))
    HIST_DICT['normalized histogram'] = counts_norm
    HIST_DICT['probability density'] = pdf

    # Include other parameters in the histogram dictionary
    HIST_DICT['radar files'] = files
    HIST_DICT['min sweep'] = MIN_SWEEP
    HIST_DICT['max sweep'] = MAX_SWEEP
    HIST_DICT['min range'] = MIN_RANGE
    HIST_DICT['max range'] = MAX_RANGE
    HIST_DICT['sweeps in VCP'] = VCP_SWEEPS
    HIST_DICT['rays in VCP'] = VCP_RAYS
    HIST_DICT['min NCP'] = MIN_NCP

    # Pickle histogram data
    geo._pickle_histograms(
        HIST_DICT, pickle_file, outdir=outdir)

    return
Example #5
    if args.verbose:
        print('MAX_RANGE -> {}'.format(MAX_RANGE))

    # Parse radar files to plot
    files = [os.path.join(args.inpdir, f) for f
             in sorted(os.listdir(args.inpdir)) if args.stamp in f]

    if args.verbose:
        print('Number of files to plot: {}'.format(len(files)))

    # Loop over all files
    for filename in files:

        if args.verbose:
            print('Plotting file: {}'.format(os.path.basename(filename)))

        # Read radar data
        radar = read(filename, exclude_fields=None)

        if args.verbose:
            start = time.time()

        # Call desired plotting function
        multipanel(radar, args.outdir, dpi=args.dpi, debug=args.debug,
                   verbose=args.verbose)

        if args.verbose:
            elapsed = time.time() - start
            print('Elapsed time to save plot: {:.0f} sec'.format(elapsed))
Example #6
def map_from_json(
        filename, inpdir=None, vcp_sweeps=None, vcp_rays=None, vcp_gates=None,
        min_ncp=None, use_filter=True, texture_window=(3, 3), texture_sample=5,
        vdop_bins=100, vdop_limits=(0, 20), sw_bins=50, sw_limits=(0, 5),
        remove_salt=True, salt_window=(5, 5), salt_sample=10,
        exclude_fields=None, ncp_field=None, debug=False, verbose=False):
    """
    Compute the non-precipitating frequency (probability) map from the files
    listed in a JSON file. The listed files should span a non-precipitating
    time period, so that (most) echoes present are, by definition, neither
    precipitation nor cloud.

    Parameters
    ----------

    Optional Parameters
    -------------------

    Returns
    -------
    """

    if ncp_field is None:
        ncp_field = get_field_name('normalized_coherent_power')

    # Parse files from JSON file
    with open(filename, 'r') as fid:
        files = json.load(fid)

    # Append input directory if provided
    if inpdir is not None:
        files = [os.path.join(inpdir, f) for f in files]

    if verbose:
        print('Total number of radar files to process = %i' % len(files))

    # Parse non-precipitating frequency map
    if vcp_rays is not None and vcp_gates is not None:
        nonprecip = np.zeros((vcp_rays, vcp_gates), dtype=np.float64)
    else:
        nonprecip = None

    # Loop over all files
    sample_size = 0
    for i, f in enumerate(files):

        # Read radar data
        radar = read(f, exclude_fields=exclude_fields)

        # Check radar VCP parameters
        if vcp_sweeps is not None and radar.nsweeps != vcp_sweeps:
            continue
        if vcp_rays is not None and radar.nrays != vcp_rays:
            continue
        if vcp_gates is not None and radar.ngates != vcp_gates:
            continue

        if verbose:
            print('Processing file %s' % os.path.basename(f))

        # Initialize the non-precipitation frequency map if it does not exist
        if nonprecip is None:
            vcp_sweeps = radar.nsweeps
            vcp_rays = radar.nrays
            vcp_gates = radar.ngates
            nonprecip = np.zeros((vcp_rays, vcp_gates), dtype=np.float64)

            if verbose:
                print('VCP sweeps = {}'.format(vcp_sweeps))
                print('VCP rays = {}'.format(vcp_rays))
                print('VCP gates = {}'.format(vcp_gates))

        # Increase sample size
        sample_size += 1

        # Determine significant detection
        if use_filter:

            # Doppler velocity coherency
            gatefilter = noise.velocity_coherency(
                radar, gatefilter=None, num_bins=vdop_bins, limits=vdop_limits,
                texture_window=texture_window, texture_sample=texture_sample,
                min_sigma=None, max_sigma=None, nyquist=None,
                rays_wrap_around=False, remove_salt=remove_salt,
                salt_window=salt_window, salt_sample=salt_sample,
                fill_value=None, verbose=verbose)

            # Spectrum width coherency
            gatefilter = noise.spectrum_width_coherency(
                radar, gatefilter=gatefilter, num_bins=sw_bins,
                limits=sw_limits, texture_window=texture_window,
                texture_sample=texture_sample, min_sigma=None, max_sigma=None,
                rays_wrap_around=False, remove_salt=remove_salt,
                salt_window=salt_window, salt_sample=salt_sample,
                fill_value=None, verbose=verbose)

            # Significant detection
            gatefilter = noise.significant_detection(
                radar, gatefilter=gatefilter, remove_salt=remove_salt,
                salt_window=salt_window, salt_sample=salt_sample,
                min_ncp=min_ncp, detect_field=None, verbose=verbose)

            # Parse gate filter
            is_coherent = gatefilter.gate_included.astype(np.float64)

        elif min_ncp is not None:
            is_coherent = radar.fields[ncp_field]['data'] >= min_ncp
            is_coherent = np.ma.filled(is_coherent, False).astype(np.float64)

        else:
            raise ValueError('No way to determine significant detection')

        # Increase the non-precipitation map for all coherent gates (pixels)
        nonprecip += is_coherent

    # Compute the probability a gate (pixel) has a valid echo during
    # non-precipitating events
    nonprecip_map = nonprecip / sample_size

    # Add clutter frequency map to (last) radar object
    nonprecip = {
        'data': nonprecip_map,
        'long_name': 'Non-precipitating (clutter) frequency map',
        'standard_name': 'clutter_map',
        'valid_min': 0.0,
        'valid_max': 1.0,
        '_FillValue': None,
        'units': None,
    }
    radar.add_field('clutter_map', nonprecip, replace_existing=False)

    return {
        'non-precipitating map': nonprecip_map,
        'last radar': radar,
        'sample size': sample_size,
        'radar files': [os.path.basename(f) for f in files],
        'vcp_sweeps': vcp_sweeps,
        'vcp_rays': vcp_rays,
        'vcp_gates': vcp_gates,
        'min_ncp': min_ncp,
        'use_filter': use_filter,
        'texture_window': texture_window,
        'texture_sample': texture_sample,
        'remove_salt': remove_salt,
        'salt_window': salt_window,
        'salt_sample': salt_sample,
    }
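At its core the map is an accumulation of per-file boolean detection masks
divided by the number of accepted files. A stripped-down sketch with
synthetic masks (no radar I/O; the shapes are arbitrary):

import numpy as np

# Three synthetic gate-inclusion masks, shape (rays, gates)
masks = [
    np.array([[1, 0], [1, 1]], dtype=np.float64),
    np.array([[1, 0], [0, 1]], dtype=np.float64),
    np.array([[1, 1], [0, 1]], dtype=np.float64),
]

nonprecip = np.zeros((2, 2), dtype=np.float64)
for mask in masks:
    nonprecip += mask

# Fraction of files in which each gate had a coherent echo
nonprecip_map = nonprecip / len(masks)
print(nonprecip_map)  # [[1.0, 0.33...], [0.33..., 1.0]]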
Example #7
def map_date_range(start, stop, stamp, inpdir, date_str='[0-9]{12}',
                   date_fmt='%y%m%d%H%M%S', min_ncp=None, vcp_sweeps=None,
                   vcp_rays=None, exclude_fields=None, refl_field=None,
                   ncp_field=None, debug=False, verbose=False):
    """
    Compute the clutter frequency (probability) map within the specified date
    range. The start and stop times should define a non-precipitating time
    period, so that (most) echoes present are, by definition, clutter.

    Parameters
    ----------

    Optional Parameters
    -------------------

    Returns
    -------
    """

    # Parse field names
    if refl_field is None:
        refl_field = get_field_name('reflectivity')
    if ncp_field is None:
        ncp_field = get_field_name('normalized_coherent_power')

    # Get all files with stamp in directory
    files = [os.path.join(inpdir, f) for f in sorted(os.listdir(inpdir))
             if stamp in f]

    if verbose:
        print('Total number of radar files found = %i' % len(files))

    # Remove files outside date range
    time_str = [re.search(date_str, f).group() for f in files]
    times = [datetime.strptime(string, date_fmt) for string in time_str]
    files = [f for f, t in zip(files, times) if start <= t <= stop]

    if verbose:
        print('Number of radar files within date range = %i' % len(files))

    # Defer allocating the non-precipitation map until the first valid radar
    # is read, since the map shape is (rays, gates)
    nonprecip = None

    # Loop over all files
    sample_size = 0
    for i, f in enumerate(files):

        if verbose:
            print('Processing file %s' % os.path.basename(f))

        # Read radar data
        radar = read(f, exclude_fields=exclude_fields)

        # Check radar VCP
        if vcp_sweeps is not None and radar.nsweeps != vcp_sweeps:
            continue
        if vcp_rays is not None and radar.nrays != vcp_rays:
            continue

        # Initialize the non-precipitation map if not already done
        if nonprecip is None:
            nonprecip = np.zeros((radar.nrays, radar.ngates), dtype=np.float64)

        # Increase sample size
        sample_size += 1

        # Find coherent pixels
        if min_ncp is not None:
            is_coherent = radar.fields[ncp_field]['data'] >= min_ncp
            is_coherent = np.ma.filled(is_coherent, False).astype(np.float64)
        else:
            is_coherent = np.zeros(nonprecip.shape, dtype=np.float64)

        # Find pixels that have a coherent signal
        nonprecip += is_coherent

    # Compute the probability a pixel (gate) has a valid echo during
    # non-precipitating events
    nonprecip_map = nonprecip / sample_size

    # Add clutter frequency map to radar object
    nonprecip = {
        'data': nonprecip_map,
        'long_name': 'Non-precipitating frequency map',
        'standard_name': 'nonprecip_map',
        'valid_min': 0.0,
        'valid_max': 1.0,
        '_FillValue': None,
        'units': None,
    }
    radar.add_field('nonprecip_map', nonprecip, replace_existing=False)

    return {
        'non-precipitating map': nonprecip_map,
        'last radar': radar,
        'sample size': sample_size,
        'radar files': [os.path.basename(f) for f in files],
        'sweeps in VCP': vcp_sweeps,
        'rays in VCP': vcp_rays,
        'min NCP': min_ncp,
    }
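The date-range filter works by pulling a fixed-width timestamp out of each
file name with a regular expression and parsing it with datetime.strptime.
A self-contained sketch using the same defaults (date_str='[0-9]{12}',
date_fmt='%y%m%d%H%M%S') on made-up file names:

import re
from datetime import datetime

date_str = '[0-9]{12}'
date_fmt = '%y%m%d%H%M%S'

# Hypothetical file names carrying yymmddHHMMSS stamps
files = ['KVNX_110520123000.nc', 'KVNX_110520124500.nc',
         'KVNX_110521001500.nc']
start = datetime(2011, 5, 20, 12, 0, 0)
stop = datetime(2011, 5, 20, 13, 0, 0)

times = [datetime.strptime(re.search(date_str, f).group(), date_fmt)
         for f in files]
files = [f for f, t in zip(files, times) if start <= t <= stop]
print(files)  # the first two names only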
Example #8
def histogram_from_json(filename,
                        field,
                        inpdir=None,
                        texture_window=(3, 3),
                        min_sample=5,
                        num_bins=10,
                        limits=None,
                        min_ncp=0.5,
                        vcp_sweeps=None,
                        vcp_rays=None,
                        min_sweep=None,
                        max_sweep=None,
                        min_range=None,
                        max_range=None,
                        rays_wrap_around=False,
                        exclude_fields=None,
                        fill_value=None,
                        ncp_field=None,
                        verbose=False):
    """
    """

    # Parse fill value
    if fill_value is None:
        fill_value = get_fillvalue()

    # Parse field names
    if ncp_field is None:
        ncp_field = get_field_name('normalized_coherent_power')

    # Parse files from JSON file
    with open(filename, 'r') as fid:
        files = json.load(fid)

    # Append input directory if given
    if inpdir is not None:
        files = [os.path.join(inpdir, f) for f in files]

    if verbose:
        print('Total number of radar files to process = %i' % len(files))

    # Parse texture window parameters
    ray_window, gate_window = texture_window

    # Loop over all files
    counts = np.zeros(num_bins, dtype=np.float64)
    for f in files:

        # Read radar data
        radar = read(f, exclude_fields=exclude_fields)

        # Check radar VCP
        if vcp_sweeps is not None and radar.nsweeps != vcp_sweeps:
            continue
        if vcp_rays is not None and radar.nrays != vcp_rays:
            continue

        if verbose:
            print('Processing file %s' % os.path.basename(f))

        # Compute texture fields
        _compute_field(radar,
                       field,
                       ray_window=ray_window,
                       gate_window=gate_window,
                       min_sample=min_sample,
                       min_ncp=min_ncp,
                       min_sweep=min_sweep,
                       max_sweep=max_sweep,
                       min_range=min_range,
                       max_range=max_range,
                       rays_wrap_around=rays_wrap_around,
                       fill_value=fill_value,
                       ncp_field=ncp_field)

        # Parse data and compute histogram
        data = radar.fields['{}_texture'.format(field)]['data']
        hist, bin_edges = np.histogram(data.compressed(),
                                       bins=num_bins,
                                       range=limits,
                                       weights=None,
                                       density=False)
        counts += hist

    # Compute bin centers
    bin_centers = bin_edges[:-1] + np.diff(bin_edges) / 2.0

    # Compute normalized histogram and probability density
    counts_norm = counts / counts.max()
    pdf = counts_norm / np.sum(counts_norm * np.diff(bin_edges))

    return {
        'field': '{}_texture'.format(field),
        'histogram counts': counts,
        'normalized histogram': counts_norm,
        'probability density': pdf,
        'number of bins': num_bins,
        'limits': limits,
        'bin edges': bin_edges,
        'bin centers': bin_centers,
        'radar files': [os.path.basename(f) for f in files],
        'min sweep': min_sweep,
        'max sweep': max_sweep,
        'min range': min_range,
        'max range': max_range,
        'min normalized coherent power': min_ncp,
        'sweeps in VCP': vcp_sweeps,
        'rays in VCP': vcp_rays,
        'ray window size': ray_window,
        'gate window size': gate_window,
    }
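Summing np.histogram counts across files is only valid because bins and
range are held fixed, so every file is binned against identical edges. A
small sketch demonstrating the equivalence with histogramming all the data
at once (synthetic arrays):

import numpy as np

rng = np.random.default_rng(0)
chunks = [rng.uniform(0, 5, size=100) for _ in range(3)]

# Accumulate per-chunk counts against fixed edges
counts = np.zeros(10, dtype=np.float64)
for data in chunks:
    hist, bin_edges = np.histogram(data, bins=10, range=(0, 5))
    counts += hist

# Identical to binning the concatenated data in one call
hist_all, _ = np.histogram(np.concatenate(chunks), bins=10, range=(0, 5))
print(np.array_equal(counts, hist_all))  # True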
Example #9
def _loop_over_dict(
        json_file, pickle_file, inpdir=None, outdir=None, verbose=False):
    """
    """

    # Parse files from JSON file
    with open(json_file, 'r') as fid:
        files = json.load(fid)

    if inpdir is not None:
        files = [os.path.join(inpdir, f) for f in files]

    # Loop over all files
    for f in files:

        # Read radar data
        radar = read(f, exclude_fields=EXCLUDE_FIELDS)

        if VCP_SWEEPS is not None and radar.nsweeps != VCP_SWEEPS:
            continue
        if VCP_RAYS is not None and radar.nrays != VCP_RAYS:
            continue

        if verbose:
            print('Processing file %s' % os.path.basename(f))

        # Determine significant detection of the radar
        gatefilter = noise.velocity_coherency(
            radar, gatefilter=None, num_bins=BINS_VDOP_COHER,
            limits=LIMITS_VDOP_COHER, texture_window=(3, 3),
            texture_sample=5, min_sigma=None, max_sigma=None, nyquist=None,
            rays_wrap_around=False, remove_salt=REMOVE_SALT,
            salt_window=SALT_WINDOW, salt_sample=SALT_SAMPLE, fill_value=None,
            verbose=verbose)
        gatefilter = noise.spectrum_width_coherency(
            radar, gatefilter=gatefilter, num_bins=BINS_SW_COHER,
            limits=LIMITS_SW_COHER, texture_window=(3, 3), texture_sample=5,
            min_sigma=None, max_sigma=None, rays_wrap_around=False,
            remove_salt=REMOVE_SALT, salt_window=SALT_WINDOW,
            salt_sample=SALT_SAMPLE, fill_value=None, verbose=verbose)
        gatefilter = noise.significant_detection(
            radar, gatefilter=gatefilter, remove_salt=REMOVE_SALT,
            salt_window=SALT_WINDOW, salt_sample=SALT_SAMPLE, min_ncp=MIN_NCP,
            detect_field=None, verbose=verbose)

        # Compute histogram counts for each texture field
        texture_fields.histograms_from_radar(
            radar, HIST_DICT, gatefilter=gatefilter,
            texture_window=TEXTURE_WINDOW, texture_sample=TEXTURE_SAMPLE,
            min_ncp=MIN_NCP, min_sweep=MIN_SWEEP, max_sweep=MAX_SWEEP,
            min_range=MIN_RANGE, max_range=MAX_RANGE, rays_wrap_around=False,
            fill_value=None, ncp_field=NCP_FIELD, verbose=verbose)

    # Normalize histograms for each field and compute probability densities
    for field in HIST_DICT:

        # Parse bin edges and histogram counts
        bin_edges = HIST_DICT[field]['bin edges']
        counts = HIST_DICT[field]['histogram counts']

        # Compute normalized histogram and probability density
        # Add these to the histogram dictionary
        counts_norm = counts.astype(np.float64) / counts.max()
        pdf = counts_norm / np.sum(counts_norm * np.diff(bin_edges))
        HIST_DICT[field]['normalized histogram'] = counts_norm
        HIST_DICT[field]['probability density'] = pdf

        # Include other parameters in the histogram dictionary
        HIST_DICT[field]['radar files'] = files
        HIST_DICT[field]['min sweep'] = MIN_SWEEP
        HIST_DICT[field]['max sweep'] = MAX_SWEEP
        HIST_DICT[field]['min range'] = MIN_RANGE
        HIST_DICT[field]['max range'] = MAX_RANGE
        HIST_DICT[field]['sweeps in VCP'] = VCP_SWEEPS
        HIST_DICT[field]['rays in VCP'] = VCP_RAYS
        HIST_DICT[field]['minimum normalized coherent power'] = MIN_NCP

    # Change dictionary field names to include texture
    for field in list(HIST_DICT):
        HIST_DICT['{}_texture'.format(field)] = HIST_DICT.pop(field)

    # Pickle histogram data
    texture_fields._pickle_histograms(
        HIST_DICT, pickle_file, outdir=outdir)

    return
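Note the rename loop near the end: in Python 3 a dict cannot grow or shrink
while one of its live views is being iterated, so the keys are snapshotted
with list() first. A minimal illustration:

# Renaming keys in place requires a snapshot of the keys, since adding or
# removing keys during iteration raises RuntimeError in Python 3
hist = {'reflectivity': 1, 'velocity': 2}
for field in list(hist):
    hist['{}_texture'.format(field)] = hist.pop(field)
print(hist)  # {'reflectivity_texture': 1, 'velocity_texture': 2}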
Example #10
    if args.verbose:
        print("Number of files to process: {}".format(len(files)))

    for filename in files:

        if args.verbose:
            print("Processing file: {}".format(os.path.basename(filename)))

        # Create tar file instance
        tf = tarfile.TarFile(name=filename, mode="r", format=None)

        for member in tf.getmembers():

            if args.verbose:
                print("Plotting file: {}".format(member.name))

            # Read radar data from file object
            radar = read(tf.extractfile(member), exclude_fields=EXCLUDE_FIELDS)

            # Record start time
            start = time.time()

            # Call desired plotting function
            multipanel(radar, args.outdir, dpi=args.dpi, debug=args.debug, verbose=args.verbose)

            # Record elapsed time
            elapsed = time.time() - start
            if args.verbose:
                print("Elapsed time to save plot: {:.0f} sec".format(elapsed))
Example #11
def process_file(filename, outdir, debug=False, verbose=False):
    """
    """

    # Read radar data
    if USE_RADX:
        radar = read_radx(filename)
    else:
        radar = read(filename, exclude_fields=None)

    # Radar VCP check
    if CHECK_VCP:
        if NSWEEPS is not None and radar.nsweeps != NSWEEPS:
            return

    if verbose:
        print('Processing file: {}'.format(os.path.basename(filename)))

    if debug:
        print('Number of sweeps: {}'.format(radar.nsweeps))

    if USE_RADX:
        # Create file metadata object
        meta = FileMetadata(
            'nexrad_archive', field_names=None, additional_metadata=None,
            file_field_names=False, exclude_fields=None)

        # Remove unnecessary fields
        for field in REMOVE_FIELDS:
            radar.fields.pop(field, None)

        # Rename fields to default Py-ART names
        for field in list(radar.fields):
            default_field = meta.get_field_name(field)
            radar.fields[default_field] = radar.fields.pop(field, None)

    # Step 1: Determine radar significant detection
    # Since NEXRAD WSR-88D Level II data is already processed to some degree,
    # this amounts to essentially removing salt and pepper noise
    gf = noise._significant_features(
        radar, REFL_FIELD, gatefilter=None, size_bins=SIZE_BINS,
        size_limits=SIZE_LIMITS, structure=STRUCTURE, remove_size_field=False,
        fill_value=None, size_field=None, debug=debug)
    gf = noise.significant_detection(
        radar, gatefilter=gf, remove_small_features=False, size_bins=SIZE_BINS,
        size_limits=SIZE_LIMITS, fill_holes=FILL_HOLES, dilate=DILATE,
        structure=STRUCTURE, iterations=1, rays_wrap_around=False,
        min_ncp=None, detect_field=None, debug=debug, verbose=verbose)

    # Step 2: Doppler velocity correction
    if DEALIAS == 'phase':
        vdop_corr = dealias_unwrap_phase(
            radar, gatefilter=gf, unwrap_unit='sweep', nyquist_vel=None,
            rays_wrap_around=True, keep_original=False, vel_field=None,
            corr_vel_field=VDOP_CORR_FIELD)
    elif DEALIAS == 'region':
        vdop_corr = dealias_region_based(
            radar, gatefilter=gf, interval_splits=INTERVAL_SPLITS,
            interval_limits=None, skip_between_rays=2, skip_along_ray=2,
            centered=True, nyquist_vel=None, rays_wrap_around=True,
            keep_original=False, vel_field=None,
            corr_vel_field=VDOP_CORR_FIELD)
    else:
        raise ValueError('Unsupported velocity correction routine')

    radar.add_field(VDOP_CORR_FIELD, vdop_corr, replace_existing=True)

    # Step 3: Reflectivity correction
    # Currently no correction procedures are applied to the reflectivity field
    # due to minimal attenuation at S-band
    refl_corr = radar.fields[REFL_FIELD].copy()
    radar.add_field(REFL_CORR_FIELD, refl_corr, replace_existing=True)

    # Step 4: Interpolate missing gates
    basic_fixes.interpolate_missing(
        radar, fields=FILL_FIELDS, interp_window=FILL_WINDOW,
        interp_sample=FILL_SAMPLE, kind='mean', rays_wrap_around=False,
        fill_value=None, debug=debug, verbose=verbose)

    # Add metadata
    _add_metadata(radar, filename)

    # ARM file name protocols
    date_stamp = datetimes_from_radar(radar).min().strftime('%Y%m%d.%H%M%S')
    fname = 'nexradwsr88d{}cmac{}.{}.{}.cdf'.format(QF, FN, DL, date_stamp)

    # Write CMAC NetCDF file
    write_cfradial(os.path.join(outdir, fname), radar, format=FORMAT,
                   arm_time_variables=True)

    return
Example #12
def histogram_from_json(
        filename, field, inpdir=None, bins=10, limits=None, min_ncp=0.5,
        vcp_sweeps=None, vcp_rays=None, min_sweep=None, max_sweep=None,
        exclude_fields=None, fill_value=None, ncp_field=None, verbose=False):
    """
    """

    # Parse fill value
    if fill_value is None:
        fill_value = get_fillvalue()

    # Parse field names
    if ncp_field is None:
        ncp_field = get_field_name('normalized_coherent_power')

    # Parse files from JSON file
    with open(filename, 'r') as fid:
        files = json.load(fid)

    # Append input directory if given
    if inpdir is not None:
        files = [os.path.join(inpdir, f) for f in files]

    if verbose:
        print('Total number of radar files to process = %i' % len(files))

    # Loop over all files
    histogram = np.zeros(bins, dtype=np.float64)
    for f in files:

        # Read radar data
        radar = read(f, exclude_fields=exclude_fields)

        # Check radar VCP
        if vcp_sweeps is not None and radar.nsweeps != vcp_sweeps:
            continue
        if vcp_rays is not None and radar.nrays != vcp_rays:
            continue

        if verbose:
            print('Processing file %s' % os.path.basename(f))

        # Parse radar fields
        data = radar.fields[field]['data']

        # Mask sweeps outside specified range
        if min_sweep is not None:
            i = radar.sweep_start_ray_index['data'][min_sweep]
            data[:i, :] = np.ma.masked
        if max_sweep is not None:
            i = radar.sweep_end_ray_index['data'][max_sweep]
            data[i+1:, :] = np.ma.masked

        # Mask incoherent echoes
        if min_ncp is not None:
            ncp = radar.fields[ncp_field]['data']
            data = np.ma.masked_where(ncp < min_ncp, data)

        # Bin data and compute frequencies
        hist, bin_edges = np.histogram(
            data.compressed(), bins=bins, range=limits, weights=None,
            density=False)
        histogram += hist

    # Compute bin centers
    bin_centers = bin_edges[:-1] + np.diff(bin_edges) / 2.0

    # Compute normalized histogram and probability density
    histogram_norm = histogram / histogram.max()
    pdf = histogram_norm / np.sum(histogram_norm * np.diff(bin_edges))

    return {
        'field': field,
        'histogram counts': histogram,
        'normalized histogram': histogram_norm,
        'probability density': pdf,
        'number of bins': bins,
        'limits': limits,
        'bin edges': bin_edges,
        'bin centers': bin_centers,
        'radar files': [os.path.basename(f) for f in files],
        'min sweep': min_sweep,
        'max sweep': max_sweep,
        'min normalized coherent power': min_ncp,
        'sweeps in VCP': vcp_sweeps,
        'rays in VCP': vcp_rays,
    }
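The sweep masking above leans on Py-ART's ray bookkeeping:
sweep_start_ray_index and sweep_end_ray_index map each sweep to its first
and last ray, so masking whole sweeps reduces to row slicing. A
self-contained sketch with fake index arrays (no Radar object needed):

import numpy as np

# Fake volume: 8 rays x 4 gates, stored as 4 sweeps of 2 rays each
data = np.ma.array(np.arange(32, dtype=np.float64).reshape(8, 4))
sweep_start_ray_index = np.array([0, 2, 4, 6])
sweep_end_ray_index = np.array([1, 3, 5, 7])

min_sweep, max_sweep = 1, 2  # keep sweeps 1 and 2 only

# Mask rays before the first kept sweep and after the last kept sweep
data[:sweep_start_ray_index[min_sweep], :] = np.ma.masked
data[sweep_end_ray_index[max_sweep] + 1:, :] = np.ma.masked

print(data.compressed().size)  # 16 values remain: 4 rays x 4 gates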