Example #1
def _loop_over_dict(
        json_file, pickle_file, inpdir=None, outdir=None, verbose=False,
        debug=False):
    """
    """

    # Parse files from JSON
    with open(json_file, 'r') as fid:
        files = json.load(fid)

    if inpdir is not None:
        files = [os.path.join(inpdir, f) for f in files]

    # Loop over all files
    for f in files:

        # Read radar data
        radar = read(f, exclude_fields=EXCLUDE_FIELDS)

        if VCP_SWEEPS is not None and radar.nsweeps != VCP_SWEEPS:
            continue
        if VCP_RAYS is not None and radar.nrays != VCP_RAYS:
            continue

        if verbose:
            print('Processing file %s' % os.path.basename(f))

        # Determine significant detection of the radar
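        # Each coherency call below refines the same GateFilter: velocity
        # coherency seeds it, spectrum width coherency tightens it, and
        # significant_detection then applies salt removal and the minimum
        # NCP threshold.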
        gatefilter = noise.velocity_coherency(
            radar, gatefilter=None, num_bins=BINS_VDOP_COHER,
            limits=LIMITS_VDOP_COHER, texture_window=(3, 3),
            texture_sample=5, min_sigma=None, max_sigma=None, nyquist=None,
            rays_wrap_around=False, remove_salt=REMOVE_SALT,
            salt_window=SALT_WINDOW, salt_sample=SALT_SAMPLE, fill_value=None,
            verbose=verbose)
        gatefilter = noise.spectrum_width_coherency(
            radar, gatefilter=gatefilter, num_bins=BINS_SW_COHER,
            limits=LIMITS_SW_COHER, texture_window=(3, 3), texture_sample=5,
            min_sigma=None, max_sigma=None, rays_wrap_around=False,
            remove_salt=REMOVE_SALT, salt_window=SALT_WINDOW,
            salt_sample=SALT_SAMPLE, fill_value=None, verbose=verbose)
        gatefilter = noise.significant_detection(
            radar, gatefilter=gatefilter, remove_salt=REMOVE_SALT,
            salt_window=SALT_WINDOW, salt_sample=SALT_SAMPLE, min_ncp=MIN_NCP,
            detect_field=None, verbose=verbose)

        # Compute histogram counts for each field
        geo.height_histogram_from_radar(
            radar, HIST_DICT, gatefilter=gatefilter, min_ncp=MIN_NCP,
            min_sweep=MIN_SWEEP, max_sweep=MAX_SWEEP, min_range=MIN_RANGE,
            max_range=MAX_RANGE, fill_value=None, ncp_field=NCP_FIELD,
            verbose=verbose, debug=debug)

    # Parse bin edges and histogram counts
    bin_edges = HIST_DICT['bin edges']
    counts = HIST_DICT['histogram counts']

    # Compute normalized histogram and probability density
    # Add these to the histogram dictionary
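    # counts_norm scales the peak bin to one; dividing by the discrete
    # integral (the sum of counts_norm times the bin widths) makes the
    # density integrate to one over the bins.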
    counts_norm = counts.astype(np.float64) / counts.max()
    pdf = counts_norm / np.sum(counts_norm * np.diff(bin_edges))
    HIST_DICT['normalized histogram'] = counts_norm
    HIST_DICT['probability density'] = pdf

    # Include other parameters in the histogram dictionary
    HIST_DICT['radar files'] = files
    HIST_DICT['min sweep'] = MIN_SWEEP
    HIST_DICT['max sweep'] = MAX_SWEEP
    HIST_DICT['min range'] = MIN_RANGE
    HIST_DICT['max range'] = MAX_RANGE
    HIST_DICT['sweeps in VCP'] = VCP_SWEEPS
    HIST_DICT['rays in VCP'] = VCP_RAYS
    HIST_DICT['min NCP'] = MIN_NCP

    # Pickle histogram data
    geo._pickle_histograms(
        HIST_DICT, pickle_file, outdir=outdir)

    return
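
A minimal usage sketch (not from the source): the JSON manifest, pickle name, and directories below are hypothetical, and the module-level constants (HIST_DICT, MIN_NCP, etc.) are assumed to be defined as in the example above.

if __name__ == '__main__':
    # Hypothetical paths: the JSON file is expected to contain a list of
    # radar file names; histogram data is pickled to the output directory.
    _loop_over_dict(
        'radar_files.json', 'height_histogram.pkl',
        inpdir='/data/radar', outdir='/data/results', verbose=True)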
Example #2
def process_file(filename, outdir, verbose=False):
    """
    """

    if verbose:
        print('Processing file: {}'.format(os.path.basename(filename)))

    # Read radar data
    radar = read_sigmet(filename, exclude_fields=EXCLUDE_FIELDS)

    # Radar significant detection
    # Includes Doppler velocity coherency, spectrum width coherency, and
    # minimum normalized coherent power
    gf = noise.velocity_coherency(
        radar, gatefilter=None, num_bins=VDOP_COHER_BINS,
        limits=VDOP_COHER_LIMITS, texture_window=TEXTURE_WINDOW,
        texture_sample=TEXTURE_SAMPLE, min_sigma=None, max_sigma=None,
        rays_wrap_around=False, remove_salt=False, fill_value=None,
        vdop_field=VDOP_FIELD, vdop_text_field=None, cohere_field=None,
        verbose=verbose)
    gf = noise.velocity_phasor_coherency(
        radar, gatefilter=gf, num_bins=PHASE_COHER_BINS,
        limits=PHASE_COHER_LIMITS, texture_window=TEXTURE_WINDOW,
        texture_sample=TEXTURE_SAMPLE, min_sigma=None, max_sigma=None,
        rays_wrap_around=False, remove_salt=False, fill_value=None,
        vdop_field=VDOP_FIELD, vdop_phase_field=None, phase_text_field=None,
        cohere_field=None, verbose=verbose)
    gf = noise.spectrum_width_coherency(
        radar, gatefilter=gf, num_bins=SW_COHER_BINS,
        limits=SW_COHER_LIMITS, texture_window=TEXTURE_WINDOW,
        texture_sample=TEXTURE_SAMPLE, min_sigma=None, max_sigma=None,
        rays_wrap_around=False, remove_salt=False, fill_value=None,
        width_field=SW_FIELD, width_text_field=None, cohere_field=None,
        verbose=verbose)
    gf = noise.significant_detection(
        radar, gatefilter=gf, remove_salt=True, salt_window=SALT_WINDOW,
        salt_sample=SALT_SAMPLE, fill_holes=False, dilate=DILATE,
        structure=None, min_ncp=MIN_NCP, ncp_field=NCP_FIELD,
        detect_field=None, verbose=verbose)

    # Compute radar texture fields
    texture_fields.add_textures(
        radar, fields=TEXTURE_FIELDS, gatefilter=None,
        texture_window=TEXTURE_WINDOW, texture_sample=TEXTURE_SAMPLE,
        min_sweep=None, max_sweep=None, min_range=None, max_range=None,
        min_ncp=None, rays_wrap_around=False, fill_value=None,
        ncp_field=NCP_FIELD)

    # Echo classification
    bayes.classify(
        radar, textures=TEXTURES, moments=MOMENTS, heights=HEIGHTS,
        nonprecip_map=None, gatefilter=gf, weights=1.0, class_prob='equal',
        min_inputs=3, zero=ZERO, ignore_inputs=IGNORE_INPUTS, use_insects=True,
        fill_value=None, cloud_field=CLOUD_FIELD, ground_field=GROUND_FIELD,
        insect_field=INSECT_FIELD, ncp_field=NCP_FIELD, verbose=verbose)

    # Filter ground clutter gates
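    # (The class value 1 is assumed here to correspond to the ground
    # clutter category produced by bayes.classify above.)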
    gf.exclude_equal(
        'radar_echo_classification', 1, exclude_masked=True, op='or')

    # Doppler velocity correction
    vdop_corr = dealias_region_based(
        radar, gatefilter=gf, interval_splits=3, interval_limits=None,
        skip_between_rays=2, skip_along_ray=2, centered=True, nyquist_vel=None,
        rays_wrap_around=True, keep_original=False, vel_field=VDOP_FIELD,
        corr_vel_field=CORR_VDOP_FIELD)
    radar.add_field(CORR_VDOP_FIELD, vdop_corr, replace_existing=False)

    # TODO: reflectivity correction

    # Parse metadata
    radar.metadata = _create_metadata(radar, filename)

    # ARM file name protocols
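    # FN and DL are module-level constants assumed to hold the ARM
    # facility designator and data level used in the datastream name.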
    date = datetime_from_radar(radar).strftime('%Y%m%d.%H%M%S')
    filename = 'sgpxsaprppicmac{}.{}.{}.cdf'.format(FN, DL, date)

    # Write CMAC NetCDF file
    write_cfradial(os.path.join(outdir, filename), radar, format=FORMAT,
                   arm_time_variables=True)

    return
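
Likewise, a hedged sketch of driving process_file over a directory of Sigmet files; the glob pattern and directories here are hypothetical.

if __name__ == '__main__':
    import glob
    # Hypothetical input pattern and output directory.
    for fn in sorted(glob.glob('/data/sigmet/*.sigmet')):
        process_file(fn, '/data/cmac', verbose=True)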