# Example 1
def _loop_over_dict(json_file, pickle_file, inpdir=None, outdir=None, verbose=False, debug=False):
    """
    Compute height histogram counts for all radar files listed in a JSON file.

    Each radar volume is read, checked against the expected VCP geometry,
    filtered for significant detection (Doppler velocity and spectrum width
    coherency), and its gates are accumulated into the module-level
    HIST_DICT. Afterwards the counts are normalized, a probability density
    is derived, processing parameters are recorded, and the dictionary is
    pickled to disk.

    Parameters
    ----------
    json_file : str
        JSON file containing the list of radar files to process.
    pickle_file : str
        Output pickle file name for the histogram dictionary.
    inpdir : str, optional
        Directory prepended to each file name parsed from json_file.
    outdir : str, optional
        Directory where the pickle file is written.
    verbose : bool, optional
        True to print progress information.
    debug : bool, optional
        True to print debugging information.
    """

    # Parse files from JSON
    with open(json_file, "r") as fid:
        files = json.load(fid)

    if inpdir is not None:
        files = [os.path.join(inpdir, f) for f in files]

    # Loop over all files
    for f in files:

        # Read radar data
        radar = read(f, exclude_fields=EXCLUDE_FIELDS)

        # Skip volumes that do not match the expected VCP geometry
        if VCP_SWEEPS is not None and radar.nsweeps != VCP_SWEEPS:
            continue
        if VCP_RAYS is not None and radar.nrays != VCP_RAYS:
            continue

        if verbose:
            print("Processing file %s" % os.path.basename(f))

        # Determine significant detection of the radar
        gatefilter = noise.velocity_coherency(
            radar,
            gatefilter=None,
            num_bins=BINS_VDOP_COHER,
            limits=LIMITS_VDOP_COHER,
            texture_window=(3, 3),
            texture_sample=5,
            min_sigma=None,
            max_sigma=None,
            nyquist=None,
            rays_wrap_around=False,
            remove_salt=REMOVE_SALT,
            salt_window=SALT_WINDOW,
            salt_sample=SALT_SAMPLE,
            fill_value=None,
            verbose=verbose,
        )
        gatefilter = noise.spectrum_width_coherency(
            radar,
            gatefilter=gatefilter,
            num_bins=BINS_SW_COHER,
            limits=LIMITS_SW_COHER,
            texture_window=(3, 3),
            texture_sample=5,
            min_sigma=None,
            max_sigma=None,
            rays_wrap_around=False,
            remove_salt=REMOVE_SALT,
            salt_window=SALT_WINDOW,
            salt_sample=SALT_SAMPLE,
            fill_value=None,
            # was hardcoded verbose=True (likely a debugging leftover);
            # made consistent with the sibling noise calls
            verbose=verbose,
        )
        gatefilter = noise.significant_detection(
            radar,
            gatefilter=gatefilter,
            remove_salt=REMOVE_SALT,
            salt_window=SALT_WINDOW,
            salt_sample=SALT_SAMPLE,
            min_ncp=MIN_NCP,
            detect_field=None,
            verbose=verbose,
        )

        # Compute histogram counts for each field
        geo.height_histogram_from_radar(
            radar,
            HIST_DICT,
            gatefilter=gatefilter,
            min_ncp=MIN_NCP,
            min_sweep=MIN_SWEEP,
            max_sweep=MAX_SWEEP,
            min_range=MIN_RANGE,
            max_range=MAX_RANGE,
            fill_value=None,
            ncp_field=NCP_FIELD,
            verbose=verbose,
            debug=debug,
        )

    # Parse bin edges and histogram counts
    bin_edges = HIST_DICT["bin edges"]
    counts = HIST_DICT["histogram counts"]

    # Compute normalized histogram and probability density
    # Add these to the histogram dictionary
    counts_norm = counts.astype(np.float64) / counts.max()
    pdf = counts_norm / np.sum(counts_norm * np.diff(bin_edges))
    HIST_DICT["normalized histogram"] = counts_norm
    HIST_DICT["probability density"] = pdf

    # Include other parameters in the histogram dictionary
    HIST_DICT["radar files"] = files
    HIST_DICT["min sweep"] = MIN_SWEEP
    HIST_DICT["max sweep"] = MAX_SWEEP
    HIST_DICT["min range"] = MIN_RANGE
    HIST_DICT["max range"] = MAX_RANGE
    HIST_DICT["sweeps in VCP"] = VCP_SWEEPS
    HIST_DICT["rays in VCP"] = VCP_RAYS
    HIST_DICT["min NCP"] = MIN_NCP

    # Pickle histogram data
    geo._pickle_histograms(HIST_DICT, pickle_file, outdir=outdir)

    return
# Example 2
def _loop_over_dict(
        json_file, pickle_file, inpdir=None, outdir=None, verbose=False,
        debug=False):
    """
    Compute height histogram counts for all radar files listed in a JSON file.

    Each radar volume is read, checked against the expected VCP geometry,
    filtered for significant detection (Doppler velocity and spectrum width
    coherency), and its gates are accumulated into the module-level
    HIST_DICT. Afterwards the counts are normalized, a probability density
    is derived, processing parameters are recorded, and the dictionary is
    pickled to disk.

    Parameters
    ----------
    json_file : str
        JSON file containing the list of radar files to process.
    pickle_file : str
        Output pickle file name for the histogram dictionary.
    inpdir : str, optional
        Directory prepended to each file name parsed from json_file.
    outdir : str, optional
        Directory where the pickle file is written.
    verbose : bool, optional
        True to print progress information.
    debug : bool, optional
        True to print debugging information.
    """

    # Parse files from JSON
    with open(json_file, 'r') as fid:
        files = json.load(fid)

    if inpdir is not None:
        files = [os.path.join(inpdir, f) for f in files]

    # Loop over all files
    for f in files:

        # Read radar data
        radar = read(f, exclude_fields=EXCLUDE_FIELDS)

        # Skip volumes that do not match the expected VCP geometry
        if VCP_SWEEPS is not None and radar.nsweeps != VCP_SWEEPS:
            continue
        if VCP_RAYS is not None and radar.nrays != VCP_RAYS:
            continue

        if verbose:
            print('Processing file %s' % os.path.basename(f))

        # Determine significant detection of the radar
        gatefilter = noise.velocity_coherency(
            radar, gatefilter=None, num_bins=BINS_VDOP_COHER,
            limits=LIMITS_VDOP_COHER, texture_window=(3, 3),
            texture_sample=5, min_sigma=None, max_sigma=None, nyquist=None,
            rays_wrap_around=False, remove_salt=REMOVE_SALT,
            salt_window=SALT_WINDOW, salt_sample=SALT_SAMPLE, fill_value=None,
            verbose=verbose)
        # Note: verbose was hardcoded to True here (likely a debugging
        # leftover); made consistent with the sibling noise calls
        gatefilter = noise.spectrum_width_coherency(
            radar, gatefilter=gatefilter, num_bins=BINS_SW_COHER,
            limits=LIMITS_SW_COHER, texture_window=(3, 3), texture_sample=5,
            min_sigma=None, max_sigma=None, rays_wrap_around=False,
            remove_salt=REMOVE_SALT, salt_window=SALT_WINDOW,
            salt_sample=SALT_SAMPLE, fill_value=None, verbose=verbose)
        gatefilter = noise.significant_detection(
            radar, gatefilter=gatefilter, remove_salt=REMOVE_SALT,
            salt_window=SALT_WINDOW, salt_sample=SALT_SAMPLE, min_ncp=MIN_NCP,
            detect_field=None, verbose=verbose)

        # Compute histogram counts for each field
        geo.height_histogram_from_radar(
            radar, HIST_DICT, gatefilter=gatefilter, min_ncp=MIN_NCP,
            min_sweep=MIN_SWEEP, max_sweep=MAX_SWEEP, min_range=MIN_RANGE,
            max_range=MAX_RANGE, fill_value=None, ncp_field=NCP_FIELD,
            verbose=verbose, debug=debug)

    # Parse bin edges and histogram counts
    bin_edges = HIST_DICT['bin edges']
    counts = HIST_DICT['histogram counts']

    # Compute normalized histogram and probability density
    # Add these to the histogram dictionary
    counts_norm = counts.astype(np.float64) / counts.max()
    pdf = counts_norm / np.sum(counts_norm * np.diff(bin_edges))
    HIST_DICT['normalized histogram'] = counts_norm
    HIST_DICT['probability density'] = pdf

    # Include other parameters in the histogram dictionary
    HIST_DICT['radar files'] = files
    HIST_DICT['min sweep'] = MIN_SWEEP
    HIST_DICT['max sweep'] = MAX_SWEEP
    HIST_DICT['min range'] = MIN_RANGE
    HIST_DICT['max range'] = MAX_RANGE
    HIST_DICT['sweeps in VCP'] = VCP_SWEEPS
    HIST_DICT['rays in VCP'] = VCP_RAYS
    HIST_DICT['min NCP'] = MIN_NCP

    # Pickle histogram data
    geo._pickle_histograms(
        HIST_DICT, pickle_file, outdir=outdir)

    return
# Example 3
def process_file(filename, outdir, dl='b1', verbose=False):
    """
    Run the KAZR CMAC pipeline on a single radar file.

    Determines significant detection (Doppler velocity coherency plus a
    Hildebrand noise floor estimate), dealiases the Doppler velocity field,
    adds metadata, and writes the corrected volume as an ARM-compliant
    CF/Radial NetCDF file.

    Parameters
    ----------
    filename : str
        Radar file to process.
    outdir : str
        Output directory for the CMAC NetCDF file.
    dl : str, optional
        ARM data level designation used in the output file name.
    verbose : bool, optional
        True to print progress information.
    """

    if verbose:
        print('Processing file: {}'.format(os.path.basename(filename)))

    # Read radar data
    radar = read_kazr(filename, exclude_fields=None)

    # Step 1: Radar significant detection
    # Includes Hildebrand noise floor estimate and Doppler velocity coherency
    gf = noise.velocity_coherency(
        radar, gatefilter=None, num_bins=VDOP_COHER_BINS,
        limits=VDOP_COHER_LIMITS, texture_window=TEXTURE_WINDOW,
        texture_sample=TEXTURE_SAMPLE, min_sigma=None, max_sigma=None,
        nyquist=None, rays_wrap_around=None, remove_salt=False,
        fill_value=None, vdop_field=VDOP_FIELD, vdop_text_field=None,
        cohere_field=None, verbose=verbose)
    gf = noise.hildebrand_noise(
        radar, gatefilter=gf, scale=1.0, remove_salt=False,
        rays_wrap_around=False, fill_value=None, power_field=POWER_FIELD,
        noise_field=None, verbose=verbose)
    gf = noise.significant_detection(
        radar, gatefilter=gf, min_ncp=None, remove_salt=True,
        salt_window=SALT_WINDOW, salt_sample=SALT_SAMPLE, fill_holes=False,
        dilate=False, structure=None, rays_wrap_around=False, ncp_field=None,
        detect_field=None, verbose=verbose)

    # Step 2: Doppler velocity correction
    if DEALIAS == 'phase':
        vdop_corr = dealias_unwrap_phase(
            radar, gatefilter=gf, unwrap_unit='sweep', nyquist_vel=None,
            rays_wrap_around=False, keep_original=False, skip_checks=True,
            vel_field=VDOP_FIELD, corr_vel_field=None)

    elif DEALIAS == 'region':
        vdop_corr = dealias_region_based(
            radar, gatefilter=gf, interval_splits=INTERVAL_SPLITS,
            interval_limits=None, skip_between_rays=2, skip_along_ray=2,
            centered=True, nyquist_vel=None, rays_wrap_around=False,
            keep_original=False, vel_field=VDOP_FIELD, corr_vel_field=None)

    else:
        raise ValueError('Unsupported velocity correction routine')

    radar.add_field(CORR_VDOP_FIELD, vdop_corr, replace_existing=True)

    # TODO
    # Step 3: Reflectivity correction

    # Parse metadata
    radar.metadata = _create_metadata(radar, filename)

    # ARM file name protocols
    # Use a new local name instead of rebinding the filename parameter
    date = datetime_from_radar(radar).strftime('%Y%m%d.%H%M%S')
    fname = 'sgpkazrgecmacC1.{}.{}.cdf'.format(dl, date)

    # Write CMAC NetCDF file
    write_cfradial(os.path.join(outdir, fname), radar, format=FORMAT,
                   arm_time_variables=True)

    return
# Example 4
def process_file(filename, outdir, debug=False, verbose=False):
    """
    Run the NEXRAD WSR-88D CMAC pipeline on a single radar file.

    Reads a Level II volume (optionally via Radx), checks the VCP, removes
    salt-and-pepper noise to determine significant detection, dealiases the
    Doppler velocity field, copies the reflectivity field, interpolates
    missing gates, and writes the result as an ARM-compliant CF/Radial
    NetCDF file.

    Parameters
    ----------
    filename : str
        Radar file to process.
    outdir : str
        Output directory for the CMAC NetCDF file.
    debug : bool, optional
        True to print debugging information.
    verbose : bool, optional
        True to print progress information.
    """

    # Read radar data
    if USE_RADX:
        radar = read_radx(filename)
    else:
        radar = read(filename, exclude_fields=None)

    # Radar VCP check
    if CHECK_VCP:
        if NSWEEPS is not None and radar.nsweeps != NSWEEPS:
            return

    if verbose:
        print('Processing file: {}'.format(os.path.basename(filename)))

    if debug:
        print('Number of sweeps: {}'.format(radar.nsweeps))

    if USE_RADX:
        # Create file metadata object
        meta = FileMetadata(
            'nexrad_archive', field_names=None, additional_metadata=None,
            file_field_names=False, exclude_fields=None)

        # Remove unnecessary fields
        for field in REMOVE_FIELDS:
            radar.fields.pop(field, None)

        # Rename fields to default Py-ART names
        # Iterate over a snapshot of the keys since the dictionary is
        # mutated (pop/insert) inside the loop
        for field in list(radar.fields.keys()):
            default_field = meta.get_field_name(field)
            radar.fields[default_field] = radar.fields.pop(field, None)

    # Step 1: Determine radar significant detection
    # Since NEXRAD WSR-88D Level II data is already processed to some degree,
    # this amounts to essentially removing salt and pepper noise
    gf = noise._significant_features(
        radar, REFL_FIELD, gatefilter=None, size_bins=SIZE_BINS,
        size_limits=SIZE_LIMITS, structure=STRUCTURE, remove_size_field=False,
        fill_value=None, size_field=None, debug=debug)
    gf = noise.significant_detection(
        radar, gatefilter=gf, remove_small_features=False, size_bins=SIZE_BINS,
        size_limits=SIZE_LIMITS, fill_holes=FILL_HOLES, dilate=DILATE,
        structure=STRUCTURE, iterations=1, rays_wrap_around=False,
        min_ncp=None, detect_field=None, debug=debug, verbose=verbose)

    # Step 2: Doppler velocity correction
    if DEALIAS == 'phase':
        vdop_corr = dealias_unwrap_phase(
            radar, gatefilter=gf, unwrap_unit='sweep', nyquist_vel=None,
            rays_wrap_around=True, keep_original=False, vel_field=None,
            corr_vel_field=VDOP_CORR_FIELD)
    elif DEALIAS == 'region':
        vdop_corr = dealias_region_based(
            radar, gatefilter=gf, interval_splits=INTERVAL_SPLITS,
            interval_limits=None, skip_between_rays=2, skip_along_ray=2,
            centered=True, nyquist_vel=None, rays_wrap_around=True,
            keep_original=False, vel_field=None,
            corr_vel_field=VDOP_CORR_FIELD)
    else:
        raise ValueError('Unsupported velocity correction routine')

    radar.add_field(VDOP_CORR_FIELD, vdop_corr, replace_existing=True)

    # Step 3: Reflectivity correction
    # Currently no correction procedures are applied to the reflectivity field
    # due to minimal attenuation at S-band
    refl_corr = radar.fields[REFL_FIELD].copy()
    radar.add_field(REFL_CORR_FIELD, refl_corr, replace_existing=True)

    # Step 4: Interpolate missing gates
    basic_fixes.interpolate_missing(
        radar, fields=FILL_FIELDS, interp_window=FILL_WINDOW,
        interp_sample=FILL_SAMPLE, kind='mean', rays_wrap_around=False,
        fill_value=None, debug=debug, verbose=verbose)

    # Add metadata
    _add_metadata(radar, filename)

    # ARM file name protocols
    date_stamp = datetimes_from_radar(radar).min().strftime('%Y%m%d.%H%M%S')
    fname = 'nexradwsr88d{}cmac{}.{}.{}.cdf'.format(QF, FN, DL, date_stamp)

    # Write CMAC NetCDF file
    write_cfradial(os.path.join(outdir, fname), radar, format=FORMAT,
                   arm_time_variables=True)

    return
# Example 5
def process_file(filename, outdir, debug=False, verbose=False):
    """
    Run the C-SAPR surveillance CMAC pipeline on a single radar file.

    Masks the signal-testing gates, determines significant detection
    (velocity coherency, velocity phasor coherency, echo boundaries),
    computes texture fields, dealiases the Doppler velocity field, copies
    the reflectivity field, interpolates missing gates, removes unwanted
    fields, and writes the result as an ARM-compliant CF/Radial NetCDF file.

    Parameters
    ----------
    filename : str
        Radar file to process.
    outdir : str
        Output directory for the CMAC NetCDF file.
    debug : bool, optional
        True to print debugging information.
    verbose : bool, optional
        True to print progress information.
    """

    if verbose:
        print('Processing file: {}'.format(os.path.basename(filename)))

    # Read radar data
    radar = read_mdv(filename, exclude_fields=EXLUDE_FIELDS)

    if debug:
        print('Number of sweeps: {}'.format(radar.nsweeps))

    # Step 1: Remove last 7 gates from each ray
    # These gates are reserved for signal testing only
    for field in radar.fields.keys():
        if verbose:
            print('Removing signal testing gates: {}'.format(field))
        radar.fields[field]['data'][:, -7:] = np.ma.masked

    # Step 2: Radar significant detection
    # Includes Doppler velocity coherency, Doppler velocity phasor coherency,
    # and significant echo boundary detection
    gf = noise.velocity_coherency(
        radar, gatefilter=None, text_bins=VDOP_TEXT_BINS,
        text_limits=VDOP_TEXT_LIMITS, texture_window=TEXTURE_WINDOW,
        texture_sample=TEXTURE_SAMPLE, max_texture=None, nyquist=None,
        rays_wrap_around=False, remove_small_features=False, fill_value=None,
        vdop_field=VDOP_FIELD, text_field=None, coherent_field=None,
        debug=debug, verbose=verbose)
    gf = noise.velocity_phasor_coherency(
        radar, gatefilter=gf, text_bins=PHASOR_TEXT_BINS,
        text_limits=PHASOR_TEXT_LIMITS, texture_window=TEXTURE_WINDOW,
        texture_sample=TEXTURE_SAMPLE, max_texture=None,
        rays_wrap_around=False, remove_small_features=False, fill_value=None,
        vdop_field=VDOP_FIELD, phasor_field=None, text_field=None,
        coherent_field=None, debug=debug, verbose=verbose)
    gf = noise.echo_boundaries(
        radar, gatefilter=gf, texture_window=TEXTURE_WINDOW,
        texture_sample=TEXTURE_SAMPLE, min_texture=None,
        bounds_percentile=BOUNDS_PERCENTILE, remove_small_features=False,
        rays_wrap_around=False, fill_value=None, sqi_field=NCP_FIELD,
        text_field=None, bounds_field=None, debug=debug, verbose=verbose)
    gf = noise.significant_detection(
        radar, gatefilter=gf, remove_small_features=True, size_bins=SIZE_BINS,
        size_limits=SIZE_LIMITS, fill_holes=FILL_HOLES, dilate=DILATE,
        structure=STRUCTURE, iterations=ITERATIONS, min_ncp=MIN_NCP,
        ncp_field=NCP_FIELD, detect_field=None, debug=debug, verbose=verbose)

    # Step 3: Compute radar texture fields
    texture_fields.add_textures(
        radar, fields=TEXTURE_FIELDS, gatefilter=None,
        texture_window=TEXTURE_WINDOW, texture_sample=TEXTURE_SAMPLE,
        min_sweep=None, max_sweep=None, min_range=None, max_range=None,
        min_ncp=None, rays_wrap_around=False, fill_value=None,
        ncp_field=NCP_FIELD)

    # Step 4: Doppler velocity correction
    if DEALIAS.upper() == 'REGION':
        vdop_corr = dealias_region_based(
            radar, gatefilter=gf, interval_splits=INTERVAL_SPLITS,
            interval_limits=None, skip_between_rays=2, skip_along_ray=2,
            centered=True, nyquist_vel=None, rays_wrap_around=True,
            keep_original=False, vel_field=VDOP_FIELD,
            corr_vel_field=CORR_VDOP_FIELD)
    else:
        raise ValueError('Unsupported velocity correction routine')

    radar.add_field(CORR_VDOP_FIELD, vdop_corr, replace_existing=False)

    # TODO
    # Step 5: Reflectivity correction
    refl_corr = radar.fields[REFL_FIELD].copy()
    radar.add_field(CORR_REFL_FIELD, refl_corr, replace_existing=False)

    # Step 6: Interpolate missing gates
    basic_fixes.interpolate_missing(
        radar, fields=FILL_FIELDS, interp_window=FILL_WINDOW,
        interp_sample=FILL_SAMPLE, kind='mean', rays_wrap_around=False,
        fill_value=None, debug=debug, verbose=verbose)

    # Step 7: Remove unwanted fields before writing
    for field in REMOVE_FIELDS:
        if verbose:
            print('Removing radar field before writing: {}'.format(field))
        radar.fields.pop(field, None)

    # Parse metadata
    _add_metadata(radar, filename)

    # ARM file name protocols
    date = datetimes_from_radar(radar).min().strftime('%Y%m%d.%H%M%S')
    fname = 'sgpcsaprsurcmac{}.{}.{}.cdf'.format(FN, DL, date)

    # Write CMAC NetCDF file
    write_cfradial(os.path.join(outdir, fname), radar, format=FORMAT,
                   arm_time_variables=True)

    return
# Example 6
def process_file(filename, outdir, verbose=False):
    """
    Run the X-SAPR PPI CMAC pipeline on a single radar file.

    Determines significant detection (velocity, velocity phasor, and
    spectrum width coherency plus minimum normalized coherent power),
    computes texture fields, performs Bayesian echo classification, filters
    ground clutter, dealiases the Doppler velocity field, adds metadata,
    and writes the result as an ARM-compliant CF/Radial NetCDF file.

    Parameters
    ----------
    filename : str
        Radar file to process.
    outdir : str
        Output directory for the CMAC NetCDF file.
    verbose : bool, optional
        True to print progress information.
    """

    if verbose:
        print('Processing file: {}'.format(os.path.basename(filename)))

    # Read radar data
    radar = read_sigmet(filename, exclude_fields=EXLUDE_FIELDS)

    # Radar significant detection
    # Includes Doppler velocity coherency, spectrum width coherency, and
    # minimum normalized coherent power
    gf = noise.velocity_coherency(
        radar, gatefilter=None, num_bins=VDOP_COHER_BINS,
        limits=VDOP_COHER_LIMITS, texture_window=TEXTURE_WINDOW,
        texture_sample=TEXTURE_SAMPLE, min_sigma=None, max_sigma=None,
        rays_wrap_around=False, remove_salt=False, fill_value=None,
        vdop_field=VDOP_FIELD, vdop_text_field=None, cohere_field=None,
        verbose=verbose)
    gf = noise.velocity_phasor_coherency(
        radar, gatefilter=gf, num_bins=PHASE_COHER_BINS,
        limits=PHASE_COHER_LIMITS, texture_window=TEXTURE_WINDOW,
        texture_sample=TEXTURE_SAMPLE, min_sigma=None, max_sigma=None,
        rays_wrap_around=False, remove_salt=False, fill_value=None,
        vdop_field=VDOP_FIELD, vdop_phase_field=None, phase_text_field=None,
        cohere_field=None, verbose=verbose)
    gf = noise.spectrum_width_coherency(
        radar, gatefilter=gf, num_bins=SW_COHER_BINS,
        limits=SW_COHER_LIMITS, texture_window=TEXTURE_WINDOW,
        texture_sample=TEXTURE_SAMPLE, min_sigma=None, max_sigma=None,
        rays_wrap_around=False, remove_salt=False, fill_value=None,
        width_field=SW_FIELD, width_text_field=None, cohere_field=None,
        verbose=verbose)
    gf = noise.significant_detection(
        radar, gatefilter=gf, remove_salt=True, salt_window=SALT_WINDOW,
        salt_sample=SALT_SAMPLE, fill_holes=False, dilate=DILATE,
        structure=None, min_ncp=MIN_NCP, ncp_field=NCP_FIELD,
        detect_field=None, verbose=verbose)

    # Compute radar texture fields
    texture_fields.add_textures(
        radar, fields=TEXTURE_FIELDS, gatefilter=None,
        texture_window=TEXTURE_WINDOW, texture_sample=TEXTURE_SAMPLE,
        min_sweep=None, max_sweep=None, min_range=None, max_range=None,
        min_ncp=None, rays_wrap_around=False, fill_value=None,
        ncp_field=NCP_FIELD)

    # Echo classification
    bayes.classify(
        radar, textures=TEXTURES, moments=MOMENTS, heights=HEIGHTS,
        nonprecip_map=None, gatefilter=gf, weights=1.0, class_prob='equal',
        min_inputs=3, zero=ZERO, ignore_inputs=IGNORE_INPUTS, use_insects=True,
        fill_value=None, cloud_field=CLOUD_FIELD, ground_field=GROUND_FIELD,
        insect_field=INSECT_FIELD, ncp_field=NCP_FIELD, verbose=verbose)

    # Filter ground clutter gates (class value 1 = ground clutter per the
    # exclude_equal call below; confirm against bayes.classify definition)
    gf.exclude_equal(
        'radar_echo_classification', 1, exclude_masked=True, op='or')

    # Doppler velocity correction
    vdop_corr = dealias_region_based(
        radar, gatefilter=gf, interval_splits=3, interval_limits=None,
        skip_between_rays=2, skip_along_ray=2, centered=True, nyquist_vel=None,
        rays_wrap_around=True, keep_original=False, vel_field=VDOP_FIELD,
        corr_vel_field=CORR_VDOP_FIELD)
    radar.add_field(CORR_VDOP_FIELD, vdop_corr, replace_existing=False)

    # TODO: reflectivity correction

    # Parse metadata
    radar.metadata = _create_metadata(radar, filename)

    # ARM file name protocols
    # Use a new local name instead of rebinding the filename parameter
    date = datetime_from_radar(radar).strftime('%Y%m%d.%H%M%S')
    fname = 'sgpxsaprppicmac{}.{}.{}.cdf'.format(FN, DL, date)

    # Write CMAC NetCDF file
    write_cfradial(os.path.join(outdir, fname), radar, format=FORMAT,
                   arm_time_variables=True)

    return