Example #1
0
    async def fetch(cls, site, dt, local=False):
        """Download and parse a NEXRAD Level-II archive volume for *site*.

        Parameters
        ----------
        site : str
            Radar site identifier used to build the download URL.
        dt : datetime.datetime
            Requested volume time; only used for the URL — the
            authoritative volume time is re-read from the file itself.
        local : bool, optional
            If True, fetch from a local development server instead of
            the remote ``_url_base``.

        Returns
        -------
        cls
            A new instance constructed from the list of RadarSweep
            objects extracted from the volume.
        """
        if local:
            url = f"http://127.0.0.1:8000/data/l2raw/{site}{dt.strftime('%Y%m%d_%H%M%S')}_V06"
        else:
            url = f"{_url_base}/{site}/{site}_{dt.strftime('%Y%m%d_%H%M')}"

        _logger.debug(
            f"Downloading radar volume for {site} at {dt.strftime('%d %b %Y %H%M UTC')}"
        )
        # Buffer the downloaded archive in memory so the reader can seek it.
        bio = BytesIO()
        bio.write(await download(url))
        bio.seek(0)

        rfile = read_nexrad_archive(bio)
        # Dealiased velocity volume; sliced per-sweep below for 'velocity'.
        rfile_dealias = dealias_unwrap_phase(rfile)
        # Re-derive the volume reference time from the file's own time-units
        # string (this intentionally overwrites the dt parameter).
        dt = datetime.strptime(rfile.time['units'],
                               'seconds since %Y-%m-%dT%H:%M:%SZ')

        sweeps = []
        for field in rfile.fields.keys():
            for ie, elv in enumerate(rfile.fixed_angle['data']):
                istart, iend = rfile.get_start_end(ie)
                azimuths = rfile.get_azimuth(ie)
                ranges = rfile.range['data']

                nyquist = rfile.get_nyquist_vel(ie)
                # NOTE(review): sweeps with Nyquist < 10 are treated as the
                # long-range (heavily aliased) cuts and their velocity data
                # is skipped — confirm the 10 m/s threshold is appropriate.
                if field == 'velocity' and nyquist < 10:
                    continue
                elif field != 'velocity' and len(sweeps) > 0 and sweeps[
                        -1].elevation == elv and sweeps[-1].field == field:
                    # Check to see if this is a "duplicate" sweep
                    if nyquist > 10:
                        # Assume this is the short-range sweep and ignore it
                        continue
                    else:
                        # Assume that somehow the short-range sweep got put in the file
                        # first and take it out. I don't think this should ever happen.
                        sweeps.pop()

                # Mean azimuthal spacing; the +360 handles a sweep whose
                # azimuths wrap through north (last angle <= first angle).
                saz = azimuths[0]
                eaz = azimuths[
                    -1] if azimuths[-1] > azimuths[0] else azimuths[-1] + 360
                dazim = round((eaz - saz) / len(azimuths), 1)

                # Per-sweep start time: seconds offset of the sweep's first
                # ray from the volume reference time.
                dt_sweep = dt + timedelta(seconds=rfile.time['data'][istart])

                if field == 'velocity':
                    # Rays istart..iend (inclusive) belong to this sweep.
                    field_data = rfile_dealias['data'][istart:(iend + 1)]
                else:
                    field_data = rfile.get_field(ie, field)

                # NOTE(review): the literal 250 is presumably the gate
                # spacing in metres — confirm against RadarSweep's signature.
                rs = RadarSweep(site, dt_sweep, field, elv, azimuths[0],
                                float(ranges[0]), dazim, 250, field_data)
                sweeps.append(rs)
        return cls(sweeps)
def process_NEXRAD(start_hour, end_hour, csv=False):
    """Run the VAD analysis over NEXRAD Level-II files for each radar site.

    For every hour in the half-open range [start_hour, end_hour) and for
    every site in the module-level ``radars`` list, this reads the raw
    archive files for that hour, dealiases the velocity field, computes a
    VAD wind profile with its RMSE, saves a diagnostic plot, and
    optionally exports the profile to CSV.

    Parameters
    ----------
    start_hour, end_hour : datetime.datetime
        Time range to process, advanced one hour per outer iteration.
    csv : bool, optional
        When True, also write each VAD profile to a CSV file.
    """
    td = timedelta(hours=1)

    while start_hour < end_hour:

        for i, site in enumerate(radars):

            # Setup output directories.
            image_dir = "%s/images/%s/%s" % (os.getcwd(), site, start_hour.strftime("%Y%m%d"))
            csv_dir = "%s/csv_vad/%s/%s" % (os.getcwd(), site, start_hour.strftime("%Y%m%d"))

            # exist_ok=True avoids the race between an isdir check and
            # makedirs when several processes run concurrently.
            os.makedirs(image_dir, exist_ok=True)
            os.makedirs(csv_dir, exist_ok=True)

            d = 'D:\\TMBell\\projects\\VADAnalysis\\data\\%s\\raw\\%s' % (site, start_hour.strftime("%Y%m%d"))
            file_glob = "%s%s" % (site, start_hour.strftime("%Y%m%d_%H*"))
            in_files = glob(os.path.join(d, file_glob))

            for in_file in in_files:
                logging.info(in_file)

                try:
                    # Read in the file and dealias the velocity field.
                    radar = io.nexrad_archive.read_nexrad_archive(in_file)
                    logging.info("Dealiasing %s using PyART package..." % in_file)
                    VEL2 = correct.dealias_unwrap_phase(radar)
                    radar.add_field('VEL2', VEL2)

                    # Scan metadata used for plot titles and file names.
                    time = datetime_from_radar(radar)
                    time_str = "%s_%s" % (site, time.strftime("%Y%m%d_%H%M%S"))
                    elevs = utils.get_elevs(radar)
                    radar_elev = radar.altitude['data'][0]

                    # VAD wind profile and its RMSE diagnostic plot.
                    plt.figure(1, figsize=(15, 7))
                    our_u, our_v, hgt, gates = vad.get_uv_vs_hgt(radar, 'VEL2')
                    our_RMSE = utils.calc_RMSE(radar, 'VEL2', our_u, our_v)
                    display.u_v_rmse_plot(our_u, our_v, our_RMSE, hgt, title=time_str, elevs=elevs, radar_elev=radar_elev)

                    img_name = "%s_%s" % (time_str, 'fig1')
                    plt.savefig(os.path.join(image_dir, img_name))
                    plt.clf()

                    if csv:
                        csv_name = time_str + ".csv"
                        csv_name = os.path.join(csv_dir, csv_name)
                        logging.info("Exporting VAD as CSV: " + csv_name)
                        vad_csv.vad_to_csv(csv_name, site, radar, our_u, our_v, our_RMSE, hgt, elevs)

                except IOError:
                    logging.warning("Error Reading NEXRAD lvlII file: " + str(in_file))
                # Python 3 exception syntax (was the Python 2 `except X, e`).
                except KeyError as e:
                    logging.warning("Key not found " + str(e))
                except Exception as e:
                    logging.warning(e)

        # BUG FIX: the original never advanced start_hour, so the while
        # loop ran forever on the first hour.
        start_hour += td
def process_file(filename, outdir, dl='b1', verbose=False):
    """Apply the KAZR CMAC processing chain to one radar file.

    Reads the file, builds a significant-detection gate filter (Doppler
    velocity coherency plus a Hildebrand noise-floor estimate), corrects
    the Doppler velocity with the routine selected by the module-level
    ``DEALIAS`` setting, and writes the result as an ARM-convention
    CF/Radial NetCDF file in *outdir*.

    Parameters
    ----------
    filename : str
        Path to the input KAZR file.
    outdir : str
        Directory that receives the output NetCDF file.
    dl : str, optional
        ARM data level embedded in the output file name.
    verbose : bool, optional
        Print progress information.

    Raises
    ------
    ValueError
        If ``DEALIAS`` is neither 'phase' nor 'region'.
    """

    if verbose:
        # BUG FIX: Python 3 print function (was a Python 2 print statement).
        print('Processing file: {}'.format(os.path.basename(filename)))

    # Read radar data
    radar = read_kazr(filename, exclude_fields=None)

    # Step 1: Radar significant detection
    # Includes Hildebrand noise floor estimate and Doppler velocity coherency
    gf = noise.velocity_coherency(
        radar, gatefilter=None, num_bins=VDOP_COHER_BINS,
        limits=VDOP_COHER_LIMITS, texture_window=TEXTURE_WINDOW,
        texture_sample=TEXTURE_SAMPLE, min_sigma=None, max_sigma=None,
        nyquist=None, rays_wrap_around=None, remove_salt=False,
        fill_value=None, vdop_field=VDOP_FIELD, vdop_text_field=None,
        cohere_field=None, verbose=verbose)
    gf = noise.hildebrand_noise(
        radar, gatefilter=gf, scale=1.0, remove_salt=False,
        rays_wrap_around=False, fill_value=None, power_field=POWER_FIELD,
        noise_field=None, verbose=verbose)
    gf = noise.significant_detection(
        radar, gatefilter=gf, min_ncp=None, remove_salt=True,
        salt_window=SALT_WINDOW, salt_sample=SALT_SAMPLE, fill_holes=False,
        dilate=False, structure=None, rays_wrap_around=False, ncp_field=None,
        detect_field=None, verbose=verbose)

    # Step 2: Doppler velocity correction
    if DEALIAS == 'phase':
        vdop_corr = dealias_unwrap_phase(
            radar, gatefilter=gf, unwrap_unit='sweep', nyquist_vel=None,
            rays_wrap_around=False, keep_original=False, skip_checks=True,
            vel_field=VDOP_FIELD, corr_vel_field=None)

    elif DEALIAS == 'region':
        vdop_corr = dealias_region_based(
            radar, gatefilter=gf, interval_splits=INTERVAL_SPLITS,
            interval_limits=None, skip_between_rays=2, skip_along_ray=2,
            centered=True, nyquist_vel=None, rays_wrap_around=False,
            keep_original=False, vel_field=VDOP_FIELD, corr_vel_field=None)

    else:
        raise ValueError('Unsupported velocity correction routine')

    radar.add_field(CORR_VDOP_FIELD, vdop_corr, replace_existing=True)

    # TODO
    # Step 3: Reflectivity correction

    # Parse metadata
    radar.metadata = _create_metadata(radar, filename)

    # ARM file name protocols
    date = datetime_from_radar(radar).strftime('%Y%m%d.%H%M%S')
    filename = 'sgpkazrgecmacC1.{}.{}.cdf'.format(dl, date)

    # Write CMAC NetCDF file
    write_cfradial(os.path.join(outdir, filename), radar, format=FORMAT,
                   arm_time_variables=True)

    return