Example #1
def do_multidop_for_time(frame_time):
    year_str = "%04d" % frame_time.year
    day_str = "%02d" % frame_time.day
    month_str = "%02d" % frame_time.month
    hour_str = "%02d" % frame_time.hour
    minute_str = "%02d" % frame_time.minute
    fname = (time_procedures.out_data_path + 'ddop/cf_compliant_grid' +
             year_str + month_str + day_str + hour_str + minute_str + '.nc')
    print('Does file ' + fname + ' exist?')
    if (not os.path.isfile(fname)):
        one_day_ago = frame_time - timedelta(days=1, minutes=1)
        sounding_times = time_procedures.get_sounding_times(one_day_ago.year,
                                                            one_day_ago.month,
                                                            one_day_ago.day,
                                                            one_day_ago.hour,
                                                            one_day_ago.minute,
                                                            frame_time.year,
                                                            frame_time.month,
                                                            frame_time.day,
                                                            frame_time.hour,
                                                            frame_time.minute,
                                                            minute_interval=60)
        sounding_time = sounding_times[len(sounding_times) - 1]
        Sounding_netcdf = time_procedures.get_sounding(sounding_time)

        # Convert timestamps to datetime format
        Time = Sounding_netcdf.variables['time_offset'][:]
        base_time = Sounding_netcdf.variables['base_time'][:]
        alt = Sounding_netcdf.variables['alt'][:]
        u = Sounding_netcdf.variables['u_wind'][:]
        v = Sounding_netcdf.variables['v_wind'][:]
        base_timestamp = timedelta(seconds=float(base_time)) + datetime(
            1970, 1, 1)
        Sounding_netcdf.close()

        time_delta = datetime(
            frame_time.year, frame_time.month, frame_time.day, frame_time.hour,
            frame_time.minute, frame_time.second) - base_timestamp
        seconds_in_file = time_delta.days * (24 * 60 * 60) + time_delta.seconds

        five_minutes_later = frame_time + timedelta(minutes=5)
        ten_minutes_ago = frame_time - timedelta(minutes=10)
        five_minutes_earlier = frame_time - timedelta(minutes=5)

        times_berr, dates = time_procedures.get_radar_times_berr(
            five_minutes_earlier.year,
            five_minutes_earlier.month,
            five_minutes_earlier.day,
            five_minutes_earlier.hour,
            five_minutes_earlier.minute,
            five_minutes_later.year,
            five_minutes_later.month,
            five_minutes_later.day,
            five_minutes_later.hour,
            five_minutes_later.minute,
            minute_interval=0)

        # Write the sounding to a text file for multidop
        sounding_file_name = (time_procedures.out_data_path + 'soundings/' +
                              year_str + month_str + day_str + hour_str +
                              minute_str)
        file = open(sounding_file_name, 'w')
        # Take ~500 evenly spaced levels from the sounding and place them
        # into the file. Multidop needs a space-separated file with each
        # row being: altitude [m], u [m/s], v [m/s]

        # Do not include invalid/missing entries
        us = u[u > -75]
        vs = v[u > -75]
        alts = alt[u > -75]
        step = int(math.floor(len(us) / 500))
        if (step > 0):
            for i in range(0, len(us), step):
                input_string = (str(alts[i]) + ' ' + str(us[i]) + ' ' +
                                str(vs[i]) + '\n')
                file.write(input_string)

        # If the balloon popped below 15 km (approximate tropopause),
        # then don't use the sounding
        if (alts[-1] < 15000 or step == 0):
            use_sounding = 0
        else:
            use_sounding = 1

        file.close()
        # Calculate texture of velocity field for Berrima and CPOL
        # if previous frame is not available, just use (u,v) = 0
        try:
            Radar = time_procedures.get_radar_from_cpol(frame_time)
            if (Radar.nsweeps == 1):
                print('CPOL radar only has one sweep!')
                return
        except:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            print(('Could not load CPOL radar data file! ' +
                   str(sys.exc_info()[0]) + str(sys.exc_info()[1]) +
                   str(exc_tb.tb_lineno)))
            return

        try:
            Radar_berr = time_procedures.get_radar_from_berr(times_berr[0])
        except:
            print('Cannot find matching time from Berrima radar, skipping')
            return

        if (frame_time.year <= 2007):
            cpol_ref_field = 'reflectivity'
            cpol_vel_field = 'velocity'
        else:
            cpol_ref_field = 'Refl'
            cpol_vel_field = 'Vel'

        bt = time.time()
        print('Calculating texture....')
        try:
            texture_field = pyart.filters.calculate_velocity_texture(
                Radar, vel_field=cpol_vel_field)
            Radar.add_field('velocity_texture',
                            texture_field,
                            replace_existing=True)
            texture_field = pyart.filters.calculate_velocity_texture(
                Radar_berr, vel_field='Vel')
            Radar_berr.add_field('velocity_texture',
                                 texture_field,
                                 replace_existing=True)
            print('Done!')
            print((time.time() - bt) / 60.0, 'minutes to process')
        except:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            print(('Could not calculate texture! ' + str(sys.exc_info()[0]) +
                   str(sys.exc_info()[1]) + str(exc_tb.tb_lineno)))
            return

        # Apply gatefilter based on velocity and despeckling
        gatefilter_Gunn = pyart.correct.despeckle_field(Radar,
                                                        cpol_ref_field,
                                                        size=6)
        gatefilter_Gunn.exclude_above('velocity_texture', 3)
        gatefilter_Gunn.exclude_below(cpol_ref_field, 1)

        gatefilter_Berr = pyart.correct.despeckle_field(Radar_berr,
                                                        'Refl',
                                                        size=6)
        gatefilter_Berr.exclude_above('velocity_texture', 4)
        gatefilter_Berr.exclude_below('Refl', 1)

        # Change variable names to DT (reflectivity) and
        # VT (velocity) expected by multidop
        # This assumes datasets already have aliasing corrections
        cp = deepcopy(Radar.fields[cpol_ref_field]['data'])
        texture = Radar.fields['velocity_texture']['data']
        Radar.add_field_like(cpol_ref_field, 'DT', cp, replace_existing=True)
        try:
            cp = deepcopy(Radar.fields['corrected_velocity']['data'])
            Radar.add_field_like('corrected_velocity',
                                 'VT',
                                 cp,
                                 replace_existing=True)
        except:
            print('No dealiased velocities from CPOL...skipping!')
            return

        cp = deepcopy(Radar_berr.fields['Refl']['data'])
        Radar_berr.add_field_like('Refl', 'DT', cp, replace_existing=True)
        try:
            cp = deepcopy(Radar_berr.fields['corrected_velocity']['data'])
            Radar_berr.add_field_like('corrected_velocity',
                                      'VT',
                                      cp,
                                      replace_existing=True)
        except:
            print('No dealiased velocities from Berrima...skipping!')
            return
        # The analysis engine currently expects the "missing_value" attribute
        DT_cpol_FV = 1.0 * Radar.fields['DT']['_FillValue']
        DT_berr_FV = 1.0 * Radar_berr.fields['DT']['_FillValue']
        VT_cpol_FV = 1.0 * Radar.fields['VT']['_FillValue']
        VT_berr_FV = 1.0 * Radar_berr.fields['VT']['_FillValue']
        Radar.fields['DT']['missing_value'] = DT_cpol_FV
        Radar_berr.fields['DT']['missing_value'] = DT_berr_FV
        Radar.fields['VT']['missing_value'] = VT_cpol_FV
        Radar_berr.fields['VT']['missing_value'] = VT_berr_FV

        # Grid the data to a Cartesian grid. The dual-Doppler domain does
        # not extend more than ~60 km from either radar, so there is no
        # need to store more data than that.
        origin = (Radar.latitude['data'][0], Radar.longitude['data'][0])
        grid_cpol = time_procedures.grid_radar(Radar,
                                               origin=origin,
                                               xlim=(-60000, 50000),
                                               ylim=(-50000, 30000),
                                               fields=['DT', 'VT'],
                                               min_radius=750.0,
                                               bsp=1.0,
                                               nb=1.5,
                                               h_factor=3.0,
                                               gatefilter=gatefilter_Gunn,
                                               zlim=(500, 20000),
                                               grid_shape=(40, 81, 111))
        grid_Berr = time_procedures.grid_radar(Radar_berr,
                                               origin=origin,
                                               fields=['DT', 'VT'],
                                               xlim=(-60000, 50000),
                                               ylim=(-50000, 30000),
                                               zlim=(500, 20000),
                                               min_radius=750.0,
                                               grid_shape=(40, 81, 111),
                                               gatefilter=gatefilter_Berr,
                                               bsp=1.0,
                                               nb=1.5,
                                               h_factor=3.0)

        # Berrima reflectivities are corrupt for many scans, so use the
        # CPOL reflectivities instead (remove this line if both Z
        # datasets are good)
        grid_Berr.fields['DT']['data'] = grid_cpol.fields['DT']['data']

        # The analysis engine requires azimuth and elevation
        # to be part of the grid. This information is computed from
        # the grid geometry.
        grid_cpol = multidop.angles.add_azimuth_as_field(grid_cpol)
        grid_Berr = multidop.angles.add_azimuth_as_field(grid_Berr)
        grid_cpol = multidop.angles.add_elevation_as_field(grid_cpol)
        grid_Berr = multidop.angles.add_elevation_as_field(grid_Berr)

        cpol_grid_name = (time_procedures.out_data_path + 'cpol/cpol_' +
                          year_str + month_str + day_str + hour_str +
                          minute_str + '.nc')

        berr_grid_name = (time_procedures.out_data_path + 'berr/berr_' +
                          year_str + month_str + day_str + hour_str +
                          minute_str + '.nc')

        # Save the input grids for later.
        pyart.io.write_grid(cpol_grid_name, grid_cpol)
        pyart.io.write_grid(berr_grid_name, grid_Berr)

        # Load previous time period for storm motion
        # (use 0 if no previous frame)
        try:
            Radar_prev = time_procedures.get_radar_from_cpol(ten_minutes_ago)

            print('Calculating storm motion....')
            texture_field = pyart.filters.calculate_velocity_texture(
                Radar_prev, vel_field=cpol_vel_field)

            print('Gridding previous frame...')
            cp = deepcopy(Radar_prev.fields['corrected_reflectivity']['data'])
            cp = np.ma.masked_where(texture_field['data'] > 4, cp)
            Radar_prev.add_field_like('corrected_reflectivity',
                                      'DT',
                                      cp,
                                      replace_existing=True)
            grid_prev = time_procedures.grid_radar(Radar_prev,
                                                   origin=origin,
                                                   xlim=(-60000, 60000),
                                                   ylim=(-50000, 30000),
                                                   fields=['DT'],
                                                   zlim=(500, 20000),
                                                   grid_shape=(40, 121, 81))
            (vt, ut) = pyart.retrieve.grid_displacement_pc(
                grid_prev,
                grid_cpol,
                'DT',
                9,
                return_value='velocity')
        except:
            (vt, ut) = (0, 0)

        # Not every keyword has to be defined; most of these are the
        # default values. Anything omitted falls back to the default
        # listed in multidop's parameters.py.
        calc_file_name = (time_procedures.out_data_path +
                          '/dda_files/cpol_calc' + year_str + month_str +
                          day_str + hour_str + minute_str + '.dda')

        frprmn_out_name = (time_procedures.out_data_path +
                           '/fprmn/frprmn_out' + year_str + month_str +
                           day_str + hour_str + minute_str + '.nc')
        localfile = tempfile.NamedTemporaryFile()

        # If sounding is available, favor constraint based on sounding
        # vs. data, otherwise favor data more
        if (use_sounding == 0):
            C8b = 0.0
            C1b = 1.0
            sounding_file_name = None
        else:
            C1b = 0.1
            C8b = 0.01

        pd = {
            'dir':
            './',
            'x': [-60000.0, 1000.0, 111],  # start [m], step [m], n points
            'y': [-50000.0, 1000.0, 81],
            'z': [500.0, 500.0, 40],
            'grid': [
                grid_cpol.origin_longitude['data'][0],
                grid_cpol.origin_latitude['data'][0], 50.0
            ],
            'files': [berr_grid_name, cpol_grid_name],
            'radar_names': ['Berrima', 'CPOL'],
            'refl':
            'DT',  # Name of reflectivity field.
            'vt':
            'VT',  # Name of velocity field.
            'bgfile':
            sounding_file_name,  # Name of sounding file
            'writeout':
            localfile.name,  # Name of output grid file
            'frprmn_out':
            frprmn_out_name,
            'min_cba':
            30.0,  # Minimum beam-crossing angle
            'calc_params':
            calc_file_name,  # .dda file for parameters
            # related to minimization
            'anel':
            1,  # 0 = Boussinesq approximation  1 = anelastic
            'laplace':
            0,  # 0 = 1st order derivatives in smoothing, 1 = 2nd
            'read_dataweights':
            2,  # 0 = calculate data constraint weights,
            # 1 = read from file, 2 = weigh equally
            'max_dist':
            10.0,  # How much distance analysis and observational
            # grid must match in m
            'cutoff':
            0.0,  # Deny observations below this level (m)
            'UT':
            ut,  # U of prescribed storm motion vector
            'VT':
            vt,  # V of prescribed storm motion vector
            'output_error':
            0,  # 1 = output verification stats
            'weak_height':
            -1,  # Sounding height constraint weakened
            # > 10 dBZ below this height (-1 = off)
            'upper_bc':
            1,  # 1 = w = 0 as upper boundary cond., -1 = ignore
            'itmax_frprmn': [200, 10],  # max iterations in frprmn function
            'itmax_dbrent':
            200,  # max iterations in dbrent function
            'C1b':
            C1b,  # Data weighting factor
            'C2b':
            1500.0,  # Mass continuity weighting factor
            'C3b':
            0.0,  # Vorticity weighting factor
            'C4b':
            75.0,  # Horizontal smoothing factor
            'C5b':
            2.0,  # Vertical smoothing factor
            'C8b':
            C8b,  # Sounding factor
            'vary_weights':
            0,
            # Define filter with ONE of the following forms.
            # filter: none
            # filter: filter_frequency Leise nstep
            # filter: filter_frequency low-pass alpha
            'filter': ['60', 'Leise', '2'],
            """ Coverage values for various combinations of radars.
              Each line should provide the type of coverage value, radar count,
              radar names, and the value, in the following form:

                  cvg_(""|opt|sub)_(bg|fil): integer radar1 radar2 ... boolean

              Radars are identified by the OPAWS/OBAN file name with grid
              data for that radar. This must be just the base name,
              not the full path.

              For example:

                 cvg_opt_bg: SR1 SR2 1

              says that if SR1 SR2 both have data within max_dist meters
              of the point under consideration, and an optimal beam crossing
              angle, then the point will receive a coverage value of 1,
              i.e. point has coverage.

              "opt" means optimal beam crossing angle.
              "sub" means suboptimal beam crossing angle.
              "bg" means background coverage.
              "fil" means filter coverage.
              cvg_bg, cvg_fil, and sseq_trip do not require a radar count.
              (Beam crossing angle is meaningless with one radar,
               so there is no opt or sub)
              If this file is being used, coverage values must be provided
              for all combinations of radars. """
            'cvg_opt_bg': [1, 1, 0],
            'cvg_sub_bg': [1, 1, 0],
            'cvg_opt_fil': [0, 0, 0],
            'cvg_sub_fil': [1, 1, 0],
            'cvg_bg': [1, 1, 0],
            'cvg_fil': [0, 0, 0],
            'sseq_trip': [1.0, 1.0, 0.0]
        }
        dda_file_name = (time_procedures.out_data_path +
                         '/dda_files/cpol_test' + year_str + month_str +
                         day_str + hour_str + minute_str + '.dda')

        pf = multidop.parameters.ParamFile(pd, dda_file_name)
        pf = multidop.parameters.CalcParamFile(pd, calc_file_name)

        # Unfortunately, text output from the analysis engine (DDA)
        # will not display until after the program completes.
        # Expect this step to take several minutes.
        bt = time.time()
        multidop.execute.run_command('./DDA ' + dda_file_name)
        print((time.time() - bt) / 60.0, 'minutes to process')

        # Baseline output is not CF or Py-ART compliant.
        # This function fixes that.
        # This is why we wrote the original output to a tempfile
        # that can be safely removed.
        # The final grid will have all wind solutions outside
        # the coverage region masked.
        try:
            final_grid = multidop.grid_io.make_new_grid([grid_cpol, grid_Berr],
                                                        localfile.name)
            final_grid.write(fname)
            localfile.close()
        except:
            print('Failed to write final grid!')
            return
    else:
        print('DDA grid already exists...skipping.')
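
A minimal driver sketch for the routine above, assuming the module-level imports and the time_procedures helpers it uses are available; the date and the 10-minute cadence are placeholders, not values from the original script.

# Hypothetical driver: run the dual-Doppler analysis over one day of frames.
from datetime import datetime, timedelta

start = datetime(2006, 1, 23, 0, 0)      # placeholder date
end = start + timedelta(days=1)
frame = start
while frame < end:
    do_multidop_for_time(frame)
    frame += timedelta(minutes=10)       # placeholder scan cadence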
Example #2
def get_echotop_heights(cur_time):
    # First, get VISST Tb 
    cdf_data = get_visst_from_time(cur_time)

    # Load latitude, longitude, and time parameters from the VISST file
    Latitude = cdf_data.variables['latitude'][:]
    Longitude = cdf_data.variables['longitude'][:]
    Time = cdf_data.variables['image_times'][:]
    NumPixels = cdf_data.variables['image_numpix'][:]

    # Load brightness temperature
    IRBrightness = cdf_data.variables['temperature_ir'][:]
    CloudTopHeight = cdf_data.variables['cloud_top_height'][:]
    num_frames = len(NumPixels)
    
    echo_top_temps_visst = []
    echo_top_temps_cpol = []
    # For each time, find multidop grid that is within 10 minutes of scan
    for frame in range(0, num_frames):
        scan_hr, scan_min = seconds_to_midnight_to_hm(Time[frame])
        five_minutes_before = datetime(cur_time.year,
                                       cur_time.month,
                                       cur_time.day,
                                       int(scan_hr), 
                                       int(scan_min)) - timedelta(minutes=5)
        five_minutes_after = datetime(cur_time.year,
                                      cur_time.month,
                                      cur_time.day,
                                      int(scan_hr), 
                                      int(scan_min)) + timedelta(minutes=5)

        nearest_multidop = time_procedures.get_grid_times_cpol(
            five_minutes_before.year,
            five_minutes_before.month,
            five_minutes_before.day,
            five_minutes_before.hour,
            five_minutes_before.minute,
            five_minutes_after.year,
            five_minutes_after.month,
            five_minutes_after.day,
            five_minutes_after.hour,
            five_minutes_after.minute)
        print(cur_time.year, cur_time.month, cur_time.day, scan_hr, scan_min)
        print(nearest_multidop)
        resolution = 1
        if(len(nearest_multidop) > 0):
            try:
                pyart_grid = time_procedures.get_grid_from_cpol(nearest_multidop[0])
            except:
                print('Py-ART grid not found!')
                continue
            texture = pyart_grid.fields['velocity_texture']['data']
            z = pyart_grid.fields['corrected_reflectivity']['data']
            grid_z = pyart_grid.point_z['data']
            grid_x = pyart_grid.point_x['data']
            grid_y = pyart_grid.point_y['data']
            dist = np.sqrt(np.square(grid_x) + np.square(grid_y))
            # Get sounding data
            one_day_ago = nearest_multidop[0]-timedelta(days=1, minutes=1)
            sounding_times = time_procedures.get_sounding_times(one_day_ago.year,
                                                                one_day_ago.month,
                                                                one_day_ago.day,
                                                                one_day_ago.hour,
                                                                one_day_ago.minute,
                                                                cur_time.year,
                                                                cur_time.month,
                                                                cur_time.day,
                                                                cur_time.hour,
                                                                cur_time.minute,
                                                                minute_interval=60)
            try:
                sounding_time = sounding_times[len(sounding_times)-1]
                Sounding_netcdf = time_procedures.get_sounding(sounding_time)
                base_time = Sounding_netcdf.variables['base_time'][:]
                alt = Sounding_netcdf.variables['alt'][:]
                temp = Sounding_netcdf.variables['tdry'][:]
                Tz = interpolate.interp1d(alt, temp+273.15, bounds_error=False)
                #grid_temp = Tz(grid_z)
            except (IndexError):
                print('Insufficient information from sounding...skipping!')
                continue
            print('Getting ETH...')
            array_shape = texture.shape
            # Get echo top heights from CPOL
            echo_top = np.zeros((array_shape[1], array_shape[2]))
            for i in range(0, array_shape[1]):
                for j in range(0, array_shape[2]):
                    in_cloud = np.where(np.logical_and(
                        texture[:, i, j] < 3, z[:, i, j].mask == False))
                    if (len(in_cloud[0]) > 0):
                        in_cloud = in_cloud[0][-1]
                        echo_top[i, j] = grid_z[in_cloud, i, j] / 1e3
                    else:
                        echo_top[i, j] = np.nan

                    # Exclude values < 15 km from the radar
                    if (math.sqrt(math.pow(grid_x[0, i, j], 2) +
                                  math.pow(grid_y[0, i, j], 2)) < 15000):
                        echo_top[i, j] = np.nan


            # Get VISST cloud-top data over the same grid
            cpol_latitude = -12.249166
            cpol_longitude = 131.04445
            
            # Get Lat and Lon for specific frame
            Lat = Latitude[int(frame) * int(NumPixels[frame]):
                           int(frame + 1) * int(NumPixels[frame]) - 1]
            Lon = Longitude[int(frame) * int(NumPixels[frame]):
                            int(frame + 1) * int(NumPixels[frame]) - 1]
            
            Lon_cpol = pyart_grid.point_longitude['data'][0]
            Lat_cpol = pyart_grid.point_latitude['data'][0]
            Lon_cpol = Lon_cpol.flatten()
            Lat_cpol = Lat_cpol.flatten()
            # Regrid data to multidop's grid
            x = pyart_grid.point_longitude['data'][0,
                                                   ::resolution,
                                                   ::resolution]
            y = pyart_grid.point_latitude['data'][0,
                                                  ::resolution,
                                                  ::resolution]
            echo_top = interpolate.griddata((Lon_cpol, Lat_cpol),
                                            echo_top.flatten(), (x, y))
            echo_top_temps_cpol.append(echo_top)
            data = CloudTopHeight[int(frame) * int(NumPixels[frame]):
                                  int(frame + 1) * int(NumPixels[frame]) - 1]
            data_gridded = interpolate.griddata((Lon, Lat), data, (x, y))
            lat_gridded = interpolate.griddata((Lon, Lat), Lat, (x, y))
            lon_gridded = interpolate.griddata((Lon, Lat), Lon, (x, y))
            lat_bounds = np.logical_or(lat_gridded > cpol_latitude+1.5,
                                       lat_gridded < cpol_latitude-1.5)
            lon_bounds = np.logical_or(lon_gridded < cpol_longitude-1.5,
                                       lon_gridded > cpol_longitude+1.5)
            masked_region = np.logical_or(lat_bounds, lon_bounds)
            data_masked = np.ma.masked_where(masked_region, data_gridded)
            echo_top_temps_visst.append(data_masked)

    if echo_top_temps_visst:
        dims = echo_top.shape
        return_array = np.zeros((7, len(echo_top_temps_visst),
                                 dims[0], dims[1]))
        echo_top_temps_visst = np.stack(echo_top_temps_visst)
        echo_top_temps_cpol = np.stack(echo_top_temps_cpol)
        return_array[0, :, :, :] = echo_top_temps_cpol
        return_array[1, :, :, :] = echo_top_temps_visst
        return_array[2, :, :, :] = z[1]
        return_array[3, :, :, :] = z[10]
        return_array[4, :, :, :] = z[20]
        return_array[5, :, :, :] = z[30]
        return_array[6, :, :, :] = dist[1]
        return return_array
    else:
        return []
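
A small usage sketch, assuming get_echotop_heights and its dependencies are importable; the date and output file name are placeholders.

# Hypothetical usage: build the echo-top/VISST comparison for one day and
# save it only if the function returned data.
import numpy as np
from datetime import datetime

day = datetime(2006, 1, 23)              # placeholder date
stats = get_echotop_heights(day)
if len(stats) > 0:
    np.save('echo_top_stats_20060123.npy', stats)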
Example #3
def display_time(rad_time):
    import pyart
    import matplotlib
    import os
    os.chdir('/home/rjackson/cmdv-rrm-anl/code/')
    import time_procedures
    matplotlib.use('Agg')
    from matplotlib import pyplot as plt
    from datetime import timedelta
    from scipy import ndimage
    import numpy as np

    # Get a Radar object given a time period in the CPOL dataset
    data_path_cpol = '/lcrc/group/earthscience/radar/stage/radar_disk_two/cpol_rapic/'
    out_file_path = '/lcrc/group/earthscience/rjackson/quicklook_plots/cpol/'
    out_data_path = '/lcrc/group/earthscience/rjackson/cpol/'

    # CPOL in lassen or rapic?
    cpol_format = 0  # 0 = lassen, 1 = rapic

    year_str = "%04d" % rad_time.year
    month_str = "%02d" % rad_time.month
    day_str = "%02d" % rad_time.day
    hour_str = "%02d" % rad_time.hour
    minute_str = "%02d" % rad_time.minute
    second_str = "%02d" % rad_time.second

    try:
        radar = time_procedures.get_radar_from_cpol_cfradial(rad_time)
        if (rad_time.year > 2007):
            ref_field = 'Refl'
            vel_field = 'Vel'
            rhohv_field = 'RHOHV'
        else:
            ref_field = 'reflectivity'
            vel_field = 'velocity'
            rhohv_field = 'cross_correlation_ratio'

        if (radar.nsweeps == 1):
            return
        # Get sounding for 4DD initialization
        one_day_ago = rad_time - timedelta(days=1, minutes=1)
        sounding_times = time_procedures.get_sounding_times(one_day_ago.year,
                                                            one_day_ago.month,
                                                            one_day_ago.day,
                                                            one_day_ago.hour,
                                                            one_day_ago.minute,
                                                            rad_time.year,
                                                            rad_time.month,
                                                            rad_time.day,
                                                            rad_time.hour,
                                                            rad_time.minute,
                                                            minute_interval=60)

        sounding_time = sounding_times[len(sounding_times) - 1]
        Sounding_netcdf = time_procedures.get_sounding(sounding_time)
        # Convert timestamps to datetime format
        Time = Sounding_netcdf.variables['time_offset'][:]
        base_time = Sounding_netcdf.variables['base_time'][:]
        alt = Sounding_netcdf.variables['alt'][:]
        u = Sounding_netcdf.variables['u_wind'][:]
        v = Sounding_netcdf.variables['v_wind'][:]
        Sounding_netcdf.close()
        steps = int(np.floor(len(u) / 50))
        wind_profile = pyart.core.HorizontalWindProfile.from_u_and_v(
            alt[0::steps], u[0::steps], v[0::steps])

        ## 4DD expects speed, direction but HorizontalWindProfile outputs u_wind, v_wind
        wind_profile.u = wind_profile.u_wind
        wind_profile.v = wind_profile.v_wind
        # Filter clutter and noise from velocities

        nyq_Gunn = radar.instrument_parameters['nyquist_velocity']['data'][0]

        gatefilter = pyart.correct.GateFilter(radar)
        gatefilter.exclude_below(ref_field, 10)
        gatefilter.exclude_below(rhohv_field, 0.5)
        gatefilter.exclude_invalid(vel_field)
        gatefilter.exclude_masked(vel_field)
        gatefilter.exclude_invalid(ref_field)
        gatefilter = pyart.correct.despeckle_field(radar,
                                                   vel_field,
                                                   gatefilter=gatefilter,
                                                   size=10)
        radar.add_field('sim_velocity',
                        pyart.util.simulated_vel_from_profile(
                            radar, wind_profile),
                        replace_existing=True)

        corrected_velocity_4dd = pyart.correct.dealias_region_based(
            radar,
            vel_field=vel_field,
            keep_original=False,
            centered=True,
            interval_splits=6,
            gatefilter=gatefilter,
            skip_between_rays=2000,
            skip_along_ray=2000,
            rays_wrap_around=True,
            valid_min=-75,
            valid_max=75)

        print('Dealiasing done!')
        # Filter out regions based on deviation from sounding field to remove missed folds
        corr_vel = corrected_velocity_4dd['data']
        sim_velocity = radar.fields['sim_velocity']['data']
        diff = corr_vel - radar.fields['sim_velocity']['data']
        diff = diff / (
            radar.instrument_parameters['nyquist_velocity']['data'][1])
        radar.add_field_like(vel_field,
                             'corrected_velocity',
                             corrected_velocity_4dd['data'],
                             replace_existing=True)

        # Calculate gradient of field
        gradient = pyart.config.get_metadata('velocity')
        gradients = np.ma.array(
            np.gradient(radar.fields['corrected_velocity']['data']))
        gradients = np.ma.masked_where(gradients < -31000, gradients)
        gradients = gradients / (
            radar.instrument_parameters['nyquist_velocity']['data'][1])
        gradient['data'] = gradients[0]
        gradient[
            'standard_name'] = 'gradient_of_corrected_velocity_wrt_azimuth'
        gradient['units'] = 'meters per second per gate (divided by Vn)'
        radar.add_field('gradient_wrt_angle', gradient, replace_existing=True)

        gradient = pyart.config.get_metadata('velocity')
        gradient['data'] = gradients[1]
        gradient['standard_name'] = 'gradient_of_corrected_velocity_wrt_range'
        gradient['units'] = 'meters per second per gate (divided by Vn)'
        radar.add_field('gradient_wrt_range', gradient, replace_existing=True)

        # Adjust sweeps to match reference velocity
        ref_vdata = sim_velocity
        corr_vel = corrected_velocity_4dd['data']
        nyquist_interval = float(
            2 * radar.instrument_parameters['nyquist_velocity']['data'][1])
        for nsweep, sweep_slice in enumerate(radar.iter_slice()):
            sref = ref_vdata[sweep_slice]
            scorr = corr_vel[sweep_slice]
            mean_diff = (sref - scorr).mean()
            if (mean_diff > -100):
                global_fold = round(mean_diff / nyquist_interval)
                if global_fold != 0:
                    corr_vel[sweep_slice] += global_fold * nyquist_interval

        # Calculate difference from simulated velocity
        diff = radar.fields['corrected_velocity']['data'] - radar.fields[
            'sim_velocity']['data']
        diff = diff / (
            radar.instrument_parameters['nyquist_velocity']['data'][1])
        radar.add_field_like('sim_velocity',
                             'velocity_diff',
                             diff,
                             replace_existing=True)

        # Filter by gradient
        corr_vel = np.ma.masked_where(
            np.logical_or(
                np.logical_or(gradients[0] > 0.3, gradients[0] < -0.3),
                np.logical_or(diff > 2.0, diff < -0.9)), corr_vel)
        corrected_velocity_4dd['data'] = corr_vel
        radar.add_field_like(vel_field,
                             'corrected_velocity',
                             corrected_velocity_4dd['data'],
                             replace_existing=True)
        print('Filter!')
        # Save to Cf/Radial file
        time_procedures.write_radar_to_cpol_cfradial(radar, rad_time)

        out_path = (out_file_path + '/' + year_str + '/' + month_str + '/' +
                    day_str + '/')
        if (not os.path.exists(out_path)):
            try:
                os.makedirs(out_path)
            except:
                print('Not making directory')
        out_file = hour_str + minute_str + '.png'
        plt.figure(figsize=(7, 14))
        plt.subplot(211)
        display = pyart.graph.RadarMapDisplay(radar)
        display.plot_ppi(ref_field,
                         sweep=0,
                         cmap=pyart.graph.cm.NWSRef,
                         vmin=0,
                         vmax=70)
        plt.subplot(212)
        display = pyart.graph.RadarMapDisplay(radar)
        display.plot_ppi('corrected_velocity',
                         sweep=0,
                         cmap=pyart.graph.cm.NWSVel,
                         vmin=-30,
                         vmax=30)
        plt.savefig(out_path + out_file)

        plt.close()
    except:
        import sys
        print('Skipping corrupt time ' + year_str + '-' + month_str + '-' +
              day_str + ' ' + hour_str + ':' + minute_str)
        print('Exception: ' + str(sys.exc_info()[0]) + str(sys.exc_info()[1]))
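
A minimal sketch for driving the quicklook routine above; the scan times are placeholders, since the original script presumably queries the archive for them.

# Hypothetical driver: make quicklooks for a fixed list of scan times.
from datetime import datetime

scan_times = [datetime(2006, 1, 23, 0, 10),   # placeholder times
              datetime(2006, 1, 23, 0, 20)]
for t in scan_times:
    display_time(t)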
Example #4
def display_time(rad_time):
    import pyart
    import matplotlib
    import sys
    sys.path.append('/home/rjackson/cmdv-rrm-anl/code/')
    import time_procedures
    matplotlib.use('Agg')
    from matplotlib import pyplot as plt
    import os
    from datetime import timedelta
    import numpy as np

    # Get a Radar object given a time period in the CPOL dataset
    data_path_cpol = '/lcrc/group/earthscience/radar/stage/radar_disk_two/cpol_rapic/'
    out_file_path = '/lcrc/group/earthscience/rjackson/quicklook_plots/berr/'
    out_data_path = '/lcrc/group/earthscience/rjackson/berr/'

    # CPOL in lassen or rapic?
    cpol_format = 1    # 0 = lassen, 1 = rapic
    
    def get_radar_from_cpol_rapic(time):
        from datetime import timedelta, datetime
        year_str = "%04d" % time.year
        month_str = "%02d" % time.month
        day_str = "%02d" % time.day
        hour_str = "%02d" % time.hour
        minute_str = "%02d" % time.minute
        second_str = "%02d" % time.second
        if(time.year == 2009 or (time.year == 2010 and
                                 time.month < 6)):
            dir_str = 'cpol_0910/rapic/'
        else:
            dir_str = 'cpol_1011/rapic/'

        file_name_str = (data_path_cpol +
                         dir_str + 
                         year_str +
                         month_str +
                         day_str +
                         hour_str +
                         minute_str +
                        'Gunn_Pt' +
                        '.rapic')
        radar = pyart.aux_io.read_radx(file_name_str)
        return radar 
    
    
 
    year_str = "%04d" % rad_time.year
    month_str = "%02d" % rad_time.month
    day_str = "%02d" % rad_time.day
    hour_str = "%02d" % rad_time.hour
    minute_str = "%02d" % rad_time.minute
    second_str = "%02d" % rad_time.second
  
    try:
        radar = time_procedures.get_radar_from_berr_cfradial(rad_time)
        if(cpol_format == 1):
            ref_field = 'Refl'
            vel_field = 'Vel'
        else:
            ref_field = 'reflectivity'
            vel_field = 'velocity'

        # Get sounding for 4DD initialization
        one_day_ago = rad_time-timedelta(days=1, minutes=1)
        sounding_times = time_procedures.get_sounding_times(one_day_ago.year,
                                                            one_day_ago.month,
                                                            one_day_ago.day,
                                                            one_day_ago.hour,
                                                            one_day_ago.minute,
                                                            rad_time.year,
                                                            rad_time.month,
                                                            rad_time.day,
                                                            rad_time.hour,
                                                            rad_time.minute,
                                                            minute_interval=60)
            
        sounding_time = sounding_times[len(sounding_times)-1]
        Sounding_netcdf = time_procedures.get_sounding(sounding_time)
        # Convert timestamps to datetime format
        Time = Sounding_netcdf.variables['time_offset'][:]
        base_time = Sounding_netcdf.variables['base_time'][:]
        alt = Sounding_netcdf.variables['alt'][:]
        u = Sounding_netcdf.variables['u_wind'][:]
        v = Sounding_netcdf.variables['v_wind'][:]
 
        Sounding_netcdf.close()
        steps = int(np.floor(len(u) / 30))
        wind_profile = pyart.core.HorizontalWindProfile.from_u_and_v(alt[0::steps],
                                                                     u[0::steps],
                                                                     v[0::steps])
              
        ## 4DD expects speed, direction but HorizontalWindProfile outputs u_wind, v_wind
        wind_profile.u = wind_profile.u_wind
        wind_profile.v = wind_profile.v_wind
         
        # Dealias velocities
        gatefilter = pyart.correct.GateFilter(radar)
        gatefilter.exclude_below(ref_field, 5)
        gatefilter.exclude_invalid(vel_field)
        gatefilter.exclude_masked(vel_field)
        gatefilter.exclude_invalid(ref_field) 
        gatefilter = pyart.correct.despeckle_field(radar,
                                                   vel_field, 
                                                   gatefilter=gatefilter,
                                                   size=50)     
        radar.add_field('sim_velocity',
                        pyart.util.simulated_vel_from_profile(radar, 
                                                              wind_profile),
                        replace_existing = True)
        nyq = radar.instrument_parameters['nyquist_velocity']['data'][1]
        
        corrected_velocity_4dd = pyart.correct.dealias_region_based(radar,
                                                                    vel_field=vel_field,
                                                                    keep_original=False,
                                                                    centered=True,
                                                                    gatefilter=gatefilter,
                                                                    interval_splits=6,
                                                                    skip_between_rays=2200,
                                                                    skip_along_ray=2200,
                                                                    rays_wrap_around=True,
                                                                    valid_min=-75,
                                                                    valid_max=75,
                                                                    nyquist_velocity=nyq)
              
        # Filter out regions based on deviation from sounding field to remove missed folds                                                         
        corr_vel = corrected_velocity_4dd['data']
        sim_velocity = radar.fields['sim_velocity']['data']
        diff = corr_vel - sim_velocity
        diff = diff/(nyq)                   
        radar.add_field_like(vel_field,
                             'corrected_velocity',
                             corrected_velocity_4dd['data'],
                             replace_existing=True)
 
        # Calculate gradient of field
        gradient = pyart.config.get_metadata('velocity')
        gradients = np.ma.array(
            np.gradient(radar.fields['corrected_velocity']['data']))
        gradients = np.ma.masked_where(gradients < -31000, gradients)
        gradients = gradients / nyq
        gradient['data'] = gradients[0]
        gradient['standard_name'] = 'gradient_of_corrected_velocity_wrt_azimuth'
        gradient['units'] = 'meters per second per gate (divided by Vn)'
        radar.add_field('gradient_wrt_angle',
                        gradient,
                        replace_existing=True)

        gradient = pyart.config.get_metadata('velocity')
        gradient['data'] = gradients[1]
        gradient['standard_name'] = 'gradient_of_corrected_velocity_wrt_range'
        gradient['units'] = 'meters per second per gate (divided by Vn)'
        radar.add_field('gradient_wrt_range',
                        gradient,
                        replace_existing=True)

        # Calculate difference from simulated velocity  
        radar.add_field_like('sim_velocity', 
                             'velocity_diff', 
                             diff, 
                             replace_existing=True) 
        
        # Adjust sweeps to match reference velocity
        ref_vdata = sim_velocity
        corr_vel = corrected_velocity_4dd['data']
        nyquist_interval = float(2*nyq)
        for nsweep, sweep_slice in enumerate(radar.iter_slice()):                                                                   
            sref = ref_vdata[sweep_slice]
            scorr = corr_vel[sweep_slice]
            mean_diff = (sref - scorr).mean()
            if(mean_diff > -100):
                global_fold = round(mean_diff / nyquist_interval)
                if global_fold != 0:
                    corr_vel[sweep_slice] += global_fold * nyquist_interval
            
        # Filter by deviation from the sounding and by disagreement in sign
        # with the simulated velocity (missed folds)
        tenth_percentile = np.percentile(diff[diff > -100], 10)
        ninetieth_percentile = np.percentile(diff[diff > -100], 90)
        if (ninetieth_percentile > 2.5):
            ninetieth_percentile = 2.5
        if (tenth_percentile < -1.0):
            tenth_percentile = -1.0
        inds0 = np.logical_or(diff < tenth_percentile,
                              diff > ninetieth_percentile)
        inds1 = np.logical_and(corr_vel < -nyq / 2, sim_velocity > nyq / 2)
        inds2 = np.logical_and(corr_vel > nyq / 2, sim_velocity < -nyq / 2)
        corr_vel = np.ma.masked_where(
            np.logical_or(inds0, np.logical_or(inds1, inds2)), corr_vel)
        corrected_velocity_4dd['data'] = corr_vel
        radar.add_field_like(vel_field,
                             'corrected_velocity',
                             corrected_velocity_4dd['data'],
                             replace_existing=True)
        gatefilter = pyart.correct.despeckle_field(radar,
                                                   'corrected_velocity',
                                                   size=50)
        time_procedures.write_radar_to_berr_cfradial(radar, rad_time)
        last_Radar = radar
        out_path = (out_file_path +
                    '/' +
                    year_str +
                    '/' +
                    month_str +
                    '/' +
                    day_str +
                    '/')
        if not os.path.exists(out_path):
            try:
                os.makedirs(out_path)
            except:
                print('Not making directory')
        out_file = hour_str + minute_str + '.png'
        plt.figure(figsize=(7,14))
        plt.subplot(211)
        display = pyart.graph.RadarMapDisplay(radar)
        display.plot_ppi(ref_field, 
                         sweep=0, 
                         cmap=pyart.graph.cm.NWSRef,
                         vmin=0, 
                         vmax=70)
        plt.subplot(212)
        display = pyart.graph.RadarMapDisplay(radar)
        display.plot_ppi('corrected_velocity',
                         sweep=0,
                         cmap=pyart.graph.cm.NWSVel,
                         gatefilter=gatefilter,
                         vmin=-30,
                         vmax=30)
        plt.savefig(out_path + out_file)
        plt.close() 
    except:
        import sys
        print('Skipping corrupt time ' + year_str + '-' + month_str + '-' +
              day_str + ' ' + hour_str + ':' + minute_str)
        print('Exception: ' + str(sys.exc_info()[0]) + str(sys.exc_info()[1]))
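
A hedged parallel-driver sketch using only the standard library; the scan times and pool size are assumptions, not taken from the original script.

# Hypothetical driver: process several scan times in parallel.
from datetime import datetime
from multiprocessing import Pool

scan_times = [datetime(2011, 1, 23, 0, 10),   # placeholder times
              datetime(2011, 1, 23, 0, 20)]

if __name__ == '__main__':
    with Pool(4) as pool:                     # pool size is arbitrary
        pool.map(display_time, scan_times)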
Example #5
def display_time(rad_date):
    import pyart
    import matplotlib
    import sys
    sys.path.append('/home/rjackson/cmdv-rrm-anl/code/')
    import time_procedures
    matplotlib.use('Agg')
    from matplotlib import pyplot as plt
    import os
    from datetime import timedelta
    import numpy as np

    # Get a Radar object given a time period in the CPOL dataset
    data_path_cpol = '/lcrc/group/earthscience/radar/stage/radar_disk_two/cpol_rapic/'
    out_file_path = '/lcrc/group/earthscience/rjackson/quicklook_plots/berr/'
    out_data_path = '/lcrc/group/earthscience/rjackson/berr/'

    # CPOL in lassen or rapic?
    cpol_format = 1  # 0 = lassen, 1 = rapic

    def get_radar_from_cpol_rapic(time):
        from datetime import timedelta, datetime
        year_str = "%04d" % time.year
        month_str = "%02d" % time.month
        day_str = "%02d" % time.day
        hour_str = "%02d" % time.hour
        minute_str = "%02d" % time.minute
        second_str = "%02d" % time.second
        if (time.year == 2009 or (time.year == 2010 and time.month < 6)):
            dir_str = 'cpol_0910/rapic/'
        else:
            dir_str = 'cpol_1011/rapic/'

        file_name_str = (data_path_cpol + dir_str + year_str + month_str +
                         day_str + hour_str + minute_str + 'Gunn_Pt' +
                         '.rapic')
        radar = pyart.aux_io.read_radx(file_name_str)
        return radar

    one_day_later = rad_date + timedelta(days=1)
    times, dates = time_procedures.get_radar_times_berr_cfradial(
        rad_date.year,
        rad_date.month,
        rad_date.day,
        1,
        1,
        one_day_later.year,
        one_day_later.month,
        one_day_later.day,
        0,
        1,
    )
    print(times)
    for rad_time in times:
        year_str = "%04d" % rad_time.year
        month_str = "%02d" % rad_time.month
        day_str = "%02d" % rad_time.day
        hour_str = "%02d" % rad_time.hour
        minute_str = "%02d" % rad_time.minute
        second_str = "%02d" % rad_time.second

        # Check to see if Cf/Radial file already exists...
        out_path = (out_data_path + '/' + year_str + '/' + month_str + '/' +
                    day_str + '/')
        if not os.path.exists(out_path):
            os.makedirs(out_path)

        out_file = ('BerrimaVol' + year_str + month_str + day_str + hour_str +
                    minute_str + second_str + '_deal.cf')

        if (not os.path.isfile(out_file)):
            try:
                radar = time_procedures.get_radar_from_berr_cfradial(rad_time)
                if (not 'last_Radar' in locals()):
                    last_Radar = radar
                if (cpol_format == 1):
                    ref_field = 'Refl'
                    vel_field = 'Vel'
                else:
                    ref_field = 'reflectivity'
                    vel_field = 'velocity'

                # Get sounding for 4DD initialization
                one_day_ago = rad_time - timedelta(days=1, minutes=1)
                sounding_times = time_procedures.get_sounding_times(
                    one_day_ago.year,
                    one_day_ago.month,
                    one_day_ago.day,
                    one_day_ago.hour,
                    one_day_ago.minute,
                    rad_time.year,
                    rad_time.month,
                    rad_time.day,
                    rad_time.hour,
                    rad_time.minute,
                    minute_interval=60)

                sounding_time = sounding_times[len(sounding_times) - 1]
                Sounding_netcdf = time_procedures.get_sounding(sounding_time)

                # Convert timestamps to datetime format
                Time = Sounding_netcdf.variables['time_offset'][:]
                base_time = Sounding_netcdf.variables['base_time'][:]
                alt = Sounding_netcdf.variables['alt'][:]
                u = Sounding_netcdf.variables['u_wind'][:]
                v = Sounding_netcdf.variables['v_wind'][:]

                Sounding_netcdf.close()
                steps = int(np.floor(len(u) / 70))
                wind_profile = pyart.core.HorizontalWindProfile.from_u_and_v(
                    alt[0::steps], u[0::steps], v[0::steps])

                ## 4DD expects speed, direction but HorizontalWindProfile outputs u_wind, v_wind
                wind_profile.u = wind_profile.u_wind
                wind_profile.v = wind_profile.v_wind

                print(wind_profile)
                # Dealias velocities
                gatefilter = pyart.correct.despeckle.despeckle_field(
                    radar, vel_field)
                gatefilter.exclude_below(ref_field, 0)

                vels = pyart.correct.dealias._create_rsl_volume(radar,
                                                                'Vel',
                                                                0,
                                                                -9999.0,
                                                                excluded=None)
                for i in range(0, 17):
                    sweep = vels.get_sweep(i)
                    ray0 = sweep.get_ray(0)
                    ray50 = sweep.get_ray(50)
                    diff = ray0.azimuth - ray50.azimuth
                    if (diff > 180.0):
                        diff = 360.0 - diff
                    if (abs(diff) / 50.0 < 0.8):
                        print('Corrupt azimuthal angle data....skipping file!')
                        raise Exception('Corrupt azimuthal angles!')

                #corrected_velocity_4dd = pyart.correct.dealias_region_based(radar,
                #                                                            vel_field=vel_field,
                #                                                            gatefilter=gatefilter,
                #                                                            keep_original=False,
                #                                                            centered=True,
                #                                                            skip_between_rays=0,
                #                                                            skip_along_ray=0,
                #                                                            rays_wrap_around=True,
                #                                                            valid_min=-75,
                #                                                            valid_max=75)
                if (last_Radar.nsweeps == radar.nsweeps
                        and not last_Radar == radar):
                    try:
                        corrected_velocity_4dd = pyart.correct.dealias_fourdd(
                            radar,
                            vel_field=vel_field,
                            keep_original=False,
                            last_radar=last_Radar,
                            filt=1,
                            sign=-1)
                    except:
                        corrected_velocity_4dd = pyart.correct.dealias_fourdd(
                            radar,
                            vel_field=vel_field,
                            keep_original=False,
                            filt=1,
                            sonde_profile=wind_profile,
                        )
                else:
                    corrected_velocity_4dd = pyart.correct.dealias_fourdd(
                        radar,
                        vel_field=vel_field,
                        keep_original=False,
                        filt=1,
                        sonde_profile=wind_profile,
                    )

                # Filter out regions based on deviation from sounding field to remove missed folds
                corr_vel = corrected_velocity_4dd['data']
                sim_velocity = radar.fields['sim_velocity']['data']
                diff = corr_vel - radar.fields['sim_velocity']['data']
                diff = diff / (
                    radar.instrument_parameters['nyquist_velocity']['data'][1])
                radar.add_field_like(vel_field,
                                     'corrected_velocity',
                                     corrected_velocity_4dd['data'],
                                     replace_existing=True)

                # Calculate gradient of field
                gradient = pyart.config.get_metadata('velocity')
                gradients = np.ma.array(
                    np.gradient(radar.fields['corrected_velocity']['data']))
                gradients = np.ma.masked_where(gradients < -31000, gradients)
                gradients = gradients / (
                    radar.instrument_parameters['nyquist_velocity']['data'][1])
                gradient['data'] = gradients[0]
                gradient[
                    'standard_name'] = 'gradient_of_corrected_velocity_wrt_azimuth'
                gradient[
                    'units'] = 'meters per second per gate (divided by Vn)'
                radar.add_field('gradient_wrt_angle',
                                gradient,
                                replace_existing=True)

                gradient = pyart.config.get_metadata('velocity')
                gradient['data'] = gradients[1]
                gradient[
                    'standard_name'] = 'gradient_of_corrected_velocity_wrt_range'
                gradient[
                    'units'] = 'meters per second per gate (divided by Vn)'
                radar.add_field('gradient_wrt_range',
                                gradient,
                                replace_existing=True)

                # Filter by gradient
                corr_vel = corrected_velocity_4dd['data']
                corr_vel = np.ma.masked_where(
                    np.logical_or(gradients[0] > 0.3, gradients[0] < -0.3),
                    corr_vel)

                corrected_velocity_4dd['data'] = corr_vel
                radar.add_field_like(vel_field,
                                     'corrected_velocity',
                                     corrected_velocity_4dd['data'],
                                     replace_existing=True)

                time_procedures.write_radar_to_berr_cfradial(radar, rad_time)
                last_Radar = radar
                out_path = (out_file_path + '/' + year_str + '/' + month_str +
                            '/' + day_str + '/')
                if not os.path.exists(out_path):
                    os.makedirs(out_path)

                out_file = hour_str + minute_str + '.png'
                plt.figure(figsize=(7, 14))
                plt.subplot(211)

                display = pyart.graph.RadarMapDisplay(radar)
                display.plot_ppi(ref_field,
                                 sweep=0,
                                 cmap=pyart.graph.cm.NWSRef,
                                 vmin=0,
                                 vmax=70)
                plt.subplot(212)
                display = pyart.graph.RadarMapDisplay(radar)
                display.plot_ppi('corrected_velocity',
                                 sweep=0,
                                 cmap=pyart.graph.cm.NWSVel,
                                 vmin=-30,
                                 vmax=30)
                plt.savefig(out_path + out_file)
                plt.close()
            except:
                import sys
                print('Skipping corrupt time ' + year_str + '-' + month_str +
                      '-' + day_str + ' ' + hour_str + ':' + minute_str)
                print('Exception: ' + str(sys.exc_info()[0]) +
                      str(sys.exc_info()[1]))

def display_time(rad_date):
    import sys
    sys.path.append('/home/rjackson/cmdv-rrm-anl/code/')

    # Get a Radar object given a time period in the CPOL dataset
    data_path_cpol = '/lcrc/group/earthscience/radar/stage/radar_disk_two/cpol_rapic/'
    out_file_path = '/lcrc/group/earthscience/rjackson/quicklook_plots/berr/'
    out_data_path = '/lcrc/group/earthscience/rjackson/berr/'

    # CPOL in lassen or rapic?
    cpol_format = 1  # 0 = lassen, 1 = rapic

    def get_radar_from_cpol_rapic(time):
        from datetime import timedelta, datetime
        year_str = "%04d" % time.year
        month_str = "%02d" % time.month
        day_str = "%02d" % time.day
        hour_str = "%02d" % time.hour
        minute_str = "%02d" % time.minute
        second_str = "%02d" % time.second
        if (time.year == 2009 or (time.year == 2010 and time.month < 6)):
            dir_str = 'cpol_0910/rapic/'
        else:
            dir_str = 'cpol_1011/rapic/'

        file_name_str = (data_path_cpol + dir_str + year_str + month_str +
                         day_str + hour_str + minute_str + 'Gunn_Pt' +
                         '.rapic')
        radar = pyart.aux_io.read_radx(file_name_str)
        return radar

    one_day_later = rad_date + timedelta(days=1)
    times, dates = time_procedures.get_radar_times_berr_cfradial(
        rad_date.year,
        rad_date.month,
        rad_date.day,
        0,
        1,
        one_day_later.year,
        one_day_later.month,
        one_day_later.day,
        0,
        2,
    )
    print(times)
    for rad_time in times:
        year_str = "%04d" % rad_time.year
        month_str = "%02d" % rad_time.month
        day_str = "%02d" % rad_time.day
        hour_str = "%02d" % rad_time.hour
        minute_str = "%02d" % rad_time.minute
        second_str = "%02d" % rad_time.second

        # Check to see if Cf/Radial file already exists...
        out_path = (out_data_path + '/' + year_str + '/' + month_str + '/' +
                    day_str + '/')
        if not os.path.exists(out_path):
            os.makedirs(out_path)

        out_file = ('BerrimaVol' + year_str + month_str + day_str + hour_str +
                    minute_str + second_str + '_deal.cf')

        if (not os.path.isfile(out_file)):
            try:
                radar = time_procedures.get_radar_from_berr_cfradial(rad_time)
                if (not 'last_Radar' in locals()):
                    last_Radar = radar
                if (cpol_format == 1):
                    ref_field = 'Refl'
                    vel_field = 'Vel'
                else:
                    ref_field = 'reflectivity'
                    vel_field = 'velocity'

                # Get sounding for 4DD intialization
                one_day_ago = rad_time - timedelta(days=1, minutes=1)
                sounding_times = time_procedures.get_sounding_times(
                    one_day_ago.year,
                    one_day_ago.month,
                    one_day_ago.day,
                    one_day_ago.hour,
                    one_day_ago.minute,
                    rad_time.year,
                    rad_time.month,
                    rad_time.day,
                    rad_time.hour,
                    rad_time.minute,
                    minute_interval=60)

                sounding_time = sounding_times[len(sounding_times) - 1]
                Sounding_netcdf = time_procedures.get_sounding(sounding_time)

                # Convert timestamps to datetime format
                Time = Sounding_netcdf.variables['time_offset'][:]
                base_time = Sounding_netcdf.variables['base_time'][:]
                alt = Sounding_netcdf.variables['alt'][:]
                u = Sounding_netcdf.variables['u_wind'][:]
                v = Sounding_netcdf.variables['v_wind'][:]

                Sounding_netcdf.close()
                steps = int(np.floor(len(u) / 70))
                wind_profile = pyart.core.HorizontalWindProfile.from_u_and_v(
                    alt[0::steps], u[0::steps], v[0::steps])

                ## 4DD expects speed, direction but HorizontalWindProfile outputs u_wind, v_wind
                wind_profile.u = wind_profile.u_wind
                wind_profile.v = wind_profile.v_wind
                sim_vel = pyart.util.simulated_vel_from_profile(
                    radar, wind_profile, sim_vel_field=vel_field)
                radar.add_field('sim_velocity', sim_vel, replace_existing=True)
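                # The sounding-simulated radial velocity serves as the
                # reference that the per-region Nyquist adjustment below is
                # nudged toward.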

                # Dealias velocities
                gatefilter = pyart.correct.GateFilter(radar)

                gatefilter.exclude_below(ref_field, 0)
                gatefilter.exclude_masked(vel_field)
                gatefilter.exclude_invalid(vel_field)
                gatefilter.exclude_masked(ref_field)
                gatefilter.exclude_invalid(ref_field)
                gatefilter.exclude_above(ref_field, 80)
                gatefilter.exclude_below(vel_field, -75)
                gatefilter.exclude_above(vel_field, 75)

                texture = pyart.retrieve.calculate_velocity_texture(
                    radar, vel_field=vel_field, wind_size=4)
                radar.add_field('velocity_texture',
                                texture,
                                replace_existing=True)
                gatefilter.exclude_above('velocity_texture', 2)
                gatefilter = pyart.correct.despeckle.despeckle_field(
                    radar, vel_field, gatefilter=gatefilter, size=25)

                # Calculate result from region based dealiasing
                corrected_velocity_region = pyart.correct.dealias_region_based(
                    radar,
                    vel_field=vel_field,
                    keep_original=False,
                    centered=True,
                    gatefilter=gatefilter,
                    interval_splits=6,
                    skip_between_rays=2000,
                    skip_along_ray=2000,
                    rays_wrap_around=True,
                    valid_min=-75,
                    valid_max=75)
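                # Refine the region-based result sweep by sweep: re-label the
                # velocity regions and shift each one by whole Nyquist
                # co-intervals toward the sounding-simulated velocity.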
                gfilter = gatefilter.gate_excluded
                vels = deepcopy(corrected_velocity_region['data'])
                vels_uncorr = radar.fields[vel_field]['data']
                sim_vels = radar.fields['sim_velocity']['data']
                v_nyq_vel = radar.instrument_parameters['nyquist_velocity'][
                    'data'][0]
                region_means = []
                regions = np.zeros(vels.shape)
                for nsweep, sweep_slice in enumerate(radar.iter_slice()):
                    sfilter = gfilter[sweep_slice]
                    vels_slice = vels[sweep_slice]
                    svels_slice = sim_vels[sweep_slice]
                    vels_uncorrs = vels_uncorr[sweep_slice]
                    valid_sdata = vels_uncorrs[~sfilter]
                    int_splits = pyart.correct.region_dealias._find_sweep_interval_splits(
                        v_nyq_vel, 3, valid_sdata, nsweep)
                    regions[
                        sweep_slice], nfeatures = pyart.correct.region_dealias._find_regions(
                            vels_uncorrs, sfilter, limits=int_splits)

                    ## Minimize a cost function: the sum over regions of the
                    ## difference between each region's mean velocity and the
                    ## sounding-simulated velocity, with whole Nyquist
                    ## co-intervals as the free parameters.
                    def cost_function(nyq_vector):
                        cost = 0
                        i = 0
                        for reg in np.unique(regions[sweep_slice]):
                            add_value = np.abs(
                                np.ma.mean(vels_slice[regions[sweep_slice] ==
                                                      reg]) +
                                nyq_vector[i] * 2 * v_nyq_vel - np.ma.mean(
                                    svels_slice[regions[sweep_slice] == reg]))

                            if (np.isfinite(add_value)):
                                cost += add_value
                            i = i + 1
                        return cost

                    def gradient(nyq_vector):
                        gradient_vector = np.zeros(len(nyq_vector))
                        i = 0
                        for reg in np.unique(regions[sweep_slice]):
                            add_value = (
                                np.ma.mean(
                                    vels_slice[regions[sweep_slice] == reg]) +
                                nyq_vector[i] * 2 * v_nyq_vel - np.ma.mean(
                                    svels_slice[regions[sweep_slice] == reg]))
                            if (add_value > 0):
                                gradient_vector[i] = 2 * v_nyq_vel
                            else:
                                gradient_vector[i] = -2 * v_nyq_vel
                            i = i + 1
                        return gradient_vector

                    bounds_list = [
                        (x, y)
                        for (x, y) in zip(-5 * np.ones(nfeatures + 1), 5 *
                                          np.ones(nfeatures + 1))
                    ]
                    nyq_adjustments = fmin_l_bfgs_b(cost_function,
                                                    np.zeros((nfeatures + 1)),
                                                    disp=True,
                                                    fprime=gradient,
                                                    bounds=bounds_list,
                                                    maxiter=30)
                    # Apply the optimized number of Nyquist co-intervals
                    # (2 * Vn per fold, matching the cost function above) to
                    # every gate in each region.
                    i = 0
                    for reg in np.unique(regions[sweep_slice]):
                        vels_slice[regions[sweep_slice] == reg] += (
                            2 * v_nyq_vel * np.round(nyq_adjustments[0][i]))
                        i = i + 1
                    vels[sweep_slice] = vels_slice

                corrected_velocity_region['data'] = vels

                radar.add_field_like(vel_field,
                                     'corrected_velocity',
                                     corrected_velocity_region['data'],
                                     replace_existing=True)

                time_procedures.write_radar_to_berr_cfradial(radar, rad_time)
                last_Radar = radar
                out_path = (out_file_path + '/' + year_str + '/' + month_str +
                            '/' + day_str + '/')
                if not os.path.exists(out_path):
                    os.makedirs(out_path)

                out_file = hour_str + minute_str + '.png'
                plt.figure(figsize=(7, 21))
                plt.subplot(411)

                display = pyart.graph.RadarMapDisplay(radar)
                display.plot_ppi(ref_field,
                                 sweep=0,
                                 cmap=pyart.graph.cm.NWSRef,
                                 vmin=0,
                                 vmax=70,
                                 gatefilter=gatefilter)
                plt.subplot(412)
                display.plot_ppi('corrected_velocity',
                                 sweep=0,
                                 cmap=pyart.graph.cm.NWSVel,
                                 vmin=-30,
                                 vmax=30,
                                 gatefilter=gatefilter)
                plt.subplot(413)
                display.plot_ppi('sim_velocity',
                                 sweep=0,
                                 cmap=pyart.graph.cm.NWSVel,
                                 vmin=-30,
                                 vmax=30)
                plt.subplot(414)
                display.plot_ppi('velocity_texture',
                                 sweep=0,
                                 cmap=pyart.graph.cm.NWSVel,
                                 vmin=0,
                                 vmax=10)
                plt.savefig(out_path + out_file)
                plt.close()
            except:
                import sys
                print('Skipping corrupt time ' + year_str + '-' + month_str +
                      '-' + day_str + ' ' + hour_str + ':' + minute_str)
                print('Exception: ' + str(sys.exc_info()[0]) +
                      str(sys.exc_info()[1]))
                print(str(sys.exc_info()[2].tb_lineno))

def display_time(rad_date):
    import pyart
    import matplotlib
    import sys
    sys.path.append('/home/rjackson/cmdv-rrm-anl/code/')
    import time_procedures
    matplotlib.use('Agg')
    from matplotlib import pyplot as plt
    import os
    from datetime import timedelta
    from scipy.stats import gaussian_kde
    from scipy.signal import argrelextrema

    # Get a Radar object given a time period in the CPOL dataset
    data_path_cpol = '/lcrc/group/earthscience/radar/stage/radar_disk_two/cpol_rapic/'
    out_file_path = '/lcrc/group/earthscience/rjackson/quicklook_plots/'
    out_data_path = '/lcrc/group/earthscience/rjackson/cpol/'

    # CPOL in lassen or rapic?
    cpol_format = 1    # 0 = lassen, 1 = rapic
    
    def get_radar_from_cpol_rapic(time):
        from datetime import timedelta, datetime
        year_str = "%04d" % time.year
        month_str = "%02d" % time.month
        day_str = "%02d" % time.day
        hour_str = "%02d" % time.hour
        minute_str = "%02d" % time.minute
        second_str = "%02d" % time.second
        if(time.year == 2009 or (time.year == 2010 and
                                 time.month < 6)):
            dir_str = 'cpol_0910/rapic/'
        else:
            dir_str = 'cpol_1011/rapic/'

        file_name_str = (data_path_cpol + dir_str + year_str + month_str +
                         day_str + hour_str + minute_str + 'Gunn_Pt' +
                         '.rapic')
        radar = pyart.aux_io.read_radx(file_name_str)
        return radar 

    def kde_scipy(x, x_grid, bandwidth=0.2, **kwargs):
        """Kernel Density Estimation with Scipy"""
        # Note that scipy weights its bandwidth by the covariance of the
        # input data.  To make the results comparable to the other methods,
        # we divide the bandwidth by the sample standard deviation here.
        kde = gaussian_kde(x, bw_method=bandwidth / x.std(ddof=1), **kwargs)
        return kde.evaluate(x_grid)
    
    one_day_later = rad_date+timedelta(days=1)
    times, dates = time_procedures.get_radar_times_cpol_cfradial(rad_date.year, 
                                                                 rad_date.month,
                                                                 rad_date.day,
                                                                 1, 
                                                                 1,
                                                                 one_day_later.year, 
                                                                 one_day_later.month,
                                                                 one_day_later.day,
                                                                 0, 
                                                                 1,
                                                                 )
    print(times)
    for rad_time in times:
        year_str = "%04d" % rad_time.year
        month_str = "%02d" % rad_time.month
        day_str = "%02d" % rad_time.day
        hour_str = "%02d" % rad_time.hour
        minute_str = "%02d" % rad_time.minute
        second_str = "%02d" % rad_time.second

        # Check to see if Cf/Radial file already exists...
        out_path = (out_data_path + '/' + year_str + '/' + month_str + '/' +
                    day_str + '/')
        if not os.path.exists(out_path):
            os.makedirs(out_path)

        try:  
            radar = time_procedures.get_radar_from_cpol_cfradial(rad_time)
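            # Initialize last_Radar on the first pass (as the other
            # display_time variants do) so the nsweeps comparison below does
            # not raise a NameError.
            if 'last_Radar' not in locals():
                last_Radar = radar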
            if(cpol_format == 1):
                ref_field = 'Refl'
                vel_field = 'Vel'
            else:
                ref_field = 'reflectivity'
                vel_field = 'velocity'

            # Get sounding for 4DD intialization
            one_day_ago = rad_time-timedelta(days=1, minutes=1)
            sounding_times = time_procedures.get_sounding_times(one_day_ago.year,
                                                                one_day_ago.month,
                                                                one_day_ago.day,
                                                                one_day_ago.hour,
                                                                one_day_ago.minute,
                                                                rad_time.year,
                                                                rad_time.month,
                                                                rad_time.day,
                                                                rad_time.hour,
                                                                rad_time.minute,
                                                                minute_interval=60)
             
            sounding_time = sounding_times[len(sounding_times)-1]
            Sounding_netcdf = time_procedures.get_sounding(sounding_time)
            # Convert timestamps to datetime format
            Time = Sounding_netcdf.variables['time_offset'][:]
            base_time = Sounding_netcdf.variables['base_time'][:]
            alt = Sounding_netcdf.variables['alt'][:]
            u = Sounding_netcdf.variables['u_wind'][:]
            v = Sounding_netcdf.variables['v_wind'][:]
            Sounding_netcdf.close()
            steps = int(np.floor(len(u) / 50))
            wind_profile = pyart.core.HorizontalWindProfile.from_u_and_v(
                alt[0::steps], u[0::steps], v[0::steps])
               
            ## 4DD expects speed, direction but HorizontalWindProfile outputs u_wind, v_wind
            wind_profile.u = wind_profile.u_wind
            wind_profile.v = wind_profile.v_wind
               
            print(wind_profile)
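            # Build the simulated radial velocity field from the sounding
            # profile (as the other display_time variants do); the
            # 'velocity_diff' computation below expects it.
            sim_vel = pyart.util.simulated_vel_from_profile(
                radar, wind_profile, sim_vel_field=vel_field)
            radar.add_field('sim_velocity', sim_vel, replace_existing=True)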
            # Dealias velocities
            gatefilter = pyart.correct.despeckle.despeckle_field(radar,
                                                                 vel_field)
            gatefilter.exclude_below(ref_field, 0)
            vels = pyart.correct.dealias._create_rsl_volume(radar,
                                                            vel_field,
                                                            0,
                                                            -9999.0,
                                                            excluded=None)
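            # Discontinuities in azimuthal angles due to corrupt data make 4DD
            # segfault, so reject volumes whose azimuths barely advance.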
            for i in range(0, radar.nsweeps - 1):
                sweep = vels.get_sweep(i)
                ray0 = sweep.get_ray(0)
                ray50 = sweep.get_ray(50)
                diff = ray0.azimuth-ray50.azimuth 
                if(diff > 180.0):
                    diff = 360.0 - diff    
                if(abs(diff)/50.0 < 0.8):
                    print('Corrupt azimuthal angle data....skipping file!')
                    raise Exception('Corrupt azimuthal angles!')     
                
            #corrected_velocity_4dd = pyart.correct.dealias_region_based(radar,
            #                                                            vel_field=vel_field,
            #                                                            gatefilter=gatefilter,
            #                                                            keep_original=False,
            #                                                            centered=True,
            #                                                            skip_between_rays=0,
            #                                                            skip_along_ray=0,
            #                                                            rays_wrap_around=True,
            #                                                            valid_min=-75,
            #                                                            valid_max=75)
            if (last_Radar.nsweeps == radar.nsweeps):
                try:
                    corrected_velocity_4dd = pyart.correct.dealias_fourdd(
                        radar,
                        vel_field=vel_field,
                        keep_original=False,
                        last_radar=last_Radar,
                        filt=1,
                        sonde_profile=wind_profile,
                    )
                except:
                    corrected_velocity_4dd = pyart.correct.dealias_fourdd(
                        radar,
                        vel_field=vel_field,
                        keep_original=False,
                        filt=1,
                        sonde_profile=wind_profile,
                    )
            else:
                corrected_velocity_4dd = pyart.correct.dealias_fourdd(radar,
                                                                      vel_field=vel_field,
                                                                      keep_original=False,
                                                                      filt=1,
                                                                      sonde_profile=wind_profile,
                                                                      )

            radar.add_field_like(vel_field,
                                 'corrected_velocity',
                                 corrected_velocity_4dd['data'],
                                 replace_existing=True)
 
            # Calculate gradient of field
            gradient = pyart.config.get_metadata('velocity')
            gradients = np.ma.array(
                np.gradient(radar.fields['corrected_velocity']['data']))
            gradients = np.ma.masked_where(gradients < -31000, gradients)
            gradients = gradients / (
                radar.instrument_parameters['nyquist_velocity']['data'][1])
            gradient['data'] = gradients[0]
            gradient['standard_name'] = 'gradient_of_corrected_velocity_wrt_azimuth'
            gradient['units'] = 'meters per second per gate (divided by Vn)'
            radar.add_field('gradient_wrt_angle',
                            gradient,
                            replace_existing=True)

            gradient = pyart.config.get_metadata('velocity')
            gradient['data'] = gradients[1]
            gradient['standard_name'] = 'gradient_of_corrected_velocity_wrt_range'
            gradient['units'] = 'meters per second per gate (divided by Vn)'
            radar.add_field('gradient_wrt_range',
                            gradient,
                            replace_existing=True)

            # Calculate difference from simulated velocity
            diff = radar.fields['corrected_velocity']['data'] - radar.fields['sim_velocity']['data']
            diff = diff/(radar.instrument_parameters['nyquist_velocity']['data'][1])     
            radar.add_field_like('sim_velocity', 
                                 'velocity_diff', 
                                 diff, 
                                 replace_existing=True)    
            
            # Filter by gradient
            corr_vel = corrected_velocity_4dd['data']
            corr_vel = np.ma.masked_where(
                np.logical_or(gradients[0] > 0.3, gradients[0] < -0.3),
                corr_vel)

            corrected_velocity_4dd['data'] = corr_vel
            radar.add_field_like(vel_field,
                                 'corrected_velocity',
                                 corrected_velocity_4dd['data'],
                                 replace_existing=True)
            # Save to Cf/Radial file
            time_procedures.write_radar_to_cpol_cfradial(radar, rad_time)
            last_Radar = radar
            out_path = (out_file_path + '/' + year_str + '/' + month_str +
                        '/' + day_str + '/')

def display_time(rad_date):
    import pyart
    import matplotlib
    import sys
    sys.path.append('/home/rjackson/cmdv-rrm-anl/code/')
    import time_procedures
    matplotlib.use('Agg')
    from matplotlib import pyplot as plt
    import os
    from datetime import timedelta
    from scipy.stats import gaussian_kde
    from scipy.signal import argrelextrema
    from numba import jit

    # Get a Radar object given a time period in the CPOL dataset
    data_path_cpol = '/lcrc/group/earthscience/radar/stage/radar_disk_two/cpol_rapic/'
    out_file_path = '/lcrc/group/earthscience/rjackson/quicklook_plots/cpol/'
    out_data_path = '/lcrc/group/earthscience/rjackson/cpol/'

    # CPOL in lassen or rapic?
    cpol_format = 1  # 0 = lassen, 1 = rapic

    def get_radar_from_cpol_rapic(time):
        from datetime import timedelta, datetime
        year_str = "%04d" % time.year
        month_str = "%02d" % time.month
        day_str = "%02d" % time.day
        hour_str = "%02d" % time.hour
        minute_str = "%02d" % time.minute
        second_str = "%02d" % time.second
        if (time.year == 2009 or (time.year == 2010 and time.month < 6)):
            dir_str = 'cpol_0910/rapic/'
        else:
            dir_str = 'cpol_1011/rapic/'

        file_name_str = (data_path_cpol + dir_str + year_str + month_str +
                         day_str + hour_str + minute_str + 'Gunn_Pt' +
                         '.rapic')
        radar = pyart.aux_io.read_radx(file_name_str)
        return radar

    def extract_unmasked_data(radar, field, bad=-32768):
        """Simplify getting unmasked radar fields from Py-ART"""
        return deepcopy(radar.fields[field]['data'].filled(fill_value=bad))

    @jit(nopython=True, cache=True)
    def get_fold_position(the_phidp):
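        """Return, for each ray, the first gate (beyond gate 70) where PHIDP
        drops more than 30 degrees below its running maximum, i.e. the start
        of a PHIDP fold (0 if no fold is found)."""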

        tmp = the_phidp
        rth_pos = np.zeros((tmp.shape[0]), dtype=np.int32)
        for j in range(tmp.shape[0]):
            max_phidp = -32768
            for i in range(70, tmp.shape[1]):
                if (the_phidp[j, i] < max_phidp - 30.0
                        and the_phidp[j, i] > -1000.0):
                    rth_pos[j] = i
                    break
                if (the_phidp[j, i] > max_phidp):
                    max_phidp = the_phidp[j, i]
        return rth_pos

    @jit(nopython=True, cache=True)
    def unfold_phidp(the_phidp, rth_position):
        tmp = the_phidp
        for j in range(len(rth_position)):
            i = rth_position[j]
            if i == 0:
                continue
            else:
                tmp[j, i:] += 180
        return tmp

    @jit(cache=True)
    def refold_vdop(vdop_art, v_nyq_vel, rth_position):
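        """Shift Doppler velocities beyond the PHIDP fold position up by one
        Nyquist velocity, then wrap any value above +Vn back into the
        [-Vn, +Vn] interval."""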
        tmp = vdop_art
        for j in range(len(rth_position)):
            i = rth_position[j]
            if i == 0:
                continue
            else:
                tmp[j, i:] += v_nyq_vel

        pos = (vdop_art > v_nyq_vel)
        tmp[pos] = tmp[pos] - 2 * v_nyq_vel
        return tmp

    one_day_later = rad_date + timedelta(days=1)
    times, dates = time_procedures.get_radar_times_cpol_cfradial(
        rad_date.year,
        rad_date.month,
        rad_date.day,
        0,
        1,
        one_day_later.year,
        one_day_later.month,
        one_day_later.day,
        0,
        2,
    )

    print(times)
    rerun_multidop_list = open('/home/rjackson/grids_to_regenerate/folds' +
                               str(rad_date.year) + str(rad_date.month) +
                               str(rad_date.day),
                               mode='w+')
    for rad_time in times:
        year_str = "%04d" % rad_time.year
        month_str = "%02d" % rad_time.month
        day_str = "%02d" % rad_time.day
        hour_str = "%02d" % rad_time.hour
        minute_str = "%02d" % rad_time.minute
        second_str = "%02d" % rad_time.second

        # Check to see if Cf/Radial file already exists...
        out_path = (out_data_path + '/' + year_str + '/' + month_str + '/' +
                    day_str + '/')
        if not os.path.exists(out_path):
            os.makedirs(out_path)

        try:
            radar = time_procedures.get_radar_from_cpol_cfradial(rad_time)
            if (rad_time.year > 2007):
                ref_field = 'Refl'
                vel_field = 'Vel'
                ref_threshold = 0
                rhohv_field = 'RHOHV'
                phidp_field = 'PHIDP'
            else:
                ref_field = 'reflectivity'
                vel_field = 'velocity'
                ref_threshold = 5
                rhohv_field = 'cross_correlation_ratio'
                phidp_field = 'differential_phase'
            if (radar.nsweeps == 1):
                raise Exception('Radar only has one sweep!')
            if (not 'last_Radar' in locals()):
                last_Radar = radar

            # Get sounding for 4DD intialization
            one_day_ago = rad_time - timedelta(days=1, minutes=1)
            sounding_times = time_procedures.get_sounding_times(
                one_day_ago.year,
                one_day_ago.month,
                one_day_ago.day,
                one_day_ago.hour,
                one_day_ago.minute,
                rad_time.year,
                rad_time.month,
                rad_time.day,
                rad_time.hour,
                rad_time.minute,
                minute_interval=60)

            sounding_time = sounding_times[len(sounding_times) - 1]
            Sounding_netcdf = time_procedures.get_sounding(sounding_time)
            # Convert timestamps to datetime format
            Time = Sounding_netcdf.variables['time_offset'][:]
            base_time = Sounding_netcdf.variables['base_time'][:]
            alt = Sounding_netcdf.variables['alt'][:]
            u = Sounding_netcdf.variables['u_wind'][:]
            v = Sounding_netcdf.variables['v_wind'][:]
            Sounding_netcdf.close()
            steps = int(np.floor(len(u) / 50))
            wind_profile = pyart.core.HorizontalWindProfile.from_u_and_v(
                alt[0::steps], u[0::steps], v[0::steps])

            ## 4DD expects speed, direction but HorizontalWindProfile outputs u_wind, v_wind
            wind_profile.u = wind_profile.u_wind
            wind_profile.v = wind_profile.v_wind
            nyq = radar.instrument_parameters['nyquist_velocity']['data'][0]
            print(wind_profile)
            # Dealias velocities
            gatefilter = pyart.correct.GateFilter(radar)
            # Region-based dealiasing hangs on pre-2006 noise with Z < 10 dBZ,
            # so threshold on reflectivity first
            gatefilter.exclude_below(ref_field, ref_threshold)
            try:
                gatefilter.exclude_below(rhohv_field, 0.6)
            except KeyError:
                print('No rho HV data! Disabling Rho HV mask!')
            gatefilter.exclude_masked(vel_field)
            gatefilter.exclude_invalid(vel_field)
            gatefilter.exclude_masked(ref_field)
            gatefilter.exclude_invalid(ref_field)
            gatefilter.exclude_above(ref_field, 80)
            gatefilter.exclude_below(vel_field, -75)
            gatefilter.exclude_above(vel_field, 75)
            # Look for phiDP folds and correct velocities
            try:
                dz = radar.fields[ref_field]['data']
                dp = radar.fields[phidp_field]['data']
                r, azi = radar.range['data'], radar.azimuth['data']
                rng2d, az2d = np.meshgrid(radar.range['data'],
                                          radar.azimuth['data'])
                kdN, fdN, sdN = csu_kdp.calc_kdp_bringi(dp=dp,
                                                        dz=dz,
                                                        rng=rng2d / 1000.0,
                                                        thsd=24,
                                                        gs=250.0,
                                                        window=4)

                fdN = np.ma.MaskedArray(fdN)
                fdN[fdN == -32768] = np.ma.masked

                phidp_fold = deepcopy(fdN)
                phidp_fold[phidp_fold.mask == True] = np.nan
                rth = get_fold_position(phidp_fold)

                vdop_art = deepcopy(radar.fields[vel_field]['data'])
                v_nyq_vel = radar.instrument_parameters['nyquist_velocity'][
                    'data'][0]

                vdop_refolded = refold_vdop(vdop_art, v_nyq_vel, rth)
                radar.add_field_like(vel_field,
                                     'vdop_phidp_fold_corrected',
                                     vdop_refolded,
                                     replace_existing=True)
                radar.add_field_like(phidp_field,
                                     'phidp_bringi',
                                     phidp_fold,
                                     replace_existing=True)
                vel_field = 'vdop_phidp_fold_corrected'
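                # All subsequent dealiasing operates on the refolded
                # velocities.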

            # Flag folds that start inside the dual-Doppler domain
            # (13.5-80 km from CPOL) and log the time for multidop reprocessing
                is_fold = 0
                for fold_starts in radar.range['data'][rth]:
                    if (fold_starts > 13500 and fold_starts < 80000
                            and is_fold == 0):
                        print('Fold in DD domain at ' +
                              str(fold_starts / 1e3) + ' km')
                        print(rad_time.strftime('%m/%d/%Y %H:%M:%S'))
                        rerun_multidop_list.write(hour_str + ':' + minute_str +
                                                  '\n')
                        is_fold = 1
                gatefilter.exclude_masked('phidp_bringi')
            except:
                import sys
                exc_type, exc_obj, exc_tb = sys.exc_info()
                print('Cannot run phiDP unfolding!')
                print('Exception: ' + str(sys.exc_info()[0]) +
                      str(sys.exc_info()[1]) + str(exc_tb.tb_lineno))

            print('Running 4DD...')
            vels = pyart.correct.dealias._create_rsl_volume(radar,
                                                            vel_field,
                                                            0,
                                                            -9999.0,
                                                            excluded=None)

            # Discontinuities in azimuthal angles due to corrupt data make 4DD segfault
            for i in range(0, radar.nsweeps - 1):
                sweep = vels.get_sweep(i)
                ray0 = sweep.get_ray(0)
                ray50 = sweep.get_ray(50)
                diff = ray0.azimuth - ray50.azimuth
                if (diff > 180.0):
                    diff = 360.0 - diff
                if (abs(diff) / 50.0 < 0.5):
                    print('Corrupt azimuthal angle data....skipping file!')
                    raise Exception('Corrupt azimuthal angles!')

            if (last_Radar.nsweeps == radar.nsweeps
                    and not last_Radar == radar):
                try:
                    corrected_velocity_4dd = pyart.correct.dealias_fourdd(
                        radar,
                        vel_field=vel_field,
                        keep_original=False,
                        last_radar=last_Radar,
                        filt=1,
                        sonde_profile=wind_profile,
                        gatefilter=gatefilter,
                    )
                except:
                    corrected_velocity_4dd = pyart.correct.dealias_fourdd(
                        radar,
                        vel_field=vel_field,
                        keep_original=False,
                        filt=1,
                        sonde_profile=wind_profile,
                        gatefilter=gatefilter,
                    )
            else:
                corrected_velocity_4dd = pyart.correct.dealias_fourdd(
                    radar,
                    vel_field=vel_field,
                    keep_original=False,
                    filt=1,
                    sonde_profile=wind_profile,
                    gatefilter=gatefilter,
                )
            print('Running region based dealiasing...')
            # Calculate result from region based dealiasing
            corrected_velocity_region = pyart.correct.dealias_region_based(
                radar,
                vel_field=vel_field,
                keep_original=False,
                centered=True,
                gatefilter=gatefilter,
                interval_splits=6,
                skip_between_rays=2000,
                skip_along_ray=2000,
                rays_wrap_around=True,
                valid_min=-75,
                valid_max=75,
                nyquist_velocity=nyq)
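            # Cross-check the two dealiasing results: mask 4DD gates that
            # disagree with the region-based estimate by more than about 5%
            # (ratio outside 0.95-1.05) to catch residual folds.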
            corr_vel = corrected_velocity_4dd['data']
            difference = corr_vel / corrected_velocity_region['data']
            corr_vel = np.ma.masked_where(
                np.logical_or(difference > 1.05, difference < 0.95), corr_vel)
            corrected_velocity_4dd['data'] = corr_vel
            radar.add_field_like(vel_field,
                                 'corrected_velocity',
                                 corrected_velocity_4dd['data'],
                                 replace_existing=True)

            # Save to Cf/Radial file
            time_procedures.write_radar_to_cpol_cfradial(radar, rad_time)
            last_Radar = radar
            out_path = (out_file_path + '/' + year_str + '/' + month_str +
                        '/' + day_str + '/')
            if not os.path.exists(out_path):
                os.makedirs(out_path)

            out_file = hour_str + minute_str + '.png'
            plt.figure(figsize=(7, 24))
            plt.subplot(311)
            display = pyart.graph.RadarMapDisplay(radar)
            display.plot_ppi(ref_field,
                             gatefilter=gatefilter,
                             sweep=0,
                             cmap=pyart.graph.cm.NWSRef,
                             vmin=0,
                             vmax=70)
            plt.subplot(312)
            display = pyart.graph.RadarMapDisplay(radar)
            display.plot_ppi('corrected_velocity',
                             gatefilter=gatefilter,
                             sweep=0,
                             cmap=pyart.graph.cm.NWSVel,
                             vmin=-30,
                             vmax=30)
            plt.subplot(313)
            display = pyart.graph.RadarMapDisplay(radar)
            display.plot_ppi('phidp_bringi',
                             gatefilter=gatefilter,
                             sweep=0,
                             cmap='jet',
                             vmin=-360,
                             vmax=360)
            plt.savefig(out_path + out_file)
            plt.close()
        except:
            import sys
            exc_type, exc_obj, exc_tb = sys.exc_info()
            print('Skipping corrupt time ' + year_str + '-' + month_str + '-' +
                  day_str + ' ' + hour_str + ':' + minute_str)
            print('Exception: ' + str(sys.exc_info()[0]) +
                  str(sys.exc_info()[1]) + str(exc_tb.tb_lineno))
    rerun_multidop_list.close()