def reflectivity_monthly_distribution():
    """Build per-day reflectivity arrays from an archive of NEXRAD files.

    For each day-of-year 159-243 of 2014: download the day's NEXRAD archive
    (via a pre-placed NOAA HAS order), keep only the Level 3 products,
    convert them to NetCDF, grid every scan onto a 100x100 Cartesian
    reflectivity field, then save two .npy files into the day's folder:
    ``reflectivity_array_<yy>_<doy>.npy`` (n_scans x 100 x 100) and
    ``time_array_<yy>_<doy>.npy`` (scan-end times, HH:MM:SS strings).
    The intent (per the original notes) is to locate rainy days and compute
    the monthly rainfall distribution over the prediction domain.

    Side effects: creates directories under ../data/RadarData/NEXRAD/,
    deletes the intermediate NetCDF scan files after gridding, and
    temporarily changes the working directory during the FTP download.
    Returns None.
    """
    DFW = DFWnet.CommonData()
    reflectivity = BuildDataSet.reflectivity_fields()
    # NOAA HAS order IDs keyed by two-digit year; used by the FTP fetch.
    # (The original also had a dead `order_dict = {}` that was immediately
    # overwritten -- removed.)
    order_dict = {14: 'HAS010777764', 15: 'HAS010777767'}
    initial = os.getcwd()
    for yr in [14]:
        for d in range(159, 244):
            DFW.doytodate(int(yr), d)
            new_dir = ('../data/RadarData/NEXRAD/20' + str(yr) + os.sep
                       + Months[int(DFW.mon) - 1] + DFW.day + os.sep)
            # makedirs(exist_ok=True) replaces the racy exists()/mkdir pair
            # and also creates any missing parent directories.
            os.makedirs(new_dir, exist_ok=True)
            os.chdir(new_dir)
            try:
                reflectivity.FTPNEXRADfile(DFW.mon, DFW.day, DFW.yr,
                                           order_dict[yr])
            finally:
                # Restore the working directory even if the download raises,
                # so later iterations' relative paths stay correct.
                os.chdir(initial)
            reflectivity.keepLevel3files(new_dir)
            reflectivity.ConvertToNETCDF(new_dir)
            # Sort so axis 0 of the output is chronological (os.listdir
            # order is arbitrary), and skip .npy outputs left by a previous
            # run, which are not NetCDF and would crash Dataset().
            file_list = sorted(f for f in os.listdir(new_dir)
                               if not f.endswith('.npy'))
            # One 100x100 Cartesian grid per scan file for this day.
            out_array = np.zeros((len(file_list), 100, 100))
            time_array = []
            for i, fl in enumerate(file_list):
                rad = Dataset(new_dir + fl)
                out_array[i, ...] = (
                    reflectivity.reflectivity_polar_to_cartesian(rad))
                # Keep only the HH:MM:SS part of the ISO timestamp.
                time_array.append(rad.time_coverage_end.split('T')[1])
                # Close before removing: avoids leaking handles and lets the
                # remove succeed on platforms that lock open files.
                rad.close()
                os.remove(new_dir + fl)
            np.save(new_dir + 'reflectivity_array_' + str(yr) + '_'
                    + str(d) + '.npy', out_array)
            np.save(new_dir + 'time_array_' + str(yr) + '_' + str(d)
                    + '.npy', time_array)
def reflectivity_monthly_distribution():
    """Populate the data set with days where rainfall was present.

    Walks day-of-year 159-243 of 2014, fetches each day's NEXRAD archive
    from the NOAA HAS order, keeps the Level 3 products, converts them to
    NetCDF, grids every scan onto a 100x100 Cartesian reflectivity field,
    and stores one reflectivity array plus the matching scan-end times per
    day as .npy files in that day's folder. Intended to support computing
    the monthly rainfall distribution over the prediction domain.
    """
    DFW = DFWnet.CommonData()
    reflectivity = BuildDataSet.reflectivity_fields()
    # HAS order IDs, keyed by two-digit year.
    order_dict = {14: 'HAS010777764', 15: 'HAS010777767'}
    start_dir = os.getcwd()
    for yr in [14]:
        for day_of_year in range(159, 244):
            DFW.doytodate(int(yr), day_of_year)
            month_name = Months[int(DFW.mon) - 1]
            day_dir = ('../data/RadarData/NEXRAD/20' + str(yr) + os.sep
                       + month_name + DFW.day + os.sep)
            if not os.path.exists(day_dir):
                os.mkdir(day_dir)
            # Download happens relative to the day's folder, then we hop
            # back to where we started.
            os.chdir(day_dir)
            reflectivity.FTPNEXRADfile(DFW.mon, DFW.day, DFW.yr,
                                       order_dict[yr])
            os.chdir(start_dir)
            reflectivity.keepLevel3files(day_dir)
            reflectivity.ConvertToNETCDF(day_dir)
            scans = os.listdir(day_dir)
            # One 100x100 grid per scan file found for this day.
            fields = np.zeros((len(scans), 100, 100))
            end_times = []
            for idx, scan_name in enumerate(scans):
                rad = Dataset(day_dir + scan_name)
                fields[idx, ...] = (
                    reflectivity.reflectivity_polar_to_cartesian(rad))
                # Time portion (after the 'T') of the ISO end timestamp.
                end_times.append(rad.time_coverage_end.split('T')[1])
                os.remove(day_dir + scan_name)
            tag = str(yr) + '_' + str(day_of_year)
            np.save(day_dir + 'reflectivity_array_' + tag + '.npy', fields)
            np.save(day_dir + 'time_array_' + tag + '.npy', end_times)