def test_find_file_desire_unzipped_allow_unzipped_raise(self):
    """Ensures correct output from find_classification_file.

    In this case, desire_zipped = False; allow_zipped_or_unzipped = False;
    and raise_error_if_missing = True.
    """

    # The unzipped file does not exist, so with raise_error_if_missing=True
    # the call is expected to raise ValueError.
    with self.assertRaises(ValueError):
        echo_classifn.find_classification_file(
            top_directory_name=TOP_DIRECTORY_NAME,
            valid_time_unix_sec=VALID_TIME_UNIX_SEC, desire_zipped=False,
            allow_zipped_or_unzipped=False, raise_error_if_missing=True)
def test_find_file_desire_unzipped_allow_unzipped_no_raise(self):
    """Ensures correct output from find_classification_file.

    In this case, desire_zipped = False; allow_zipped_or_unzipped = False;
    and raise_error_if_missing = False.
    """

    # With raise_error_if_missing=False the method should return the expected
    # unzipped file name even though the file does not exist.
    this_file_name = echo_classifn.find_classification_file(
        top_directory_name=TOP_DIRECTORY_NAME,
        valid_time_unix_sec=VALID_TIME_UNIX_SEC, desire_zipped=False,
        allow_zipped_or_unzipped=False, raise_error_if_missing=False)

    self.assertTrue(this_file_name == CLASSIFN_FILE_NAME_UNZIPPED)
def _run_for_myrorss(spc_date_string, top_radar_dir_name_tarred,
                     top_radar_dir_name_untarred, top_output_dir_name,
                     option_dict):
    """Runs echo classification for MYRORSS data.

    For the given SPC date, this method (1) optionally untars raw reflectivity
    data; (2) reads reflectivity at each height in RADAR_HEIGHTS_M_ASL for
    each valid time; (3) runs echo classification on a coarsened (every other
    row/column) grid; (4) upsamples the classifications back to the fine grid
    and writes them to gzipped NetCDF files; (5) optionally removes the
    untarred data.

    :param spc_date_string: See documentation at top of file.
    :param top_radar_dir_name_tarred: Same.
    :param top_radar_dir_name_untarred: Same.
    :param top_output_dir_name: Same.
    :param option_dict: See doc for
        `echo_classification.find_convective_pixels`.
    """

    # Empty string or the literal string 'None' means "no tarred directory".
    if top_radar_dir_name_tarred in ['', 'None']:
        top_radar_dir_name_tarred = None

    if top_radar_dir_name_tarred is not None:
        # Tar files are organized as <top_dir>/<year>/<SPC date>.tar, where
        # the year is the first 4 characters of the SPC date (yyyymmdd).
        tar_file_name = '{0:s}/{1:s}/{2:s}.tar'.format(
            top_radar_dir_name_tarred, spc_date_string[:4], spc_date_string)

        myrorss_io.unzip_1day_tar_file(
            tar_file_name=tar_file_name, field_names=[radar_utils.REFL_NAME],
            spc_date_string=spc_date_string,
            top_target_directory_name=top_radar_dir_name_untarred,
            refl_heights_m_asl=RADAR_HEIGHTS_M_ASL)
        print SEPARATOR_STRING

    # Use files at the first height only to discover which valid times exist.
    these_file_names = myrorss_and_mrms_io.find_raw_files_one_spc_date(
        spc_date_string=spc_date_string, field_name=radar_utils.REFL_NAME,
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        top_directory_name=top_radar_dir_name_untarred,
        height_m_asl=RADAR_HEIGHTS_M_ASL[0], raise_error_if_missing=True)

    valid_times_unix_sec = numpy.array([
        myrorss_and_mrms_io.raw_file_name_to_time(f)
        for f in these_file_names
    ], dtype=int)

    valid_times_unix_sec = numpy.sort(valid_times_unix_sec)

    # Keep only valid times that fall inside the SPC date.
    start_time_unix_sec = time_conversion.get_start_of_spc_date(
        spc_date_string)
    end_time_unix_sec = time_conversion.get_end_of_spc_date(spc_date_string)

    good_indices = numpy.where(
        numpy.logical_and(valid_times_unix_sec >= start_time_unix_sec,
                          valid_times_unix_sec <= end_time_unix_sec))[0]

    valid_times_unix_sec = valid_times_unix_sec[good_indices]

    # Find the full time-by-height matrix of raw reflectivity files.
    radar_file_dict = myrorss_and_mrms_io.find_many_raw_files(
        desired_times_unix_sec=valid_times_unix_sec,
        spc_date_strings=[spc_date_string] * len(valid_times_unix_sec),
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        field_names=[radar_utils.REFL_NAME],
        top_directory_name=top_radar_dir_name_untarred,
        reflectivity_heights_m_asl=RADAR_HEIGHTS_M_ASL)

    radar_file_name_matrix = radar_file_dict[
        myrorss_and_mrms_io.RADAR_FILE_NAMES_KEY]
    valid_times_unix_sec = radar_file_dict[
        myrorss_and_mrms_io.UNIQUE_TIMES_KEY]

    num_times = len(valid_times_unix_sec)
    num_heights = len(RADAR_HEIGHTS_M_ASL)

    for i in range(num_times):
        # Reflectivity and fine-grid coordinates are filled in by the inner
        # loop over heights; found_corrupt_file causes the whole time step to
        # be skipped.
        reflectivity_matrix_dbz = None
        fine_grid_point_latitudes_deg = None
        fine_grid_point_longitudes_deg = None
        found_corrupt_file = False

        for j in range(num_heights):
            print 'Reading data from: "{0:s}"...'.format(
                radar_file_name_matrix[i, j])

            this_metadata_dict = (
                myrorss_and_mrms_io.read_metadata_from_raw_file(
                    netcdf_file_name=radar_file_name_matrix[i, j],
                    data_source=radar_utils.MYRORSS_SOURCE_ID,
                    raise_error_if_fails=False))

            # None means the file could not be read (corrupt); skip this
            # valid time entirely.
            if this_metadata_dict is None:
                found_corrupt_file = True
                break

            this_sparse_grid_table = (
                myrorss_and_mrms_io.read_data_from_sparse_grid_file(
                    netcdf_file_name=radar_file_name_matrix[i, j],
                    field_name_orig=this_metadata_dict[
                        myrorss_and_mrms_io.FIELD_NAME_COLUMN_ORIG],
                    data_source=radar_utils.MYRORSS_SOURCE_ID,
                    sentinel_values=this_metadata_dict[
                        radar_utils.SENTINEL_VALUE_COLUMN]))

            (this_refl_matrix_dbz, fine_grid_point_latitudes_deg,
             fine_grid_point_longitudes_deg) = radar_s2f.sparse_to_full_grid(
                 sparse_grid_table=this_sparse_grid_table,
                 metadata_dict=this_metadata_dict)

            # Coarsen the grid by taking every other row and column, then add
            # a trailing height axis so slices can be concatenated.
            this_refl_matrix_dbz = numpy.expand_dims(
                this_refl_matrix_dbz[::2, ::2], axis=-1)

            if reflectivity_matrix_dbz is None:
                # "+ 0." forces a copy of the first height slice.
                reflectivity_matrix_dbz = this_refl_matrix_dbz + 0.
            else:
                reflectivity_matrix_dbz = numpy.concatenate(
                    (reflectivity_matrix_dbz, this_refl_matrix_dbz), axis=-1)

        print '\n'

        if found_corrupt_file:
            continue

        # Flip rows so latitudes increase with row index, matching the
        # reversed latitude array below.
        reflectivity_matrix_dbz = numpy.flip(reflectivity_matrix_dbz, axis=0)
        fine_grid_point_latitudes_deg = fine_grid_point_latitudes_deg[::-1]

        # Coarse-grid coordinates correspond to the [::2, ::2] subsampling
        # applied to each reflectivity slice above.
        coarse_grid_point_latitudes_deg = fine_grid_point_latitudes_deg[::2]
        coarse_grid_point_longitudes_deg = fine_grid_point_longitudes_deg[::2]

        coarse_grid_metadata_dict = {
            echo_classifn.MIN_LATITUDE_KEY:
                numpy.min(coarse_grid_point_latitudes_deg),
            echo_classifn.LATITUDE_SPACING_KEY:
                (coarse_grid_point_latitudes_deg[1] -
                 coarse_grid_point_latitudes_deg[0]),
            echo_classifn.MIN_LONGITUDE_KEY:
                numpy.min(coarse_grid_point_longitudes_deg),
            echo_classifn.LONGITUDE_SPACING_KEY:
                (coarse_grid_point_longitudes_deg[1] -
                 coarse_grid_point_longitudes_deg[0]),
            echo_classifn.HEIGHTS_KEY: RADAR_HEIGHTS_M_ASL
        }

        fine_grid_metadata_dict = {
            echo_classifn.MIN_LATITUDE_KEY:
                numpy.min(fine_grid_point_latitudes_deg),
            echo_classifn.LATITUDE_SPACING_KEY:
                (fine_grid_point_latitudes_deg[1] -
                 fine_grid_point_latitudes_deg[0]),
            echo_classifn.MIN_LONGITUDE_KEY:
                numpy.min(fine_grid_point_longitudes_deg),
            echo_classifn.LONGITUDE_SPACING_KEY:
                (fine_grid_point_longitudes_deg[1] -
                 fine_grid_point_longitudes_deg[0]),
            echo_classifn.HEIGHTS_KEY: RADAR_HEIGHTS_M_ASL
        }

        # Classify on the coarse grid, then upsample flags to the fine grid.
        convective_flag_matrix = echo_classifn.find_convective_pixels(
            reflectivity_matrix_dbz=reflectivity_matrix_dbz,
            grid_metadata_dict=coarse_grid_metadata_dict,
            valid_time_unix_sec=valid_times_unix_sec[i],
            option_dict=option_dict)

        print 'Number of convective pixels = {0:d}\n'.format(
            numpy.sum(convective_flag_matrix))

        convective_flag_matrix = echo_classifn._double_class_resolution(
            coarse_convective_flag_matrix=convective_flag_matrix,
            coarse_grid_point_latitudes_deg=coarse_grid_point_latitudes_deg,
            coarse_grid_point_longitudes_deg=coarse_grid_point_longitudes_deg,
            fine_grid_point_latitudes_deg=fine_grid_point_latitudes_deg,
            fine_grid_point_longitudes_deg=fine_grid_point_longitudes_deg)

        this_output_file_name = echo_classifn.find_classification_file(
            top_directory_name=top_output_dir_name,
            valid_time_unix_sec=valid_times_unix_sec[i], desire_zipped=False,
            allow_zipped_or_unzipped=False, raise_error_if_missing=False)

        print 'Writing echo classifications to: "{0:s}"...'.format(
            this_output_file_name)

        echo_classifn.write_classifications(
            convective_flag_matrix=convective_flag_matrix,
            grid_metadata_dict=fine_grid_metadata_dict,
            valid_time_unix_sec=valid_times_unix_sec[i],
            option_dict=option_dict, netcdf_file_name=this_output_file_name)

        # Write unzipped, then gzip in place (deleting the unzipped file).
        unzipping.gzip_file(input_file_name=this_output_file_name,
                            delete_input_file=True)
        print SEPARATOR_STRING

    # If data were never untarred by this method, there is nothing to clean
    # up.
    if top_radar_dir_name_tarred is None:
        return

    myrorss_io.remove_unzipped_data_1day(
        spc_date_string=spc_date_string,
        top_directory_name=top_radar_dir_name_untarred,
        field_names=[radar_utils.REFL_NAME],
        refl_heights_m_asl=RADAR_HEIGHTS_M_ASL)
    print SEPARATOR_STRING
def _run_for_gridrad(spc_date_string, top_radar_dir_name, top_output_dir_name,
                     option_dict):
    """Runs echo classification for GridRad data.

    For each valid time in the SPC date (at intervals of TIME_INTERVAL_SEC,
    skipping times with no file on disk), this method reads full-grid
    reflectivity, subsets it to RADAR_HEIGHTS_M_ASL, runs echo
    classification, and writes the classifications to a NetCDF file.

    :param spc_date_string: See documentation at top of file.
    :param top_radar_dir_name: Same.
    :param top_output_dir_name: Same.
    :param option_dict: See doc for
        `echo_classification.find_convective_pixels`.
    """

    # Candidate valid times: full SPC date at fixed intervals, endpoint
    # included.
    valid_times_unix_sec = time_periods.range_and_interval_to_list(
        start_time_unix_sec=time_conversion.get_start_of_spc_date(
            spc_date_string),
        end_time_unix_sec=time_conversion.get_end_of_spc_date(spc_date_string),
        time_interval_sec=TIME_INTERVAL_SEC, include_endpoint=True)

    num_times = len(valid_times_unix_sec)
    radar_file_names = [''] * num_times
    indices_to_keep = []

    # Keep only those valid times whose radar file actually exists.
    for i in range(num_times):
        radar_file_names[i] = gridrad_io.find_file(
            top_directory_name=top_radar_dir_name,
            unix_time_sec=valid_times_unix_sec[i],
            raise_error_if_missing=False)

        if os.path.isfile(radar_file_names[i]):
            indices_to_keep.append(i)

    indices_to_keep = numpy.array(indices_to_keep, dtype=int)
    valid_times_unix_sec = valid_times_unix_sec[indices_to_keep]
    radar_file_names = [radar_file_names[k] for k in indices_to_keep]
    num_times = len(valid_times_unix_sec)

    for i in range(num_times):
        print 'Reading data from: "{0:s}"...\n'.format(radar_file_names[i])
        radar_metadata_dict = gridrad_io.read_metadata_from_full_grid_file(
            netcdf_file_name=radar_file_names[i])

        (reflectivity_matrix_dbz, all_heights_m_asl, grid_point_latitudes_deg,
         grid_point_longitudes_deg
        ) = gridrad_io.read_field_from_full_grid_file(
            netcdf_file_name=radar_file_names[i],
            field_name=radar_utils.REFL_NAME,
            metadata_dict=radar_metadata_dict)

        # Move the leading axis (height, per the indexing below) to the end,
        # so the matrix is indexed [..., height].
        reflectivity_matrix_dbz = numpy.rollaxis(reflectivity_matrix_dbz,
                                                 axis=0, start=3)

        # Subset heights to exactly RADAR_HEIGHTS_M_ASL (raises ValueError if
        # a desired height is missing from the file).
        height_indices = numpy.array(
            [all_heights_m_asl.tolist().index(h) for h in RADAR_HEIGHTS_M_ASL],
            dtype=int)
        reflectivity_matrix_dbz = reflectivity_matrix_dbz[..., height_indices]

        # Grid spacing is taken from the first two coordinates; assumes a
        # regular grid.
        grid_metadata_dict = {
            echo_classifn.MIN_LATITUDE_KEY:
                numpy.min(grid_point_latitudes_deg),
            echo_classifn.LATITUDE_SPACING_KEY:
                grid_point_latitudes_deg[1] - grid_point_latitudes_deg[0],
            echo_classifn.MIN_LONGITUDE_KEY:
                numpy.min(grid_point_longitudes_deg),
            echo_classifn.LONGITUDE_SPACING_KEY:
                grid_point_longitudes_deg[1] - grid_point_longitudes_deg[0],
            echo_classifn.HEIGHTS_KEY: RADAR_HEIGHTS_M_ASL
        }

        convective_flag_matrix = echo_classifn.find_convective_pixels(
            reflectivity_matrix_dbz=reflectivity_matrix_dbz,
            grid_metadata_dict=grid_metadata_dict,
            valid_time_unix_sec=valid_times_unix_sec[i],
            option_dict=option_dict)

        print 'Number of convective pixels = {0:d}\n'.format(
            numpy.sum(convective_flag_matrix))

        this_output_file_name = echo_classifn.find_classification_file(
            top_directory_name=top_output_dir_name,
            valid_time_unix_sec=valid_times_unix_sec[i], desire_zipped=False,
            allow_zipped_or_unzipped=False, raise_error_if_missing=False)

        print 'Writing echo classifications to: "{0:s}"...'.format(
            this_output_file_name)

        echo_classifn.write_classifications(
            convective_flag_matrix=convective_flag_matrix,
            grid_metadata_dict=grid_metadata_dict,
            valid_time_unix_sec=valid_times_unix_sec[i],
            option_dict=option_dict, netcdf_file_name=this_output_file_name)

        print SEPARATOR_STRING
def _run(top_radar_dir_name, top_echo_classifn_dir_name, valid_time_string,
         min_latitude_deg, max_latitude_deg, min_longitude_deg,
         max_longitude_deg, output_dir_name):
    """Makes figure to explain storm detection.

    This is effectively the main method.  It (1) runs echo-top-based storm
    tracking once per trial configuration; (2) plots echo tops before and
    after echo classification; (3) plots detected storm outlines and centers
    for each trial; (4) concatenates all panels into one figure.

    :param top_radar_dir_name: See documentation at top of file.
    :param top_echo_classifn_dir_name: Same.
    :param valid_time_string: Same.
    :param min_latitude_deg: Same.
    :param max_latitude_deg: Same.
    :param min_longitude_deg: Same.
    :param max_longitude_deg: Same.
    :param output_dir_name: Same.
    """

    file_system_utils.mkdir_recursive_if_necessary(
        directory_name=output_dir_name
    )

    valid_time_unix_sec = time_conversion.string_to_unix_sec(
        valid_time_string, TIME_FORMAT
    )
    spc_date_string = time_conversion.time_to_spc_date_string(
        valid_time_unix_sec
    )

    # One tracking run per (min polygon size, recompute-centroid flag) pair.
    num_trials = len(MIN_POLYGON_SIZES_PX)
    tracking_dir_names = [None] * num_trials

    for k in range(num_trials):
        # Trial-specific output directory, named after the trial settings.
        tracking_dir_names[k] = (
            '{0:s}/tracking/min-polygon-size-px={1:d}_recompute-centroids={2:d}'
        ).format(
            output_dir_name, MIN_POLYGON_SIZES_PX[k],
            int(RECOMPUTE_CENTROID_FLAGS[k])
        )

        echo_top_tracking.run_tracking(
            top_radar_dir_name=top_radar_dir_name,
            top_output_dir_name=tracking_dir_names[k],
            first_spc_date_string=spc_date_string,
            last_spc_date_string=spc_date_string,
            first_time_unix_sec=valid_time_unix_sec,
            last_time_unix_sec=valid_time_unix_sec + 1,
            top_echo_classifn_dir_name=top_echo_classifn_dir_name,
            min_polygon_size_pixels=MIN_POLYGON_SIZES_PX[k],
            recompute_centroids=RECOMPUTE_CENTROID_FLAGS[k]
        )
        print(SEPARATOR_STRING)

    echo_top_file_name = myrorss_and_mrms_io.find_raw_file(
        top_directory_name=top_radar_dir_name,
        unix_time_sec=valid_time_unix_sec, spc_date_string=spc_date_string,
        field_name=radar_utils.ECHO_TOP_40DBZ_NAME,
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        raise_error_if_missing=True
    )

    print('Reading data from: "{0:s}"...'.format(echo_top_file_name))
    metadata_dict = myrorss_and_mrms_io.read_metadata_from_raw_file(
        netcdf_file_name=echo_top_file_name,
        data_source=radar_utils.MYRORSS_SOURCE_ID
    )

    sparse_grid_table = myrorss_and_mrms_io.read_data_from_sparse_grid_file(
        netcdf_file_name=echo_top_file_name,
        field_name_orig=metadata_dict[
            myrorss_and_mrms_io.FIELD_NAME_COLUMN_ORIG],
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        sentinel_values=metadata_dict[radar_utils.SENTINEL_VALUE_COLUMN]
    )

    echo_top_matrix_km_asl, radar_latitudes_deg, radar_longitudes_deg = (
        radar_s2f.sparse_to_full_grid(
            sparse_grid_table=sparse_grid_table, metadata_dict=metadata_dict)
    )

    # Flip rows so latitudes increase with row index, matching the reversed
    # latitude array.
    echo_top_matrix_km_asl = numpy.flip(echo_top_matrix_km_asl, axis=0)
    radar_latitudes_deg = radar_latitudes_deg[::-1]

    echo_classifn_file_name = echo_classifn.find_classification_file(
        top_directory_name=top_echo_classifn_dir_name,
        valid_time_unix_sec=valid_time_unix_sec, desire_zipped=True,
        allow_zipped_or_unzipped=True, raise_error_if_missing=True
    )

    print('Reading data from: "{0:s}"...'.format(echo_classifn_file_name))
    convective_flag_matrix = echo_classifn.read_classifications(
        echo_classifn_file_name
    )[0]

    # Subset rows (latitudes) to the requested bounding box.
    good_indices = numpy.where(numpy.logical_and(
        radar_latitudes_deg >= min_latitude_deg,
        radar_latitudes_deg <= max_latitude_deg
    ))[0]

    echo_top_matrix_km_asl = echo_top_matrix_km_asl[good_indices, ...]
    convective_flag_matrix = convective_flag_matrix[good_indices, ...]
    radar_latitudes_deg = radar_latitudes_deg[good_indices]

    # Subset columns (longitudes) to the requested bounding box.
    good_indices = numpy.where(numpy.logical_and(
        radar_longitudes_deg >= min_longitude_deg,
        radar_longitudes_deg <= max_longitude_deg
    ))[0]

    echo_top_matrix_km_asl = echo_top_matrix_km_asl[..., good_indices]
    convective_flag_matrix = convective_flag_matrix[..., good_indices]
    radar_longitudes_deg = radar_longitudes_deg[good_indices]

    # Panel (a): all echoes (no convective mask applied).
    this_figure_object, this_axes_object = _plot_echo_tops(
        echo_top_matrix_km_asl=echo_top_matrix_km_asl,
        latitudes_deg=radar_latitudes_deg,
        longitudes_deg=radar_longitudes_deg, plot_colour_bar=False,
        convective_flag_matrix=None
    )[:2]

    this_axes_object.set_title('All echoes')
    plotting_utils.label_axes(axes_object=this_axes_object, label_string='(a)')

    panel_file_names = [
        '{0:s}/before_echo_classification.jpg'.format(output_dir_name)
    ]

    print('Saving figure to: "{0:s}"...'.format(panel_file_names[-1]))
    this_figure_object.savefig(
        panel_file_names[-1], dpi=FIGURE_RESOLUTION_DPI, pad_inches=0,
        bbox_inches='tight'
    )
    pyplot.close(this_figure_object)

    # Panel (b): convective echoes only (masked by echo classification).
    this_figure_object, this_axes_object = _plot_echo_tops(
        echo_top_matrix_km_asl=echo_top_matrix_km_asl,
        latitudes_deg=radar_latitudes_deg,
        longitudes_deg=radar_longitudes_deg, plot_colour_bar=False,
        convective_flag_matrix=convective_flag_matrix
    )[:2]

    this_axes_object.set_title('Convective echoes only')
    plotting_utils.label_axes(axes_object=this_axes_object, label_string='(b)')

    panel_file_names.append(
        '{0:s}/after_echo_classification.jpg'.format(output_dir_name)
    )

    print('Saving figure to: "{0:s}"...'.format(panel_file_names[-1]))
    this_figure_object.savefig(
        panel_file_names[-1], dpi=FIGURE_RESOLUTION_DPI, pad_inches=0,
        bbox_inches='tight'
    )
    pyplot.close(this_figure_object)

    # Remaining panels start at (c); letter_label is incremented before use.
    letter_label = 'b'

    for k in range(num_trials):
        this_tracking_file_name = tracking_io.find_file(
            top_tracking_dir_name=tracking_dir_names[k],
            tracking_scale_metres2=
            echo_top_tracking.DUMMY_TRACKING_SCALE_METRES2,
            source_name=tracking_utils.SEGMOTION_NAME,
            valid_time_unix_sec=valid_time_unix_sec,
            spc_date_string=spc_date_string, raise_error_if_missing=True
        )

        print('Reading data from: "{0:s}"...'.format(this_tracking_file_name))
        this_storm_object_table = tracking_io.read_file(this_tracking_file_name)

        # Colour bar only on trial panels after the first.
        this_figure_object, this_axes_object, this_basemap_object = (
            _plot_echo_tops(
                echo_top_matrix_km_asl=echo_top_matrix_km_asl,
                latitudes_deg=radar_latitudes_deg,
                longitudes_deg=radar_longitudes_deg, plot_colour_bar=k > 0,
                convective_flag_matrix=convective_flag_matrix)
        )

        storm_plotting.plot_storm_outlines(
            storm_object_table=this_storm_object_table,
            axes_object=this_axes_object, basemap_object=this_basemap_object,
            line_width=POLYGON_WIDTH, line_colour=POLYGON_COLOUR
        )

        # Project storm centroids (lat/long) into basemap x-y coordinates.
        these_x_metres, these_y_metres = this_basemap_object(
            this_storm_object_table[
                tracking_utils.CENTROID_LONGITUDE_COLUMN].values,
            this_storm_object_table[
                tracking_utils.CENTROID_LATITUDE_COLUMN].values
        )

        this_axes_object.plot(
            these_x_metres, these_y_metres, linestyle='None',
            marker=MARKER_TYPE, markersize=MARKER_SIZE,
            markerfacecolor=MARKER_COLOUR, markeredgecolor=MARKER_COLOUR,
            markeredgewidth=MARKER_EDGE_WIDTH
        )

        this_title_string = (
            'Minimum size = {0:d} GP, {1:s} storm centers'
        ).format(
            MIN_POLYGON_SIZES_PX[k],
            'recomputed' if RECOMPUTE_CENTROID_FLAGS[k] else 'original'
        )

        this_axes_object.set_title(this_title_string)

        letter_label = chr(ord(letter_label) + 1)
        plotting_utils.label_axes(
            axes_object=this_axes_object,
            label_string='({0:s})'.format(letter_label)
        )

        panel_file_names.append(
            '{0:s}/detection{1:d}.jpg'.format(output_dir_name, k)
        )

        print('Saving figure to: "{0:s}"...'.format(panel_file_names[-1]))
        this_figure_object.savefig(
            panel_file_names[-1], dpi=FIGURE_RESOLUTION_DPI, pad_inches=0,
            bbox_inches='tight'
        )
        pyplot.close(this_figure_object)

    # Combine all panels into the final figure.
    concat_file_name = '{0:s}/storm_detection.jpg'.format(output_dir_name)
    print('Concatenating panels to: "{0:s}"...'.format(concat_file_name))

    imagemagick_utils.concatenate_images(
        input_file_names=panel_file_names,
        output_file_name=concat_file_name, num_panel_rows=3,
        num_panel_columns=2
    )