def test_incorrect_year(self):
    """Verify that a malformed init-time string is rejected.

    The original version wrapped the call in a bare ``try/except`` and
    asserted ``True`` inside the handler, so the test passed vacuously
    even when no exception was raised.  ``assertRaises`` makes the
    expectation explicit: the call must raise.
    """
    # Now provide something that doesn't have the expected format
    print("\nRunning unittest: incorrect_year")
    # NOTE(review): assumes extract_year_month raises on bad input rather
    # than logging and returning a value — confirm against util's contract.
    with self.assertRaises(Exception):
        util.extract_year_month('39990704_12', self.logger)
def run_at_time(self, cur_init):
    """!Get TC-pairs data then regrid tiles centered on the storm.

    Get TC-pairs track data and GFS model data, do any necessary
    processing then regrid the forecast and analysis files to a
    30 x 30 degree tile centered on the storm.

    Args:
        @param cur_init: initialization time string (e.g. YYYYMMDD_hh)
                         used to locate track data and name output.

    Returns:
        None: invokes regrid_data_plane to create a netCDF file from
              two extratropical storm track files.
    """
    # pylint:disable=protected-access
    # Need to call sys._getframe() to get the filename and method/func
    # for logging information.

    # Used in logging
    cur_filename = sys._getframe().f_code.co_filename
    cur_function = sys._getframe().f_code.co_name

    # get the process id to be used to identify the output
    # amongst different users and runs.
    cur_pid = str(os.getpid())
    tmp_dir = os.path.join(self.config.getdir('TMP_DIR'), cur_pid)

    msg = ("INFO|[" + cur_filename + ":" + cur_function + "]"
           "|Begin extract tiles")
    self.logger.info(msg)

    # Check that there are tc_pairs data which are used as input
    if util.is_dir_empty(self.tc_pairs_dir):
        # Fixed: original message ran the directory name straight into
        # "Exiting..." with no separator.
        msg = ("ERROR|[" + cur_filename + ":" + cur_function + "]"
               "|No tc pairs data found at " + self.tc_pairs_dir +
               ". Exiting...")
        self.logger.error(msg)
        sys.exit(1)

    # Logging output: TIME UTC |TYPE (DEBUG, INFO, WARNING, etc.) |
    # [File : function]| Message logger.info("INFO | [" +
    # cur_filename + ":" + "cur_function] |" + "BEGIN extract_tiles")

    # Process TC pairs by initialization time
    # Begin processing for initialization time, cur_init
    year_month = util.extract_year_month(cur_init, self.logger)

    # Create the name of the filter file we need to find.  If
    # the file doesn't exist, then run TC_STAT
    filter_filename = "filter_" + cur_init + ".tcst"
    filter_name = os.path.join(self.filtered_out_dir, cur_init,
                               filter_filename)

    if util.file_exists(filter_name) and not self.overwrite_flag:
        msg = ("DEBUG| [" + cur_filename + ":" + cur_function +
               " ] | Filter file exists, using Track data file: " +
               filter_name)
        self.logger.debug(msg)
    else:
        # Create the storm track by applying the
        # filter options defined in the config/param file.
        tile_dir_parts = [self.tc_pairs_dir, "/", year_month]
        tile_dir = ''.join(tile_dir_parts)

        # Use TcStatWrapper to build up the tc_stat command and invoke
        # the MET tool tc_stat to perform the filtering.
        tcs = TcStatWrapper(self.config)
        tcs.build_tc_stat(self.filtered_out_dir, cur_init,
                          tile_dir, self.addl_filter_opts)

        # Remove any empty files and directories that can occur
        # from filtering.
        util.prune_empty(filter_name, self.logger)

    # Now get unique storm ids from the filter file,
    # filter_yyyymmdd_hh.tcst
    sorted_storm_ids = util.get_storm_ids(filter_name, self.logger)

    # Check for empty sorted_storm_ids, if empty,
    # continue to the next time.
    if not sorted_storm_ids:
        # No storms found for init time, cur_init
        msg = ("DEBUG|[" + cur_filename + ":" + cur_function + " ]|" +
               "No storms were found for " + cur_init +
               "...continue to next in list")
        self.logger.debug(msg)
        return

    # The header line is the same for every storm, so read it once,
    # outside the loop.  (The original re-opened the filter file on
    # every iteration and never closed the handle.)
    with open(filter_name, "r") as filter_file:
        header = filter_file.readline()

    # Process each storm in the sorted_storm_ids list.
    # Iterate over each filter file in the output directory and
    # search for the presence of the storm id.  Store this
    # corresponding row of data into a temporary file in the
    # /tmp/<pid> directory.
    for cur_storm in sorted_storm_ids:
        storm_output_dir = os.path.join(self.filtered_out_dir,
                                        cur_init, cur_storm)
        util.mkdir_p(storm_output_dir)
        util.mkdir_p(tmp_dir)
        tmp_filename = "filter_" + cur_init + "_" + cur_storm
        full_tmp_filename = os.path.join(tmp_dir, tmp_filename)

        storm_match_list = util.grep(cur_storm, filter_name)
        with open(full_tmp_filename, "a+") as tmp_file:
            # copy over header information
            tmp_file.write(header)
            for storm_match in storm_match_list:
                tmp_file.write(storm_match)

        # Perform regridding of the forecast and analysis files
        # to an n X n degree tile centered on the storm (dimensions
        # are indicated in the config/param file).
        util.retrieve_and_regrid(full_tmp_filename, cur_init,
                                 cur_storm, self.filtered_out_dir,
                                 self.logger, self.config)
    # end of for cur_storm

    # Remove any empty files and directories in the extract_tiles
    # output directory.
    util.prune_empty(self.filtered_out_dir, self.logger)

    # Clean up the tmp directory if it exists
    if os.path.isdir(tmp_dir):
        util.rmtree(tmp_dir)

    # Fixed: original built this as function:filename, inverted relative
    # to every other log message in this method.
    msg = ("INFO|[" + cur_filename + ":" + cur_function + "]"
           "| Finished extract tiles")
    self.logger.info(msg)
def test_archive_date(self):
    """Extract the year-month from an archive-era init time.

    Expect '199907' to be extracted from '19990704_12'.
    """
    print("\nRunning unittest: archive_date")
    self.assertEqual(
        util.extract_year_month('19990704_12', self.logger), '199907')
def test_extract_year_month(self):
    """Extract the year-month from a well-formed init time.

    Expect '201607' to be extracted from '20160704_12'.
    """
    print("\nRunning unittest: extract_year_month")
    self.assertEqual(
        util.extract_year_month('20160704_12', self.logger), '201607')