def produce_all_sector_metadata(final_products, area_def, xarray_obj, metadata_dir='metadata'):
    ''' Produce metadata for all products listed in "final_products" - all products should cover area_def region

    Args:
        final_products (list) : list of strings, containing paths to all products that need metadata files generated.
        area_def (AreaDefinition) : pyresample AreaDefinition that was used to produce all products in final_products
        xarray_obj (Dataset) : xarray Dataset providing attributes for the metadata outputs
        metadata_dir (str) : DEFAULT 'metadata' Specify subdirectory to use for all metadata - allow for alternate
                             directory for "non-operational" test products outside the 'metadata' directory.
    Returns:
        (list) : list of strings, containing full paths to new YAML metadata files
    '''
    from geoips2.sector_utils.utils import is_sector_type

    yaml_products = []
    # Sector metadata is only produced for TC (atcf) sectors.
    if not is_sector_type(area_def, 'atcf'):
        return yaml_products

    from geoips2.sector_utils.atcf_tracks import produce_sector_metadata
    for prod_path in final_products:
        # Skip products that are themselves YAML metadata files.
        if 'yaml' in prod_path:
            continue
        yaml_products.extend(
            produce_sector_metadata(area_def, xarray_obj, prod_path, metadata_dir=metadata_dir))
    return yaml_products
def remove_duplicates(fnames, area_def, remove_files=False):
    ''' Identify (and optionally delete) duplicate output products for each filename in fnames.

    Args:
        fnames (list) : list of strings, product filenames to check for duplicates
        area_def (AreaDefinition) : pyresample AreaDefinition - determines which duplicate-checkers apply
        remove_files (bool) : DEFAULT False, if True actually delete the identified duplicates

    Returns:
        (list, list) : (removed_files, saved_files) - lists of filename strings
    '''
    from geoips2.sector_utils.utils import is_sector_type
    from geoips2.filenames.atcf_filenames import atcf_web_filename_remove_duplicates
    from geoips2.filenames.atcf_filenames import metoctiff_filename_remove_duplicates
    from geoips2.filenames.old_tcweb_fnames import old_tcweb_fnames_remove_duplicates
    from geoips2.filenames.product_filenames import standard_geoips_filename_remove_duplicates

    removed_files = []
    saved_files = []
    for fname in fnames:
        # TC (atcf) sectors get all three TC-specific duplicate checks; everything else
        # uses the standard geoips filename check.
        if is_sector_type(area_def, 'atcf'):
            checkers = (atcf_web_filename_remove_duplicates,
                        old_tcweb_fnames_remove_duplicates,
                        metoctiff_filename_remove_duplicates)
        else:
            checkers = (standard_geoips_filename_remove_duplicates,)
        for checker in checkers:
            curr_removed, curr_saved = checker(fname, remove_files=remove_files)
            removed_files.extend(curr_removed)
            saved_files.extend(curr_saved)
    return removed_files, saved_files
def get_title_string_from_objects(area_def, xarray_obj, product_name_title, product_datatype_title=None,
                                  bg_xarray=None, bg_product_name_title=None, bg_datatype_title=None):
    ''' Build the (possibly multi-line) plot title from sector and dataset metadata.

    Args:
        area_def (AreaDefinition) : pyresample AreaDefinition - TC (atcf) sectors get a storm-specific title
        xarray_obj (Dataset) : xarray Dataset for the foreground product (platform/source/start_datetime attrs)
        product_name_title (str) : human-readable product name for the title
        product_datatype_title (str) : DEFAULT None - derived from platform/source names when not supplied
        bg_xarray (Dataset) : DEFAULT None - background dataset; adds a third title line when present
        bg_product_name_title (str) : DEFAULT None - background product name for the third line
        bg_datatype_title (str) : DEFAULT None - derived from bg platform/source names when not supplied

    Returns:
        (str) : newline-joined title string (2 lines, or 3 when a background dataset is given)
    '''
    from geoips2.filenames.base_paths import PATHS as gpaths
    from geoips2.sector_utils.utils import is_sector_type

    # Fall back to "<PLATFORM> <SOURCE>" when no explicit datatype title was given.
    if product_datatype_title is None:
        product_datatype_title = '{0} {1}'.format(xarray_obj.platform_name.upper(),
                                                  xarray_obj.source_name.upper())
    if bg_xarray is not None and bg_datatype_title is None:
        bg_datatype_title = '{0} {1}'.format(bg_xarray.platform_name.upper(),
                                             bg_xarray.source_name.upper())

    if is_sector_type(area_def, 'atcf'):
        LOG.info('Setting dynamic title')
        # Make sure we reflect the actual start_datetime in the filename
        # geoimg_obj.set_geoimg_attrs(start_dt=xarray_obj.start_datetime)
        storm = area_def.sector_info
        title_line1 = '{0}{1:02d} {2} at {3}, {4}'.format(storm['storm_basin'],
                                                          int(storm['storm_num']),
                                                          storm['storm_name'],
                                                          storm['synoptic_time'],
                                                          gpaths['GEOIPS_COPYRIGHT'])
        # pandas dataframes seem to handle time objects much better than xarray.
        title_line2 = '{0} {1} at {2}'.format(product_datatype_title, product_name_title,
                                              xarray_obj.start_datetime)
        log_fmt = 'title_string: %s'
    else:
        title_line1 = '{0} {1}'.format(product_datatype_title, product_name_title)
        title_line2 = '{0} {1}'.format(xarray_obj.start_datetime.strftime('%Y/%m/%d %H:%M:%SZ'),
                                       gpaths['GEOIPS_COPYRIGHT'])
        log_fmt = 'Not dynamic, using standard title_string: %s'

    # A background dataset contributes a third line in either sector type.
    if bg_xarray is not None:
        title_line3 = '{0} {1} at {2}'.format(bg_datatype_title, bg_product_name_title,
                                              bg_xarray.start_datetime)
        title_string = '{0}\n{1}\n{2}'.format(title_line1, title_line2, title_line3)
    else:
        title_string = '{0}\n{1}'.format(title_line1, title_line2)
    LOG.info(log_fmt, title_string)

    return title_string
def plot_windspeeds(wind_xarray, area_def, interp_data, mtif_area_def=None, mtif_interp_data=None,
                    text_area_def=None, vis_data=None, ir_data=None, vis_xarray=None, ir_xarray=None):
    ''' Plot wind speed files, based on the current data and area definitions

    Args:
        wind_xarray (Dataset) : xarray Dataset containing metadata information
        area_def (AreaDefinition) : pyresample AreaDefintion for full data array
        interp_data (ndarray) : numpy.ma.MaskedArray of data to plot, relating to area_def
        mtif_area_def (AreaDefinition) : pyresample AreaDefinition pertaining to native resolution mtif data
        mtif_interp_data (ndarray) : numpy.ma.MaskedArray of data to plot, relating to mtif_area_def
        text_area_def (AreaDefinition) : pyresample AreaDefinition pertaining to non-interpolated sectored data
        vis_data (ndarray) : DEFAULT None, optional Visible background imagery (takes precedence over ir_data)
        ir_data (ndarray) : DEFAULT None, optional Infrared background imagery
        vis_xarray (Dataset) : DEFAULT None, xarray Dataset the Visible background came from
        ir_xarray (Dataset) : DEFAULT None, xarray Dataset the Infrared background came from
    Returns:
        (list, list, list) : List of strings of full final products ,
                             List of strings of mtif final products
                             List of strings for text final products
    '''
    final_products = []
    mtif_final_products = []
    text_final_products = []
    product_name = 'windspeed'
    product_name_title = 'Winds'
    product_datatype_title = None
    # Optional per-source override of the datatype portion of the plot title.
    if wind_xarray.source_name in PRODUCT_DATATYPE_TITLE:
        product_datatype_title = PRODUCT_DATATYPE_TITLE[wind_xarray.source_name]
    covg = percent_unmasked(interp_data)
    bg_mpl_colors_info = None
    bg_data = None
    bg_xarray = None
    bg_product_name_title = None
    # Select background imagery: Visible wins over Infrared when both are supplied.
    if vis_data is not None:
        from geoips2.products.visir import DATARANGE_LIST, CMAPLIST
        bg_data = vis_data
        bg_xarray = vis_xarray
        bg_product_name_title = 'Visible'
        # Create the matplotlib color info dict - the fields in this dictionary (cmap, norm, boundaries,
        # etc) will be used in plot_image
        bg_mpl_colors_info = set_matplotlib_colors_standard(
            DATARANGE_LIST['Visible-Gray'], cmap_name=CMAPLIST['Visible-Gray'],
            cbar_label=None, create_colorbar=False)
    elif ir_data is not None:
        from geoips2.products.visir import DATARANGE_LIST, CMAPLIST
        # Create the matplotlib color info dict - the fields in this dictionary (cmap, norm, boundaries,
        # etc) will be used in plot_image
        bg_data = ir_data
        bg_xarray = ir_xarray
        bg_product_name_title = 'Infrared'
        bg_mpl_colors_info = set_matplotlib_colors_standard(
            DATARANGE_LIST['Infrared-Gray'], cmap_name=CMAPLIST['Infrared-Gray'],
            cbar_label=None, create_colorbar=False)
    if is_sector_type(area_def, 'atcf'):
        # NOTE(review): assumes mtif_interp_data is always provided for atcf sectors - confirm callers.
        mtif_covg = percent_unmasked(mtif_interp_data)
        # get filename from objects
        atcf_fname = output_atcf_fname(area_def, wind_xarray, product_name, covg,
                                       output_type='png', output_type_dir='png',
                                       product_dir=product_name)
        old_tcweb_fname = output_atcf_fname(area_def, wind_xarray, product_name, covg,
                                            output_type='jpg', product_dir=product_name,
                                            output_old_tc_web=True)
        atcf_fname_clean = output_atcf_fname(area_def, wind_xarray, product_name + 'Clean', covg,
                                             output_type='png', output_type_dir='png_clean',
                                             product_dir=product_name)
        annotated_fnames = [atcf_fname]
        # Only write the old-style TC web filename for source/product pairs registered in OLDWEB.
        if old_tcweb_fname is not None and wind_xarray.source_name in OLDWEB and product_name in OLDWEB[
                wind_xarray.source_name]:
            annotated_fnames += [old_tcweb_fname]
        # atcf_fname_clean = None
        # Create the matplotlib color info dict - the fields in this dictionary (cmap, norm, boundaries,
        # etc) will be used in plot_image to ensure the image matches the colorbar.
        mpl_colors_info = set_matplotlib_colors_winds(
            min_wind_speed=PNG_MIN_WIND_SPEED, max_wind_speed=PNG_MAX_WIND_SPEED)
        final_products += create_standard_imagery(
            area_def, plot_data=interp_data, xarray_obj=wind_xarray,
            product_name_title=product_name_title, clean_fname=atcf_fname_clean,
            annotated_fnames=annotated_fnames, mpl_colors_info=mpl_colors_info,
            boundaries_info=BOUNDARIES_INFO, gridlines_info=GRIDLINES_INFO,
            product_datatype_title=product_datatype_title, bg_data=bg_data,
            bg_mpl_colors_info=bg_mpl_colors_info, bg_xarray=bg_xarray,
            bg_product_name_title=bg_product_name_title)
        units = 'kts'
        if 'units' in wind_xarray['wind_speed_kts'].attrs.keys():
            units = wind_xarray['wind_speed_kts'].attrs['units']
        # Different color bars for MTIFs and PNGs - if the min/max wind speed in the color bar lines up with the
        # min/max scale values, then the colors will line up exactly with the 8 bit integer values.
        mpl_colors_info_mtif = set_matplotlib_colors_winds(
            min_wind_speed=MTIF_MIN_WIND_SPEED, max_wind_speed=MTIF_MAX_WIND_SPEED)
        mtif_final_products += output_metoctiff(
            product_name, mtif_area_def, wind_xarray, mtif_interp_data,
            requested_data_min=MTIF_MIN_WIND_SPEED, requested_data_max=MTIF_MAX_WIND_SPEED,
            scale_data_min=MTIF_SCALE_DATA_MIN, scale_data_max=MTIF_SCALE_DATA_MAX,
            missing_value=MTIF_MISSING_VALUE, coverage=mtif_covg,
            mpl_cmap=mpl_colors_info_mtif['cmap'], units=units)
        text_final_products += output_windspeed_text(wind_xarray, area_def=area_def)
    else:
        # get filename from objects
        web_fname = output_geoips_fname(area_def, wind_xarray, product_name, covg)
        # Create the matplotlib color info dict - the fields in this dictionary (cmap, norm, boundaries,
        # etc) will be used in plot_image to ensure the image matches the colorbar.
        mpl_colors_info = set_matplotlib_colors_winds(
            min_wind_speed=PNG_MIN_WIND_SPEED, max_wind_speed=PNG_MAX_WIND_SPEED)
        final_products += create_standard_imagery(
            area_def, plot_data=interp_data, xarray_obj=wind_xarray,
            product_name_title=product_name_title, clean_fname=None,
            annotated_fnames=[web_fname], mpl_colors_info=mpl_colors_info,
            boundaries_info=BOUNDARIES_INFO, bg_data=bg_data,
            bg_mpl_colors_info=bg_mpl_colors_info, bg_xarray=bg_xarray)
    return final_products, mtif_final_products, text_final_products
def windspeed(full_xarrays, area_def):
    ''' Product specification to appropriately sector and plot wind speed data

    NOTE in geoips/geoimg/plot/prototypealg.py
        scifile is converted to xarray BEFORE being passed
        sector is converted to area_def BEFORE being passed
        from geoips2.geoips1_utils.scifile import xarray_from_scifile
        from geoips2.geoips1_utils.sector import area_def_from_sector

    Args:
        full_xarrays (list) : list of xarray Datasets; one must contain 'wind_speed_kts', and
                              optional Visible-Gray / Infrared-Gray datasets provide background imagery
        area_def (AreaDefinition) : pyresample AreaDefinition for the region to plot

    Returns:
        (list) : combined list of full, mtif, and text product path strings
    '''
    final_products = []
    # These use a different area definition - make sure the correct one is recorded.
    mtif_final_products = []
    text_final_products = []
    wind_xarray = None
    ir_xarray = None
    vis_xarray = None
    for xobj in full_xarrays:
        if 'wind_speed_kts' in xobj.variables.keys():
            wind_xarray = xobj
        if 'Infrared-Gray' in xobj.variables.keys():
            ir_xarray = xobj
            ir_covg = percent_unmasked(ir_xarray['Infrared-Gray'].to_masked_array())
        if 'Visible-Gray' in xobj.variables.keys():
            vis_xarray = xobj
            vis_covg = percent_unmasked(vis_xarray['Visible-Gray'].to_masked_array())
    # BUGFIX: originally "if vis_xarray and ir_xarray and ...", which truth-tests xarray Datasets -
    # the truth value of an xarray object is ambiguous and raises. Use explicit None checks,
    # matching the convention used throughout this module.
    # When both backgrounds exist, prefer Infrared if it has better coverage.
    if vis_xarray is not None and ir_xarray is not None and ir_covg > vis_covg:
        vis_xarray = None
    [interp_data] = interp_nearest(area_def, wind_xarray, varlist=['wind_speed_kts'])
    interp_vis = None
    interp_ir = None
    if vis_xarray is not None:
        [interp_vis] = interp_nearest(area_def, vis_xarray, varlist=['Visible-Gray'])
    elif ir_xarray is not None:
        [interp_ir] = interp_nearest(area_def, ir_xarray, varlist=['Infrared-Gray'])
    mtif_area_def = None
    mtif_interp_data = None
    text_area_def = None
    # TC (atcf) sectors additionally get native-resolution MTIF and text-windspeed area definitions.
    if is_sector_type(area_def, 'atcf'):
        mtif_area_def = set_mtif_area_def(wind_xarray, area_def)
        [mtif_interp_data] = interp_nearest(mtif_area_def, wind_xarray, varlist=['wind_speed_kts'])
        text_area_def = set_text_area_def(wind_xarray, area_def)
    if mtif_interp_data is None:
        mtif_interp_data = interp_data
    covg = percent_unmasked(interp_data)
    # Assume this is set in the reader.
    # if covg > wind_xarray.minimum_coverage:
    # Now that we are checking coverage within 4 deg x 4deg center box, we can now take ANY
    # amount of overall coverage.
    if covg > 1:
        final_products, mtif_final_products, text_final_products = plot_windspeeds(
            wind_xarray, area_def, interp_data, mtif_area_def, mtif_interp_data, text_area_def,
            vis_data=interp_vis, ir_data=interp_ir, vis_xarray=vis_xarray, ir_xarray=ir_xarray)
    else:
        LOG.info('SKIPPING Insufficient coverage %s%%, minimum required %s%%',
                 covg, wind_xarray.minimum_coverage)
    # This generates YAML files of sector-related metadata for all products in the final_products list
    # NOTE this produces metadata based on "area_def" - if we begin re-centering the storm, ensure this
    # is updated to reflect varying area definitions
    final_products += produce_all_sector_metadata(final_products, area_def, wind_xarray)
    mtif_final_products += produce_all_sector_metadata(mtif_final_products, mtif_area_def, wind_xarray)
    text_final_products += produce_all_sector_metadata(text_final_products, text_area_def, wind_xarray)
    return final_products + mtif_final_products + text_final_products
def visir_driver(fnames, command_line_args=None):
    ''' Overall visir driver. This handles reading the datafiles, determining appropriate sectors based on
        file time, then calling the appropriate products on each sector.

    Parameters:
        fnames (list): list of strings specifying the files on disk to process
        command_line_args (dict) : dictionary of command line arguments
                                     'readername': Explicitly request reader geoips2*.readers.readername.readername
                                     Optional: 'sectorfiles': list of YAML sectorfiles
                                               'sectorlist': list of desired sectors found in "sectorfiles"
                                                             tc<YYYY><BASIN><NUM><NAME> for TCs,
                                                             ie tc2020sh16gabekile
                                     If sectorfiles and sectorlist not included, looks in database
    Returns:
        (list) : Return list of strings specifying full paths to output products that were produced
    '''
    from datetime import datetime
    # Wall-clock bookkeeping per sector, reported at the end via output_process_times.
    process_datetimes = {}
    process_datetimes['overall_start'] = datetime.utcnow()
    final_products = []
    # NOTE(review): assumes command_line_args is a dict containing all four keys - a None default
    # would raise TypeError here; confirm callers always pass the dict.
    sectorfiles = command_line_args['sectorfiles']
    sectorlist = command_line_args['sectorlist']
    readername = command_line_args['readername']
    variables = command_line_args['variables']
    if sectorfiles and not isinstance(sectorfiles, list):
        raise TypeError('Must pass list of strings for "sectorfiles" dictionary entry')
    if sectorlist and not isinstance(sectorlist, list):
        raise TypeError('Must pass list of strings for "sectorfiles" dictionary entry')
    reader = find_module_in_geoips2_packages(module_name='readers', method_name=readername)
    product = find_module_in_geoips2_packages(module_name='products', method_name='visir')
    LOG.info('fnames: %s', fnames)
    LOG.info('command_line_args: %s', command_line_args)
    LOG.info('reader: %s', reader)
    LOG.info('variables: %s', variables)
    # First pass: metadata only, to determine times/sectors without reading full arrays.
    xarrays = reader(fnames, metadata_only=True, chans=variables)
    num_jobs = 0
    area_defs = []
    actual_datetime = None
    for curr_xarray in xarrays:
        from geoips2.sector_utils.utils import get_area_defs_for_xarray, filter_area_defs_actual_time
        actual_datetime = curr_xarray.start_datetime
        area_defs += get_area_defs_for_xarray(curr_xarray, sectorfiles, sectorlist)
    # Filter candidate sectors against the (last) actual data time.
    area_defs = filter_area_defs_actual_time(area_defs, actual_datetime)
    LOG.info('Area defs:\n%s', '\n'.join([area_def.name for area_def in area_defs]))
    for area_def in area_defs:
        process_datetimes[area_def.area_id] = {}
        process_datetimes[area_def.area_id]['start'] = datetime.utcnow()
        from geoips2.sector_utils.atcf_tracks import set_atcf_area_def
        from geoips2.sector_utils.utils import is_sector_type
        pad_area_def = area_def
        if is_sector_type(area_def, 'atcf'):
            # Get an extra 10% size for TCs so we can handle recentering and not have missing data.
            num_lines = int(area_def.y_size * 1.10)
            num_samples = int(area_def.x_size * 1.10)
            pad_area_def = set_atcf_area_def(
                area_def.sector_info, num_lines=num_lines, num_samples=num_samples,
                pixel_width=area_def.pixel_size_x, pixel_height=area_def.pixel_size_y)
        try:
            # Second pass: full read, sectored to the (padded) area definition.
            xarrays = reader(fnames, metadata_only=False, area_def=pad_area_def, chans=variables)
        except IndexError as resp:
            LOG.warning('SKIPPING no coverage')
            process_datetimes[area_def.area_id]['fail'] = datetime.utcnow()
            continue
        except TypeError as resp:
            # Reader does not accept area_def - deliberately fall through and process with the
            # previously-read (unsectored) xarrays.
            LOG.warning('CONTINUE not sectorable reader, use full dataset')
        LOG.info('Trying area_def %s', area_def)
        final_products += product(xarrays, area_def)
        num_jobs += 1
        process_datetimes[area_def.area_id]['end'] = datetime.utcnow()
    process_datetimes['overall_end'] = datetime.utcnow()
    from geoips2.geoips2_utils import output_process_times
    output_process_times(process_datetimes, num_jobs)
    return final_products
def plot_interp_data(interp_data, xarray_obj, area_def, varname, product_name=None):
    ''' Plot the current interpolated data array, using metadata found in xarray_obj and area_def

    Args:
        interp_data (numpy.ndarray) : ndarray or MaskedArray of data to plot
        xarray_obj (Dataset) : xarray Dataset containing appropriate metadata for naming files, etc
        area_def (AreaDefinition) : Pyresample AreaDefinition specifying the region to plot
        varname (str) : Name of variable that we are plotting out of xarray_obj
        product_name (str) : DEFAULT None - product name; defaults to '<source_name>_<varname>'
    Returns:
        (list) : List of strings containing full paths to all output products created
    '''
    final_products = []
    if not product_name:
        product_name = '{0}_{1}'.format(xarray_obj.source_name, varname)
    from geoips2.data_manipulations.info import percent_unmasked
    covg = percent_unmasked(interp_data)
    if covg > 0:  # test a data coverage criterion
        if is_sector_type(area_def, 'atcf'):
            # Get the output filename from sector, and xarray objects
            atcf_fname = output_atcf_fname(area_def, xarray_obj, product_name, covg)
            # product_dir=product_name,
            # source_dir=product_name)
            # NOTE(review): the 'Clear' suffix is inconsistent with the 'Clean' suffix used by
            # plot_windspeeds - presumably a typo, but the suffix is part of the external filename,
            # so confirm downstream consumers before changing it.
            atcf_fname_clean = output_atcf_fname(area_def, xarray_obj, product_name + 'Clear', covg)
            # product_dir=product_name,
            # source_dir=product_name)
            # Create the matplotlib color info dict - the fields in this dictionary (cmap, norm,
            # boundaries, etc) will be used in plot_image to ensure the image matches the colorbar.
            # Select a color scheme depending on Hi or Lo frequency channels.
            if varname in ['H19', 'V19', 'H37', 'V37', 'tb37H', 'tb19H']:
                # color scheme for low frequency
                from geoips2.image_utils.mpl_utils import set_matplotlib_colors_37H
                mpl_colors_info = set_matplotlib_colors_37H(min_tb=MIN_TB_LO, max_tb=MAX_TB_LO)
            elif varname in ['Chan2_AT', 'Chan3_AT', 'Chan4_AT', 'Chan5_AT']:
                # default for high frequency (150-190 GHz) color scheme
                from geoips2.image_utils.mpl_utils import set_matplotlib_colors_150H
                mpl_colors_info = set_matplotlib_colors_150H(min_tb=110, max_tb=310)
            else:
                # default for high frequency (85-91 GHz) color scheme
                from geoips2.image_utils.mpl_utils import set_matplotlib_colors_89H
                mpl_colors_info = set_matplotlib_colors_89H(min_tb=MIN_TB_HI, max_tb=MAX_TB_HI)
            from geoips2.output_formats.image import create_standard_imagery
            # BUGFIX: was "annotated_fname=atcf_fname" - every other create_standard_imagery call
            # site in this module passes the plural keyword with a list.
            final_products += create_standard_imagery(
                area_def, plot_data=interp_data, xarray_obj=xarray_obj,
                product_name_title=product_name, clean_fname=atcf_fname_clean,
                annotated_fnames=[atcf_fname], mpl_colors_info=mpl_colors_info,
                boundaries_info=BOUNDARIES_INFO, gridlines_info=GRIDLINES_INFO)
    else:
        LOG.info('Insufficient coverage, skipping')
    # This generates YAML files of sector-related metadata for all products in the final_products list
    final_products += produce_all_sector_metadata(final_products, area_def, xarray_obj)
    return final_products
def output_metoctiff(product_name, area_def, sect_xarray, interp_data, requested_data_min=None,
                     requested_data_max=None, scale_data_min=1, scale_data_max=255, missing_value=0,
                     units=None, coverage=None, mpl_cmap=None, product_key='', existing_image=None,
                     atcf_dir='atcf'):
    ''' Write a METOCTIFF output file for a TC (atcf) sector.

    Args:
        product_name (str) : base product name; product_key is appended to form the full product id
        area_def (AreaDefinition) : pyresample AreaDefinition for the data being written
        sect_xarray (Dataset) : xarray Dataset supplying platform/source names and start/end datetimes
        interp_data (ndarray) : data array to write into the tiff
        requested_data_min/requested_data_max : DEFAULT None, data range to scale into the tiff
        scale_data_min/scale_data_max (int) : DEFAULT 1/255, 8-bit scale range for the tiff values
        missing_value (int) : DEFAULT 0, scaled value used for missing data
        units (str) : DEFAULT None ('unk' if falsy), data units recorded in the tiff
        coverage : DEFAULT None ('unk' if falsy), percent coverage recorded in the filename
        mpl_cmap : DEFAULT None, matplotlib colormap embedded in the tiff
        product_key (str) : DEFAULT '', suffix appended to product_name
        existing_image : DEFAULT None, passed through to metoctiff
        atcf_dir (str) : DEFAULT 'atcf', subdirectory for the output file
    Returns:
        (list) : list of output product paths (empty for non-atcf sectors)
    '''
    output_products = []
    from geoips2.sector_utils.utils import is_sector_type
    # METOCTIFF output only applies to TC sectors.
    if not is_sector_type(area_def, 'atcf'):
        LOG.warning('NOT a TC sector, skipping ATCF output')
        return output_products
    # Pixel size is in meters; resolution recorded in km.
    resolution = int(max(area_def.pixel_size_x, area_def.pixel_size_y) / 1000)
    from geoips2.output_formats.metoctiff import metoctiff
    from geoips2.filenames.atcf_filenames import metoctiff_filename, atcf_web_filename
    from geoips2.filenames.base_paths import PATHS as gpaths
    prodname = '{0}{1}'.format(product_name, product_key)
    start_dt = sect_xarray.start_datetime
    end_dt = sect_xarray.end_datetime
    # resolution = max(area_def.pixel_size_x, area_def.pixel_size_y) / 1000.0
    # Default names come straight from the dataset; the lookup tables below remap any that have
    # registered overrides for filenames and embedded tiff tags.
    platform_filename = sect_xarray.platform_name
    source_filename = sect_xarray.source_name
    prod_filename = prodname
    mtif_tag_data_name = prodname
    mtif_tag_data_platform = sect_xarray.platform_name
    mtif_tag_product_description = 'None'
    if platform_filename in MTIF_FNAME_SATLIST.keys():
        platform_filename = MTIF_FNAME_SATLIST[sect_xarray.platform_name]
    if source_filename in MTIF_FNAME_SENSORLIST.keys():
        source_filename = MTIF_FNAME_SENSORLIST[sect_xarray.source_name]
    if prod_filename in MTIF_FNAME_PRODLIST.keys():
        # Some product filename templates embed the integer resolution.
        if '{0:d}' in MTIF_FNAME_PRODLIST[prodname]:
            prod_filename = MTIF_FNAME_PRODLIST[prodname].format(resolution)
        else:
            prod_filename = MTIF_FNAME_PRODLIST[prodname]
    if sect_xarray.platform_name + prodname in MTIF_TAG_DATA_NAME_LIST.keys():
        # These are based off of satellite and product
        mtif_tag_data_name = MTIF_TAG_DATA_NAME_LIST[sect_xarray.platform_name + prodname]
    if mtif_tag_data_platform in MTIF_TAG_DATA_PLATFORM_LIST.keys():
        mtif_tag_data_platform = MTIF_TAG_DATA_PLATFORM_LIST[mtif_tag_data_platform]
    if sect_xarray.platform_name + prodname in MTIF_TAG_PRODUCT_DESCRIPTION_LIST.keys():
        # These are based off of satellite and product
        mtif_tag_product_description = MTIF_TAG_PRODUCT_DESCRIPTION_LIST[
            sect_xarray.platform_name + prodname]
    if area_def.sector_type == 'atcf':
        # Create metoctiffs from the Nearest Neighbor interpolation
        mtif_fname = metoctiff_filename(
            basedir=gpaths['TCWWW'],
            tc_year=int(area_def.sector_info['storm_year']),
            tc_basin=area_def.sector_info['storm_basin'],
            tc_stormnum=int(area_def.sector_info['storm_num']),
            product_name=prod_filename,
            source_name=source_filename,
            platform_name=platform_filename,
            coverage=coverage,
            product_datetime=start_dt,
            atcf_dir=atcf_dir)
        import math
        # I think corners may NOT be ordered. area_extent_ll is explicitly (LL_Lon, LL_Lat, UR_Lon, UR_Lat)
        minlat = area_def.area_extent_ll[1]
        maxlat = area_def.area_extent_ll[3]
        minlon = area_def.area_extent_ll[0]
        maxlon = area_def.area_extent_ll[2]
        # Corner coordinates in radians: ul/ur/ll/lr = upper-left/upper-right/lower-left/lower-right,
        # uc/lc = upper-center/lower-center (center longitude taken from the projection's lon_0).
        ullat_radians = math.radians(maxlat)
        urlat_radians = math.radians(maxlat)
        lllat_radians = math.radians(minlat)
        lrlat_radians = math.radians(minlat)
        ullon_radians = math.radians(minlon)
        urlon_radians = math.radians(maxlon)
        lllon_radians = math.radians(minlon)
        lrlon_radians = math.radians(maxlon)
        uclat_radians = ullat_radians
        lclat_radians = lllat_radians
        uclon_radians = math.radians(area_def.proj_dict['lon_0'])
        lclon_radians = math.radians(area_def.proj_dict['lon_0'])
        # 0 to 360
        # import math
        # if lllon_rad > lrlon_rad and lrlon_rad < 0:
        #     lrlon_rad = lrlon_rad + 2 * math.pi
        # if ullon_rad > urlon_rad and urlon_rad < 0:
        #     urlon_rad = urlon_rad + 2 * math.pi
        if not units:
            units = 'unk'
        if not coverage:
            coverage = 'unk'
        output_products += metoctiff(
            interp_data, ullat_radians, urlat_radians, lllat_radians, lrlat_radians,
            uclat_radians, lclat_radians, ullon_radians, urlon_radians, lllon_radians,
            lrlon_radians, uclon_radians, lclon_radians,
            data_start_datetime=start_dt, data_end_datetime=end_dt,
            product_name=mtif_tag_data_name, platform_name=mtif_tag_data_platform,
            data_units=units, output_filename=mtif_fname,
            requested_data_min=requested_data_min, requested_data_max=requested_data_max,
            scale_data_min=scale_data_min, scale_data_max=scale_data_max,
            missing_value=missing_value, mpl_cmap=mpl_cmap,
            product_description=mtif_tag_product_description,
            existing_image=existing_image, gzip_output=True)
    # lons, lats = area_def.get_lonlats()
    # from geoips2.xarray_utils.data import get_lat_lon_points, get_lat_lon_points_numpy
    # LOG.info('min/max lon %s %s min/max lat %s %s', lons.min(), lons.max(), lats.min(), lats.max())
    # checklat = -10; checklon = 163; diff = 0.01
    # get_lat_lon_points(checklat, checklon, diff, sect_xarray, 'B14BT')
    # get_lat_lon_points_numpy(checklat, checklon, diff, lats, lons, interp_data)
    # from IPython import embed as shell; shell()
    return output_products
def output_atcf_fname(area_def, wind_xarray, prodname, covg, output_type='png', output_type_dir=None,
                      product_dir=None, output_old_tc_web=False):
    ''' Build the output filename for a TC (atcf) web product.

    Args:
        area_def (AreaDefinition) : pyresample AreaDefinition; must be an atcf sector with sector_info
        wind_xarray (Dataset) : xarray Dataset supplying source/platform names and start_datetime
        prodname (str) : product name used in the filename
        covg : percent coverage recorded in the filename
        output_type (str) : DEFAULT 'png', file extension / output type
        output_type_dir (str) : DEFAULT None, subdirectory for the output type (defaults to output_type)
        product_dir (str) : DEFAULT None, product subdirectory (defaults to prodname)
        output_old_tc_web (bool) : DEFAULT False, if True return the old-style TC web filename instead
    Returns:
        (str) : full output filename, or None if area_def is not an atcf sector
    '''
    from geoips2.sector_utils.utils import is_sector_type
    if area_def and not is_sector_type(area_def, 'atcf'):
        LOG.warning('NOT a TC sector, skipping ATCF output')
        return None
    if not product_dir:
        product_dir = prodname
    if not output_type_dir:
        output_type_dir = output_type
    # This allows you to explicitly set matplotlib parameters (colorbars, titles, etc). Overrides were
    # placed in geoimgbase.py to allow using explicitly set values rather than geoimgbase determined
    # defaults. Return reused parameters (min/max vals for normalization, colormaps, matplotlib
    # Normalization)
    # NOTE: removed unused local imports (metoctiff, metoctiff_filename, get_min_from_xarray_timestamp)
    from geoips2.filenames.atcf_filenames import atcf_web_filename
    from geoips2.filenames.old_tcweb_fnames import old_tcweb_fnames
    from geoips2.filenames.base_paths import PATHS as gpaths
    start_dt = wind_xarray.start_datetime
    # Pixel size is in meters; filename records resolution in km.
    resolution = max(area_def.pixel_size_x, area_def.pixel_size_y) / 1000.0
    if area_def.sector_info['wind_speed']:
        intensity = '{0:0.0f}kts'.format(area_def.sector_info['wind_speed'])
    else:
        # This is pulling intensity directly from the deck file, and sometimes it is not defined -
        # if empty, just use "unknown" for intensity
        intensity = 'unknown'
    # Resolution string with '.' replaced (e.g. 2.5 -> '2p5') for filename safety.
    extra = '{0:0.1f}'.format(resolution).replace('.', 'p')
    web_fname = atcf_web_filename(
        basedir=gpaths['TCWWW'],
        tc_year=int(area_def.sector_info['storm_year']),
        tc_basin=area_def.sector_info['storm_basin'],
        tc_stormnum=int(area_def.sector_info['storm_num']),
        output_type=output_type,
        product_name=prodname,
        product_dir=product_dir,
        source_name=wind_xarray.source_name,
        platform_name=wind_xarray.platform_name,
        coverage=covg,
        product_datetime=start_dt,
        intensity=intensity,
        extra=extra,
        output_type_dir=output_type_dir)
    if output_old_tc_web is True:
        web_fname = old_tcweb_fnames(
            basedir=gpaths['TCWWW'],
            tc_year=int(area_def.sector_info['storm_year']),
            tc_basin=area_def.sector_info['storm_basin'],
            tc_stormnum=int(area_def.sector_info['storm_num']),
            tc_stormname=area_def.sector_info['final_storm_name'],
            output_type='jpg',
            product_name=prodname,
            source_name=wind_xarray.source_name,
            platform_name=wind_xarray.platform_name,
            coverage=covg,
            product_datetime=start_dt,
            intensity=intensity,
            extra=extra)
    return web_fname
def output_windspeed_text(wind_xarray, area_def=None, overwrite=True, append=False, creation_time=None):
    ''' Output windspeed text file to the appropriate location, based on attributes of wind_xarray object

    Parameters:
        wind_xarray (xarray.Dataset): Required to determine attributes and data to write to text file
        area_def (AreaDefinition): DEFAULT None - when given, must be an atcf sector; determines the
                                   dynamic TC file location (storm-specific path); otherwise the full
                                   text-windspeeds dropbox path is used
        overwrite (bool): DEFAULT True - when False (and not appending), skip if the file exists
        append (bool): DEFAULT False - append to an existing file rather than overwrite
        creation_time (datetime): DEFAULT None - passed through to the full-filename generator
    Returns:
        (list) list of text products generated
    '''
    output_products = []
    from geoips2.sector_utils.utils import is_sector_type
    if area_def and not is_sector_type(area_def, 'atcf'):
        LOG.warning('NOT a TC sector, skipping ATCF output')
        return output_products
    from geoips2.filenames.atcf_filenames import atcf_storm_text_windspeeds_filename, atcf_full_text_windspeeds_filename
    from geoips2.filenames.base_paths import PATHS as gpaths
    start_dt = wind_xarray.start_datetime
    if area_def:
        # Storm-specific filename under the TC web tree.
        text_fname = atcf_storm_text_windspeeds_filename(
            basedir=gpaths['TCWWW'],
            tc_year=int(area_def.sector_info['storm_year']),
            tc_basin=area_def.sector_info['storm_basin'],
            tc_stormnum=int(area_def.sector_info['storm_num']),
            platform_name=wind_xarray.platform_name,
            product_datetime=start_dt,
            data_provider=wind_xarray.data_provider)
    else:
        # No sector: write the full dataset to the ATCF dropbox.
        text_fname = atcf_full_text_windspeeds_filename(
            basedir=gpaths['ATCFDROPBOX'],
            source_name=wind_xarray.source_name,
            platform_name=wind_xarray.platform_name,
            data_provider=wind_xarray.data_provider,
            product_datetime=start_dt,
            creation_time=creation_time)
    from os.path import exists
    if exists(text_fname) and not overwrite and not append:
        LOG.info('File already exists, not overwriting %s', text_fname)
        return output_products
    from geoips2.output_formats.text_winds import atcf_text_windspeeds
    # NOTE long does not exist in Python 3, so changed this to int. This will limit us to
    # 32 bit integers within Python 2
    # time_array = wind_xarray['timestamp'].to_masked_array().astype(long).flatten()
    time_array = wind_xarray['timestamp'].to_masked_array().astype(int).flatten()
    # This results in an array of POSIX timestamps - seconds since epoch.
    # NOTE(review): true division yields floats; presumably atcf_text_windspeeds formats these
    # as whole seconds - confirm, or use // for integer seconds.
    time_array = time_array / 10**9
    wind_dir = None
    if hasattr(wind_xarray, 'wind_dir_deg_met'):
        wind_dir = wind_xarray['wind_dir_deg_met'].to_masked_array().flatten()
    atcf_text_windspeeds(
        text_fname,
        wind_xarray['wind_speed_kts'].to_masked_array().flatten(),
        time_array,
        wind_xarray['longitude'].to_masked_array().flatten(),
        wind_xarray['latitude'].to_masked_array().flatten(),
        wind_xarray.platform_name,
        dir_array=wind_dir,
        append=append)
    output_products = [text_fname]
    return output_products
def visir(xarray_datasets, area_def):
    ''' Produce standard single-channel Visible/Infrared products for one sector.

    This is a template for creating an external algorithm for operating on arbitrary data types
    from the datafile, outputting required data products, and plotting as needed.
    Most of standard GeoIPS processing is bypassed for these algorithm types.

    NOTE in geoips/geoimg/plot/prototypealg.py
        scifile is converted to xarray BEFORE being passed (standard variables latitude*, longitude*, timestamp*)
        sector is converted to area_def BEFORE being passed (standard attributes sector_*)
        from geoips2.geoips1_utils.scifile import xarray_from_scifile
        from geoips2.geoips1_utils.sector import area_def_from_sector

    Args:
        xarray_datasets (list of xarray.Dataset) : xarray Dataset objects including all required variables.
        area_def (AreaDefinition) : pyresample AreaDefinition object specifying the current location information.

    Returns:
        (list) : list of strings, full paths to all successfully produced output products.
                 No automated plotting is performed from process.py for prototype algorithms;
                 all plotting must be performed from within geoips2.
    '''
    final_products = []
    # Assuming since this is the single channel algorithm, that we are only pulling one variable
    for xarray_dataset in xarray_datasets:
        for currvarname in VARLIST[xarray_dataset.source_name]:
            if currvarname not in xarray_dataset.variables.keys():
                LOG.info('%s not in xarray_dataset, skipping', currvarname)
                continue
            varname = currvarname
            LOG.info('Running on variable %s in xarray_dataset with shape %s',
                     varname, xarray_dataset['latitude'].dims)
            product_name = PRODNAME_LIST[xarray_dataset.source_name + varname]
            if is_dynamic_sector(area_def):
                LOG.info('Trying to sector %s with dynamic time %s, %s points',
                         area_def.area_id, area_def.sector_start_datetime, xarray_dataset['latitude'].size)
            else:
                LOG.info('Trying to sector %s, %s points', area_def.area_id, xarray_dataset['latitude'].size)

            xarray_dataset[varname].attrs['units'] = DATA_UNITSLIST[product_name]
            # Convert Kelvin -> celsius when the product requests it, unless already converted.
            if DATA_UNITSLIST[product_name] == 'Kelvin' and PRODUCT_UNITS_LIST[product_name] == 'celsius':
                if 'units' in xarray_dataset[varname].attrs and xarray_dataset[varname].units == 'celsius':
                    LOG.info('%s already in celsius, not converting', varname)
                else:
                    xarray_dataset[varname] = xarray_dataset[varname] + KtoC_conversion
                    xarray_dataset[varname].attrs['units'] = 'celsius'

            from geoips2.xarray_utils.data import sector_xarray_dataset
            # Pass all 4 variables to sector_xarray_dataset, so they are all masked appropriately
            # for pulling min/max vals
            varlist = [varname, 'latitude', 'longitude']
            if product_name in MIN_SUNZEN_LIST.keys():
                varlist += ['SunZenith']
            # Grab an extra +- 3 degrees so if we read in the pre-sectored dataset, we will have
            # extra data for re-centering
            sect_xarray = sector_xarray_dataset(xarray_dataset, area_def, varlist, lon_pad=3, lat_pad=3)
            # Well this is annoying. numpy arrays fail if numpy_array is None, and xarrays fail if x_array == None
            if sect_xarray is None:
                continue
            sect_xarray.attrs['area_def'] = area_def
            sect_xarray.attrs['start_datetime'] = xarray_dataset.start_datetime
            sect_xarray.attrs['end_datetime'] = xarray_dataset.end_datetime
            LOG.info('Sectored data start/mid/end datetime: %s %s, %s points',
                     sect_xarray.start_datetime, sect_xarray.end_datetime,
                     numpy.ma.count(sect_xarray[varname].to_masked_array()))

            from geoips2.xarray_utils.interpolation import interp_nearest
            from geoips2.data_manipulations.info import percent_unmasked

            # Mask night-time data for day-only products.
            if product_name in MIN_SUNZEN_LIST.keys():
                from geoips2.data_manipulations.corrections import mask_night
                LOG.info('Percent unmasked day/night %s', percent_unmasked(sect_xarray[varname]))
                sect_xarray[varname] = xarray.DataArray(
                    mask_night(sect_xarray[varname].to_masked_array(),
                               sect_xarray['SunZenith'].to_masked_array(),
                               MIN_SUNZEN_LIST[product_name]))
                LOG.info('Percent unmasked day only %s', percent_unmasked(sect_xarray[varname]))
            if product_name in SUNZEN_CORRECTION_LIST:
                from geoips2.data_manipulations.corrections import apply_solar_zenith_correction
                sect_xarray[varname] = xarray.DataArray(
                    apply_solar_zenith_correction(sect_xarray[varname].to_masked_array(),
                                                  sect_xarray['SunZenith'].to_masked_array()))
            if product_name in GAMMA_LIST.keys():
                from geoips2.data_manipulations.corrections import apply_gamma
                for gamma in GAMMA_LIST[product_name]:
                    sect_xarray[varname] = xarray.DataArray(
                        apply_gamma(sect_xarray[varname].to_masked_array(), gamma))
            if product_name in SCALEFACTOR_LIST.keys():
                from geoips2.data_manipulations.corrections import apply_scale_factor
                sect_xarray[varname] = xarray.DataArray(
                    apply_scale_factor(sect_xarray[varname].to_masked_array(),
                                       SCALEFACTOR_LIST[product_name]))

            LOG.info('min/max before: %s to %s', sect_xarray[varname].min(), sect_xarray[varname].max())
            [interp_data] = interp_nearest(area_def, sect_xarray, varlist=[varname])
            LOG.info('min/max after: %s to %s', interp_data.min(), interp_data.max())
            # Compute coverage once (previously percent_unmasked was evaluated three times in a row).
            covg = percent_unmasked(interp_data)
            LOG.info('Percent unmasked before %s', covg)
            LOG.info('Percent unmasked after %s', covg)
            if covg > 0:
                final_products += write_product_xarray(
                    sect_xarray,
                    [varname, 'latitude', 'longitude'],
                    [product_name, 'latitude', 'longitude'])
                from geoips2.xarray_utils.outputs import output_atcf_fname, output_metoctiff
                # from geoips2.output_formats.image import plot_image
                from geoips2.image_utils.mpl_utils import set_matplotlib_colors_standard
                cbar_label = '{0} ({1})'.format(VARTYPE_TITLE[product_name], xarray_dataset[varname].units)
                LOG.info('data min val %s to max val %s', interp_data.min(), interp_data.max())
                # Create the matplotlib color info dict - the fields in this dictionary (cmap, norm,
                # boundaries, etc) will be used in plot_image to ensure the image matches the colorbar.
                mpl_colors_info = set_matplotlib_colors_standard(
                    DATARANGE_LIST[product_name],
                    cmap_name=CMAPLIST[product_name],
                    cbar_label=cbar_label)

                from geoips2.sector_utils.utils import is_sector_type
                if is_sector_type(area_def, 'atcf'):
                    # from geoips2.sector_utils.atcf_tracks import run_archer
                    # run_archer(sect_xarray, varname)
                    # get filename from objects
                    atcf_fname = output_atcf_fname(
                        area_def, sect_xarray, product_name, covg,
                        output_type='png',
                        output_type_dir=PNGDIRS[sect_xarray.source_name],
                        product_dir=product_name)
                    atcf_fname_clean = output_atcf_fname(
                        area_def, sect_xarray, product_name, covg,
                        output_type='png',
                        output_type_dir=PNGCLEANDIRS[sect_xarray.source_name],
                        product_dir=product_name)
                    # generate images
                    if product_name in PRODTYPES['Infrared']:
                        # setup a special color map for Infrared images at 11 um
                        from geoips2.image_utils.mpl_utils import set_matplotlib_colors_IR
                        curr_min_tb = -90
                        curr_max_tb = 30
                        mpl_colors_info = set_matplotlib_colors_IR(min_tb=curr_min_tb, max_tb=curr_max_tb)
                    # call plotting function
                    from geoips2.output_formats.image import create_standard_imagery
                    final_products += create_standard_imagery(
                        area_def,
                        plot_data=interp_data,
                        xarray_obj=sect_xarray,
                        product_name_title=product_name,
                        clean_fname=atcf_fname_clean,
                        annotated_fnames=[atcf_fname],
                        mpl_colors_info=mpl_colors_info,
                        boundaries_info=BOUNDARIES_INFO,
                        gridlines_info=GRIDLINES_INFO,
                        remove_duplicate_minrange=10)
                    # output Metoctiff files
                    final_products += output_metoctiff(
                        product_name, area_def, sect_xarray, interp_data,
                        requested_data_min=DATARANGE_LIST[product_name][0],
                        requested_data_max=DATARANGE_LIST[product_name][1],
                        scale_data_min=MIN_SCALE_VALUE,
                        scale_data_max=MAX_SCALE_VALUE,
                        missing_value=MISSING_SCALE_VALUE,
                        units=xarray_dataset[varname].units,
                        mpl_cmap=mpl_colors_info['cmap'],
                        coverage=covg,
                        atcf_dir=ATCFDIRS[sect_xarray.source_name])
                else:
                    from geoips2.xarray_utils.outputs import output_geoips_fname
                    # get filename from objects
                    web_fname = output_geoips_fname(area_def, sect_xarray, product_name, covg)
                    from geoips2.output_formats.image import create_standard_imagery
                    final_products += create_standard_imagery(
                        area_def,
                        plot_data=interp_data,
                        xarray_obj=sect_xarray,
                        product_name_title=product_name,
                        clean_fname=None,
                        annotated_fnames=[web_fname],
                        mpl_colors_info=mpl_colors_info)
            else:
                LOG.info('Insufficient coverage, skipping')

    # NOTE(review): metadata generation placed at function level (after all datasets are
    # processed, using the last xarray_dataset), mirroring the pattern in template() — confirm
    # against original (pre-mangled) source.
    from geoips2.output_formats.metadata import produce_all_sector_metadata
    final_products += produce_all_sector_metadata(
        final_products, area_def, xarray_dataset,
        metadata_dir=METADATADIRS[xarray_dataset.source_name])
    return final_products
def template(xarray_datasets, area_def):
    ''' Template for creating a simple product from a single channel from each supported data type.

    All products found in geoips2/products must have consistent call signatures, and require no
    return values. All data processing, plotting, and data and imagery outputs must be completed
    within the product algorithm.

    Call signature for all products found in geoips2/products/<product_name>.py:
        def <product_name>(xarray_datasets, area_def):

    NOTE FOR GEOIPS1 Interface:
        Link directly from geoips/productfiles/<source_name>/<product_name>.xml to
        geoips2.products.<product_name>.<product_name> — must be consistently named.
        in geoips/geoimg/plot/prototypealg.py
            scifile is converted to xarray BEFORE being passed (standard variables latitude, longitude, timestamp)
            sector is converted to area_def BEFORE being passed (standard attributes sector_*)
            from geoips2.geoips1_utils.scifile import xarray_from_scifile
            from geoips2.geoips1_utils.sector import area_def_from_sector

    NOTE FOR GEOIPS2 Prototype Interface:
        This will be imported and called from modules found in: geoips2/drivers/

    Args:
        xarray_datasets (list of xarray.Dataset) : list of xarray Dataset objects including all
            required variables.
            Required variables: 'latitude', 'longitude'
            Optional variable: 'timestamp'
            Required attributes: 'start_datetime', 'end_datetime', 'source_name',
                                 'platform_name', 'data_provider'
        area_def (AreaDefinition) : pyresample AreaDefinition object specifying the current
            location information. This includes standard pyresample AreaDefinition attributes,
            as well as additional dynamic or static sector info (ie, storm name, basin, etc for TCs).
            Extra GeoIPS 2 required attributes: 'sector_info', 'sector_type'
            Optional attributes (if included, assumed dynamic): 'sector_start_datetime',
                                                                'sector_end_datetime'

    Returns:
        (list) : list of strings, full paths to all successfully produced output products.
    '''
    final_outputs = []
    LOG.info('Running area_def %s', area_def)

    full_xarray = None
    varname = None
    # Assuming for template, we are just plotting a single variable per file, so loop through
    # until we find it. Note some "source_names" can have multiple file types, but there will
    # only be one of the listed variables per file.
    for xarray_dataset in xarray_datasets:
        for currvarname in VARLIST[xarray_dataset.source_name]:
            if currvarname not in xarray_dataset.variables.keys():
                # LOG.info('SKIPPING variable %s, not in current xarray object', currvarname)
                continue
            LOG.info('FOUND variable %s, in xarray dataset with dims %s',
                     currvarname, xarray_dataset['latitude'].dims)
            varname = currvarname
            full_xarray = xarray_dataset
    if full_xarray is None:
        # Fixed: original logged 'FAILED variable %s ...' with no argument for the %s placeholder.
        LOG.info('FAILED no requested variables found in any xarray Datasets')
        return final_outputs

    # Base product name here is "Template" for plotting titles, etc
    product_name = 'Template'
    # If we want to override the default radius of influence, pull it from the list above.
    # Fixed: key on the source_name of the dataset actually selected, not the last dataset looped.
    source_name = full_xarray.source_name
    if source_name in ROILIST.keys():
        full_xarray.attrs['interpolation_radius_of_influence'] = ROILIST[source_name]

    # logging information for what we are sectoring (temporally and spatially for dynamic,
    # just temporally for static)
    if hasattr(area_def, 'sector_start_datetime') and area_def.sector_start_datetime:
        LOG.info('Trying to sector %s with dynamic time %s, %s points',
                 area_def.area_id, area_def.sector_start_datetime, full_xarray['latitude'].size)
    else:
        LOG.info('Trying to sector %s, %s points', area_def.area_id, full_xarray['latitude'].size)

    # Compile a list of variables that will be used to sector - the current data variable, and
    # we will add in the appropriate latitude and longitude variables (of the same shape as
    # data), and if it exists the appropriately shaped timestamp array
    vars_to_sect = [varname, 'latitude', 'longitude']
    if 'timestamp' in full_xarray.variables.keys():
        vars_to_sect += ['timestamp']

    # The list of variables in vars_to_sect must ALL be the same shape
    from geoips2.xarray_utils.data import sector_xarray_dataset
    sect_xarray = sector_xarray_dataset(full_xarray, area_def, vars_to_sect)
    # numpy arrays fail if numpy_array is None, and xarrays fail if x_array == None
    if sect_xarray is None:
        LOG.info('No coverage - skipping')
        return final_outputs
    sect_xarray.attrs['area_def'] = area_def

    # If you have normal old 2d arrays, you don't have to worry about any of the array_nums pieces.
    array_nums = [None]
    if len(sect_xarray[varname].shape) == 3:
        array_nums = range(0, sect_xarray[varname].shape[2])

    from geoips2.xarray_utils.interpolation import interp_nearest
    from geoips2.xarray_utils.outputs import output_geoips_fname
    from geoips2.data_manipulations.info import percent_unmasked
    from geoips2.image_utils.mpl_utils import set_matplotlib_colors_standard

    final_products = []
    for array_num in array_nums:
        # Just pass array_num=None if it is a single 2d array
        [interp_data] = interp_nearest(area_def, sect_xarray, varlist=[varname], array_num=array_num)
        data_range = [interp_data.min(), interp_data.max()]
        cmap_name = None
        cbar_label = None
        if sect_xarray.source_name in CBAR_LABEL_LIST:
            cbar_label = CBAR_LABEL_LIST[sect_xarray.source_name]
        if sect_xarray.source_name in CMAP_LIST:
            cmap_name = CMAP_LIST[sect_xarray.source_name]
        if sect_xarray.source_name in DATA_RANGE_LIST:
            data_range = DATA_RANGE_LIST[sect_xarray.source_name]

        covg = percent_unmasked(interp_data)
        if covg > 0:
            # Product name identical in both branches - compute once.
            prodname = '{0}_{1}'.format(product_name, varname)
            mpl_colors_info = set_matplotlib_colors_standard(
                data_range=data_range, cmap_name=cmap_name, cbar_label=cbar_label)
            from geoips2.sector_utils.utils import is_sector_type
            from geoips2.output_formats.image import create_standard_imagery
            if is_sector_type(area_def, 'atcf'):
                # get filename from objects
                from geoips2.xarray_utils.outputs import output_atcf_fname
                atcf_fname = output_atcf_fname(area_def, sect_xarray, prodname, covg)
                atcf_fname_clean = output_atcf_fname(area_def, sect_xarray, prodname + 'Clean', covg)
                final_products += create_standard_imagery(
                    area_def,
                    plot_data=interp_data,
                    xarray_obj=sect_xarray,
                    product_name_title=prodname,
                    clean_fname=atcf_fname_clean,
                    annotated_fnames=[atcf_fname],
                    mpl_colors_info=mpl_colors_info)
            else:
                # Get the output filename from sector, and xarray objects
                web_fname = output_geoips_fname(area_def, sect_xarray, prodname, covg,
                                                product_dir=product_name, source_dir=product_name)
                web_fname_clean = output_geoips_fname(area_def, sect_xarray, prodname + 'Clean', covg,
                                                      product_dir=product_name, source_dir=product_name)
                final_products += create_standard_imagery(
                    area_def,
                    plot_data=interp_data,
                    xarray_obj=sect_xarray,
                    product_name_title=prodname,
                    clean_fname=web_fname_clean,
                    annotated_fnames=[web_fname],
                    mpl_colors_info=mpl_colors_info)
        else:
            LOG.info('Insufficient coverage, skipping')

    LOG.info('The following products were successfully produced from %s', __file__)
    for final_product in final_products:
        LOG.info('PRODUCTSUCCESS %s', final_product)

    from geoips2.output_formats.metadata import produce_all_sector_metadata
    final_products += produce_all_sector_metadata(final_products, area_def, full_xarray)
    return final_products
def sector_xarrays(xobjs, area_def, varlist, verbose=False, hours_before_sector_time=18,
                   hours_after_sector_time=6):
    '''Return list of sectored xarray objects
    '''
    import numpy
    ret_xobjs = []
    for xobj in xobjs:
        # Only sector the variables that were both requested and actually exist on this dataset.
        vars_to_interp = list(set(varlist) & set(xobj.variables.keys()))
        if not vars_to_interp:
            LOG.info('No required variables, skipping dataset')
            continue

        from geoips2.sector_utils.utils import is_dynamic_sector
        if is_dynamic_sector(area_def):
            LOG.info('Trying to sector %s with dynamic time %s, %s points',
                     area_def.area_id, area_def.sector_start_datetime, xobj['latitude'].size)
        else:
            LOG.info('Trying to sector %s, %s points', area_def.area_id, xobj['latitude'].size)

        # Sector on the data variables plus whichever of latitude/longitude/timestamp exist;
        # 'timestamp' enables temporal sectoring when present. All must share the same shape.
        vars_to_sect = list(vars_to_interp)
        for extra_var in ('latitude', 'longitude', 'timestamp'):
            if extra_var in xobj.variables.keys():
                vars_to_sect.append(extra_var)

        from geoips2.xarray_utils.data import sector_xarray_dataset
        sect_xarray = sector_xarray_dataset(
            xobj, area_def, vars_to_sect, verbose=verbose,
            hours_before_sector_time=hours_before_sector_time,
            hours_after_sector_time=hours_after_sector_time)

        # numpy arrays fail on "is None" comparisons done the other way; xarrays fail on "== None"
        if sect_xarray is None:
            if verbose:
                LOG.info('No coverage - skipping dataset')
            continue

        from geoips2.sector_utils.utils import is_sector_type
        if is_sector_type(area_def, 'atcf'):
            from geoips2.sector_utils.utils import check_center_coverage
            has_covg, covg_xarray = check_center_coverage(
                sect_xarray, area_def, varlist=vars_to_sect, covg_varname=vars_to_sect[0],
                width_degrees=8, height_degrees=8, verbose=verbose)
            if not has_covg:
                LOG.info('SKIPPING NO COVERAGE IN center box - NOT PROCESSING')
                continue
            # If the time within the box is > 50 min, we have two overpasses. ALL PMW sensors
            # are polar orbiters.
            covg_span = covg_xarray.end_datetime - covg_xarray.start_datetime
            if covg_span.seconds > 3000:
                LOG.info(
                    'Original sectored xarray contains more than one overpass - switching to start/datetime in center'
                )
                sect_xarray.attrs['start_datetime'] = covg_xarray.start_datetime
                sect_xarray.attrs['end_datetime'] = covg_xarray.end_datetime

        # add name of this sector to sector attribute
        sect_xarray.attrs['area_def'] = area_def
        if hasattr(sect_xarray, 'timestamp'):
            from geoips2.xarray_utils.timestamp import get_min_from_xarray_timestamp
            from geoips2.xarray_utils.timestamp import get_max_from_xarray_timestamp
            sect_xarray.attrs['start_datetime'] = get_min_from_xarray_timestamp(sect_xarray, 'timestamp')
            sect_xarray.attrs['end_datetime'] = get_max_from_xarray_timestamp(sect_xarray, 'timestamp')
            # Note: need to test whether above two lines can reselect min and max time_info for this sector

        LOG.info(
            'Sectored data start/end datetime: %s %s, %s points from var %s, all vars %s',
            sect_xarray.start_datetime, sect_xarray.end_datetime,
            numpy.ma.count(sect_xarray[vars_to_interp[0]].to_masked_array()),
            vars_to_interp[0], vars_to_interp)
        ret_xobjs.append(sect_xarray)

    return ret_xobjs