def gmi_hdf5(fnames, metadata_only=False, chans=None):
    ''' Read GMI hdf5 data products.

    All GeoIPS 2.0 readers read data into xarray Datasets - a separate
    dataset for each shape/resolution of data - and contain standard metadata information.

    Args:
        fnames (list): List of strings, full paths to files
        metadata_only (Optional[bool]):
            * NOT IMPLEMENTED - currently ignored, data is always read
            * DEFAULT False
        chans (Optional[list of str]):
            * NOT IMPLEMENTED
                * DEFAULT None (include all channels)

    Returns:
        list of xarray.Datasets: length-one list containing a single xarray.Dataset
            with required Variables and Attributes: (See geoips2/docs :doc:`xarray_standards`)
    '''

    import xarray as xr

    LOG.info('Reading files %s', fnames)

    # Merge every input file into one Dataset - read_gmi_file accumulates
    # each file's contents into the Dataset it is passed.
    xarray_gmi = xr.Dataset()
    for fname in fnames:
        xarray_gmi = read_gmi_file(fname, xarray_gmi)

    # Set standard GeoIPS 2.0 metadata attributes
    from geoips2.xarray_utils.timestamp import get_max_from_xarray_timestamp, get_min_from_xarray_timestamp
    xarray_gmi.attrs['start_datetime'] = get_min_from_xarray_timestamp(
        xarray_gmi, 'timestamp')
    xarray_gmi.attrs['end_datetime'] = get_max_from_xarray_timestamp(
        xarray_gmi, 'timestamp')
    xarray_gmi.attrs['source_name'] = 'gmi'
    xarray_gmi.attrs['platform_name'] = 'GPM'
    xarray_gmi.attrs['data_provider'] = 'NASA'
    # NOTE(review): 'fname' is the loop variable, so only the LAST file read is
    # recorded when multiple fnames are passed - confirm this is intended.
    xarray_gmi.attrs['original_source_filename'] = fname
    xarray_gmi.attrs['granule_minutes'] = 5

    # MTIFs need to be "prettier" for PMW products, so 2km resolution for final image
    xarray_gmi.attrs['sample_distance_km'] = 2
    xarray_gmi.attrs['interpolation_radius_of_influence'] = 12500

    return [xarray_gmi]
Example #2
0
def amsr2_ncdf(fnames, metadata_only=False):
    ''' Read AMSR2 netcdf data products. NOTE AMSR2 OCEAN wind products are in sfc_winds_ncdf.py

    All GeoIPS 2.0 readers read data into xarray Datasets - a separate
    dataset for each shape/resolution of data - and contain standard metadata information.

    Args:
        fnames (list): List of strings, full paths to files
        metadata_only (Optional[bool]):
            * DEFAULT False
            * return before actually reading data if True

    Returns:
        list of xarray.Datasets: list of xarray.Dataset objects with required Variables and Attributes:
            * See geoips2/docs :doc:`xarray_standards`

    Raises:
        ValueError: if the file's 'title' attribute does not identify a supported AMSR2 product
   '''
    import xarray
    from geoips2.xarray_utils.timestamp import get_min_from_xarray_timestamp, get_max_from_xarray_timestamp
    fname = fnames[0]
    full_xarray = xarray.open_dataset(str(fname))

    # Standard metadata attributes expected by downstream GeoIPS 2.0 processing
    full_xarray.attrs['data_provider'] = 'unknown'
    full_xarray.attrs['original_source_filename'] = fname
    full_xarray.attrs['source_name'] = 'amsr2'
    full_xarray.attrs['platform_name'] = 'gcom-w1'
    full_xarray.attrs['interpolation_radius_of_influence'] = 10000
    if 'creator_name' in full_xarray.attrs and 'NOAA' in full_xarray.creator_name:
        full_xarray.attrs['data_provider'] = 'star'
    full_xarray.attrs['minimum_coverage'] = 20
    LOG.info('Read data from %s', fname)

    # Dispatch on the product 'title' attribute. Previously an unrecognized title
    # left 'xarrays' unbound and raised a confusing NameError below - fail
    # explicitly instead.
    title = getattr(full_xarray, 'title', '')
    if 'AMSR2_OCEAN' in title:
        xarrays = read_amsr_winds(full_xarray)
    elif 'MBT' in title or 'PRECIP' in title:
        # MBT and PRECIP products share the same reader
        xarrays = read_amsr_data(full_xarray)
    else:
        raise ValueError(
            'Unsupported AMSR2 product in %s, title=%r' % (fname, title))

    for curr_xarray in xarrays:
        LOG.info('Setting standard metadata')
        curr_xarray.attrs['start_datetime'] = get_min_from_xarray_timestamp(
            curr_xarray, 'timestamp')
        curr_xarray.attrs['end_datetime'] = get_max_from_xarray_timestamp(
            curr_xarray, 'timestamp')
    return xarrays
Example #3
0
def sfc_winds_ncdf(fnames, metadata_only=False):
    ''' Read one of SAR, SMAP, SMOS, AMSR derived winds from netcdf data.
        Parameters:
            fnames (list): Required list of strings to full paths of netcdf files to read
            metadata_only (Optional[bool]): DEFAULT False. Currently unused by this reader.
        Returns:
            xarray.Dataset with required Variables and Attributes:
                Variables: 'latitude', 'longitude', 'timestamp', 'wind_speed_kts'
                Attributes: 'source_name', 'platform_name', 'data_provider', 'interpolation_radius_of_influence'
                            'start_datetime', 'end_datetime'
                Optional Attrs: 'original_source_filename', 'filename_datetime'

        NOTE(review): if the first file matches none of the source checks below,
        'wind_xarrays' is never assigned and the final loop raises NameError -
        confirm whether unsupported inputs should fail more explicitly.
    '''

    from geoips2.xarray_utils.timestamp import get_min_from_xarray_timestamp, get_max_from_xarray_timestamp
    import xarray
    # Only SAR reads multiple files
    fname = fnames[0]
    wind_xarray = xarray.open_dataset(str(fname))
    # Placeholder metadata - overwritten by the per-source readers below
    wind_xarray.attrs['data_provider'] = 'unknown'
    wind_xarray.attrs['source_name'] = 'unknown'
    wind_xarray.attrs['platform_name'] = 'unknown'
    wind_xarray.attrs['interpolation_radius_of_influence'] = 'unknown'
    wind_xarray.attrs['original_source_filename'] = fname
    wind_xarray.attrs['sample_distance_km'] = 'unknown'

    LOG.info('Read data from %s', fname)

    # --- SAR: read every file and stack them into one combined dataset ---
    if hasattr(wind_xarray, 'source') and 'SAR' in wind_xarray.source\
       and hasattr(wind_xarray, 'title') and 'SAR' in wind_xarray.title:
        wind_xarrays = []
        columns = None
        for fname in fnames:
            LOG.info('    Reading file %s', fname)
            # NOTE(review): 'wind_xarray' is reassigned per file here, so the
            # institution/platform/title checks further down operate on the
            # LAST file opened - confirm that is intended.
            wind_xarray = xarray.open_dataset(str(fname))
            LOG.info('        rows: %s, columns: %s', wind_xarray.rows, wind_xarray.columns)
            if columns is None:
                columns = wind_xarray.columns
            # Only files whose column count matches the first file can be vstacked
            if columns == wind_xarray.columns:
                wind_xarrays += read_sar_data(wind_xarray)
            else:
                LOG.info('            COLUMNS DOES NOT MATCH, NOT APPENDING')
        final_xarray = xarray.Dataset()
        import numpy
        # Vertically stack each per-file array into a single combined 2D array
        lat_array = xarray.DataArray(numpy.vstack([curr_xarray.latitude.to_masked_array()
                                                   for curr_xarray in wind_xarrays]))
        lon_array = xarray.DataArray(numpy.vstack([curr_xarray.longitude.to_masked_array()
                                                   for curr_xarray in wind_xarrays]))
        timestamp_array = xarray.DataArray(numpy.vstack([curr_xarray.timestamp.to_masked_array()
                                                         for curr_xarray in wind_xarrays]))
        wspd_array = xarray.DataArray(numpy.vstack([curr_xarray.wind_speed_kts.to_masked_array()
                                                   for curr_xarray in wind_xarrays]))
        final_xarray['latitude'] = lat_array
        final_xarray['longitude'] = lon_array
        final_xarray['timestamp'] = timestamp_array
        final_xarray['wind_speed_kts'] = wspd_array
        # Attributes are taken from the first file's dataset
        final_xarray.attrs = wind_xarrays[0].attrs

        wind_xarrays = [final_xarray]

    # --- REMSS products: SMAP / WindSat / AMSR2, distinguished by title ---
    if hasattr(wind_xarray, 'institution') and 'Remote Sensing Systems' in wind_xarray.institution:
        if hasattr(wind_xarray, 'title') and 'SMAP' in wind_xarray.title:
            wind_xarrays = read_remss_data(wind_xarray, 'smap')

        if hasattr(wind_xarray, 'title') and 'WindSat' in wind_xarray.title:
            wind_xarrays = read_remss_data(wind_xarray, 'windsat')

        if hasattr(wind_xarray, 'title') and 'AMSR2' in wind_xarray.title:
            wind_xarrays = read_remss_data(wind_xarray, 'amsr2')

    if hasattr(wind_xarray, 'platform') and 'SMOS' in wind_xarray.platform:
        # SMOS timestamp is not read in correctly natively with xarray - must pass fname so we can get time
        # information directly from netCDF4.Dataset open
        wind_xarrays = read_smos_data(wind_xarray, fname)

    if hasattr(wind_xarray, 'title') and 'AMSR2_OCEAN' in wind_xarray.title:
        from geoips2.readers.amsr2_ncdf import read_amsr_winds
        wind_xarrays = read_amsr_winds(wind_xarray)

    # --- KNMI scatterometer products (ASCAT / OSCAT) ---
    if hasattr(wind_xarray, 'title_short_name') and 'ASCAT' in wind_xarray.title_short_name:
        wind_xarrays = read_knmi_data(wind_xarray)

    if hasattr(wind_xarray, 'title_short_name') and 'OSCAT' in wind_xarray.title_short_name:
        wind_xarrays = read_knmi_data(wind_xarray)

    if hasattr(wind_xarray, 'institution') and 'Brigham Young University' in wind_xarray.institution:
        wind_xarrays = read_byu_data(wind_xarray)

    # Finalize standard metadata on every returned dataset
    for wind_xarray in wind_xarrays:

        if not hasattr(wind_xarray, 'minimum_coverage'):
            wind_xarray.attrs['minimum_coverage'] = 20

        LOG.info('Setting standard metadata')
        wind_xarray.attrs['start_datetime'] = get_min_from_xarray_timestamp(wind_xarray, 'timestamp')
        wind_xarray.attrs['end_datetime'] = get_max_from_xarray_timestamp(wind_xarray, 'timestamp')

        if 'wind_speed_kts' in wind_xarray.variables:
            # These text files store wind speeds natively in kts
            wind_xarray['wind_speed_kts'].attrs['units'] = 'kts'

        LOG.info('Read data %s start_dt %s source %s platform %s data_provider %s roi %s native resolution',
                 wind_xarray.attrs['start_datetime'],
                 wind_xarray.attrs['source_name'],
                 wind_xarray.attrs['platform_name'],
                 wind_xarray.attrs['data_provider'],
                 wind_xarray.attrs['interpolation_radius_of_influence'],
                 wind_xarray.attrs['sample_distance_km'])

    return wind_xarrays
Example #4
0
def read_amsr_mbt(full_xarray, varname, timestamp=None):
    ''' Reformat AMSR xarray object appropriately

    Args:
        full_xarray (xarray.Dataset): full dataset read from the source file
        varname (str): brightness temperature variable name to extract
        timestamp (Optional[xarray.DataArray]): pre-built timestamp array to reuse;
            DEFAULT None, in which case timestamps are built from 'Scan_Time'

    Returns:
        xarray.Dataset with:
            variables: latitude, longitude, timestamp, brightness temperature variables
            attributes: source_name, platform_name, data_provider, interpolation_radius_of_influence
    '''
    import xarray
    LOG.info('Reading AMSR data %s', varname)
    sub_xarray = xarray.Dataset()
    sub_xarray.attrs = full_xarray.attrs.copy()
    # Set attributes appropriately

    # Mappings are Brightness_Temperature_89_GHz_AH -> Latitude_for_89A, etc
    # Mappings are Brightness_Temperature_10_GHzH -> Latitude_for_10, etc
    chanstr = varname.replace('Brightness_Temperature_', '')
    chanstr = chanstr.replace('_GHz_', '')
    chanstr = chanstr.replace('_GHz', '')
    chanstr = chanstr.replace('H', '')
    chanstr = chanstr.replace('V', '')

    # Set lat/lons appropriately
    sub_xarray['latitude'] = full_xarray['Latitude_for_{0}'.format(chanstr)]
    sub_xarray['longitude'] = full_xarray['Longitude_for_{0}'.format(chanstr)]
    sub_xarray[varnames[varname]] = full_xarray[varname]
    # BUGFIX: Dataset.set_coords is NOT in-place - it returns a new Dataset.
    # The return value was previously discarded, so lat/lon were never
    # promoted to coordinates here.
    sub_xarray = sub_xarray.set_coords(['latitude', 'longitude'])

    # https://www.ospo.noaa.gov/Products/atmosphere/gpds/about_amsr2.html
    # 37GHz, 7km x 12km ground resolution
    # 89GHz, 3km x 5km ground resolution

    # MTIFs need to be "prettier" for PMW products, so 2km resolution for all channels
    # sub_xarray.attrs['sample_distance_km'] = 3.0
    sub_xarray.attrs['sample_distance_km'] = 2.0
    sub_xarray.attrs['interpolation_radius_of_influence'] = 10000
    for dim in sub_xarray.dims.keys():
        if 'low_rez' in dim:
            # MTIFs need to be "prettier" for PMW products, so 2km resolution for all channels
            # sub_xarray.attrs['sample_distance_km'] = 7.0
            sub_xarray.attrs['sample_distance_km'] = 2.0
            sub_xarray.attrs['interpolation_radius_of_influence'] = 20000

    # See dictionaries above for appropriate land mask array locations for each variable
    # (land_var / land_num are module-level lookup dicts keyed by channel string)
    full_xarray['LandMask'] = xarray.DataArray(
        full_xarray[land_var[chanstr]].to_masked_array()[
            land_num[chanstr], :, :],
        coords=full_xarray[varname].coords)

    if timestamp is None:
        import numpy
        # Set timestamp appropriately
        import pandas
        dtstrs = []
        LOG.info('Reading scan_times, for dims %s',
                 sub_xarray[varnames[varname]].dims)
        # Scan_Time holds (year, month, day, hour, minute, second) tuples per scan
        for scan_time in full_xarray['Scan_Time']:
            dtstrs += [
                '{0:04.0f}{1:02.0f}{2:02.0f}T{3:02.0f}{4:02.0f}{5:02.0f}'.
                format(*tuple([xx for xx in scan_time.values]))
            ]
        # Have to set it on the actual xarray so it becomes a xarray format time series (otherwise if you set it
        # directly to ts, it is a pandas format time series, and expand_dims doesn't exist).
        timestamps = pandas.to_datetime(dtstrs,
                                        format='%Y%m%dT%H%M%S',
                                        errors='coerce').tolist()
        LOG.info('    Setting list of times')
        # Replicate the per-scan times across the cross-track dimension
        tss = [
            timestamps
            for ii in range(0, sub_xarray[varnames[varname]].shape[1])
        ]
        LOG.info('    Setting timestamp DataArray')
        sub_xarray['timestamp'] = xarray.DataArray(
            data=numpy.array(tss).transpose(),
            coords=full_xarray[varname].coords,
            name='timestamp')
        sub_xarray = sub_xarray.set_coords(['timestamp'])
    else:
        LOG.info('Using existing scan_times, for dims %s',
                 sub_xarray[varnames[varname]].dims)
        sub_xarray['timestamp'] = timestamp
    from geoips2.xarray_utils.timestamp import get_min_from_xarray_timestamp, get_max_from_xarray_timestamp
    sub_xarray.attrs['start_datetime'] = get_min_from_xarray_timestamp(
        sub_xarray, 'timestamp')
    sub_xarray.attrs['end_datetime'] = get_max_from_xarray_timestamp(
        sub_xarray, 'timestamp')
    return sub_xarray
Example #5
0
def pmw_mint(xarray_datasets, area_def, arg_dict=None):
    ''' Process xarray_dataset (xarray_datasets expected to be length 1 list) over area_def, with optional arg_dict.
    input xarray-based variables are defined in the readers with the GEOIPS2 framework

    Args:
        xarray_datasets (list) : list of xarray Dataset objects - for pmw_mint products, expect a length one list.
        area_def (AreaDefinition) : pyresample AreaDefinition specifying initial region to process.
        arg_dict (dict) : Dictionary of optional arguments (command_line_args are passed through)
    Returns:
        (list) : List of full paths to all products produed through pmw_mint processing
    '''

    LOG.info(arg_dict)
    final_products = []

    full_xarray = xarray_datasets[0]

    # DATASET_INFO is imported from readers.mint_ncdf - contains list of possible variables for each dataset
    for varname in DATASET_INFO[full_xarray.dataset_name]:

        if varname not in full_xarray.variables.keys():
            LOG.info('SKIPPING variable %s, not in current xarray object',
                     varname)
            continue

        # Interpolation radius of influence is set for each dataset separately in the mint_ncdf reader - adjust
        # in readers/mint_ncdf.py ROI_INFO dictionary
        # set_roi(full_xarray, varname)

        if area_def.sector_start_datetime:
            LOG.info('Trying to sector %s with dynamic time %s, %s points',
                     area_def.area_id, area_def.sector_start_datetime,
                     full_xarray['latitude'].size)
        else:
            LOG.info('Trying to sector %s, %s points', area_def.area_id,
                     full_xarray['latitude'].size)

        # Compile a list of variables that will be used to sector - the current data variable, and we will add in
        # the appropriate latitude and longitude variables (of the same shape as data), and if it exists the
        # appropriately shaped timestamp array
        vars_to_sect = [varname]  # create a new sect to list intended products

        # we have to have 'latitude','longitude" in the full_xarray, and 'timestamp' if we want temporal sectoring
        if 'latitude' in full_xarray.variables.keys():
            vars_to_sect += ['latitude']
        if 'longitude' in full_xarray.variables.keys():
            vars_to_sect += ['longitude']
        if 'timestamp' in full_xarray.variables.keys():
            vars_to_sect += ['timestamp']

        # I believe ARCHER can not have any masked data within the data grid, so create a separate smaller sector for
        # running archer.  The size of the "new" ARCHER sector could probably use some tweaking, though this worked
        # "out of the box" for my test case.
        # Probably in the end want to just run ARCHER first, get the new center, then create a new area_def with
        # the ARCHER center. and sector / register based on the ARCHER centered area_def. Ok, I'll just do that
        # really quickly.
        archer_area_def = set_atcf_area_def(area_def.sector_info,
                                            num_lines=500,
                                            num_samples=500,
                                            pixel_width=10000,
                                            pixel_height=10000)
        archer_xarray = sector_xarray_dataset(full_xarray, archer_area_def,
                                              vars_to_sect)

        try:
            from geoips2.sector_utils.atcf_tracks import run_archer
            in_dict, out_dict, score_dict = run_archer(archer_xarray, varname)
        except ValueError:
            # BUGFIX: previously dropped into an interactive IPython shell
            # (debug residue) - log the failure and skip this variable instead.
            LOG.exception('run_archer failed for %s, skipping variable', varname)
            continue

        recentered_area_def = recenter_area_def(area_def, out_dict)

        # The list of variables in vars_to_sect must ALL be the same shape
        sect_xarray = sector_xarray_dataset(full_xarray, recentered_area_def,
                                            vars_to_sect)

        # numpy arrays fail if numpy_array is None, and xarrays fail if x_array == None
        if sect_xarray is None:
            LOG.info('No coverage - skipping')
            return final_products

        sect_xarray.attrs[
            'area_def'] = recentered_area_def  # add name of this sector to sector attribute
        if hasattr(sect_xarray, 'timestamp'):
            from geoips2.xarray_utils.timestamp import get_min_from_xarray_timestamp
            from geoips2.xarray_utils.timestamp import get_max_from_xarray_timestamp
            sect_xarray.attrs[
                'start_datetime'] = get_min_from_xarray_timestamp(
                    sect_xarray, 'timestamp')
            sect_xarray.attrs['end_datetime'] = get_max_from_xarray_timestamp(
                sect_xarray, 'timestamp')
            # Note:  need to test whether above two lines can reselect min and max time_info for this sector

        LOG.info('Sectored data start/end datetime: %s %s, %s points',
                 sect_xarray.start_datetime, sect_xarray.end_datetime,
                 numpy.ma.count(sect_xarray[varname].to_masked_array()))

        # 2D variables get a single pass; 3D variables get one pass per channel
        array_nums = [None]
        if len(sect_xarray[varname].shape) == 3:
            array_nums = range(0, sect_xarray[varname].shape[2])

        for array_num in array_nums:
            # selection of an intepolation scheme

            from geoips2.xarray_utils.interpolation import interp_nearest
            try:
                [interp_data] = interp_nearest(recentered_area_def,
                                               sect_xarray,
                                               varlist=[varname],
                                               array_num=array_num)
            except ValueError:
                # BUGFIX: previously dropped into an interactive IPython shell and
                # then fell through to plot with an undefined/stale interp_data -
                # log and skip this array instead.
                LOG.exception('interp_nearest failed for %s array %s, skipping',
                              varname, array_num)
                continue
            final_products += plot_interp_data(interp_data, sect_xarray,
                                               recentered_area_def, varname)

            from geoips2.xarray_utils.interpolation import interp_scipy_grid
            interp_data = interp_scipy_grid(recentered_area_def,
                                            sect_xarray,
                                            varname,
                                            array_num=array_num,
                                            method='linear')
            prodname = '{0}_{1}_GriddataLinear'.format(sect_xarray.source_name,
                                                       varname)
            final_products += plot_interp_data(interp_data,
                                               sect_xarray,
                                               recentered_area_def,
                                               varname,
                                               product_name=prodname)

    return final_products
Example #6
0
def sector_xarrays(xobjs,
                   area_def,
                   varlist,
                   verbose=False,
                   hours_before_sector_time=18,
                   hours_after_sector_time=6):
    '''Return list of sectored xarray objects

    Args:
        xobjs (list): list of xarray Dataset objects to sector
        area_def (AreaDefinition): pyresample AreaDefinition to sector over
        varlist (list): variable names to keep; datasets with none of them are skipped
        verbose (Optional[bool]): DEFAULT False, additional log output
        hours_before_sector_time (Optional[int]): DEFAULT 18, temporal window before dynamic sector time
        hours_after_sector_time (Optional[int]): DEFAULT 6, temporal window after dynamic sector time

    Returns:
        list of xarray.Dataset: sectored datasets with coverage (possibly empty)
    '''
    import numpy
    ret_xobjs = []
    for xobj in xobjs:
        # Compile a list of variables that will be used to sector - the current data variable, and we will add in
        # the appropriate latitude and longitude variables (of the same shape as data), and if it exists the
        # appropriately shaped timestamp array
        vars_to_interp = list(set(varlist) & set(xobj.variables.keys()))
        if not vars_to_interp:
            LOG.info('No required variables, skipping dataset')
            continue

        from geoips2.sector_utils.utils import is_dynamic_sector
        if is_dynamic_sector(area_def):
            LOG.info('Trying to sector %s with dynamic time %s, %s points',
                     area_def.area_id, area_def.sector_start_datetime,
                     xobj['latitude'].size)
        else:
            LOG.info('Trying to sector %s, %s points', area_def.area_id,
                     xobj['latitude'].size)

        vars_to_sect = []
        vars_to_sect += vars_to_interp
        # we have to have 'latitude','longitude" in the full_xarray, and 'timestamp' if we want temporal sectoring
        if 'latitude' in xobj.variables.keys():
            vars_to_sect += ['latitude']
        if 'longitude' in xobj.variables.keys():
            vars_to_sect += ['longitude']
        if 'timestamp' in xobj.variables.keys():
            vars_to_sect += ['timestamp']

        from geoips2.xarray_utils.data import sector_xarray_dataset
        # The list of variables in vars_to_sect must ALL be the same shape
        sect_xarray = sector_xarray_dataset(
            xobj,
            area_def,
            vars_to_sect,
            verbose=verbose,
            hours_before_sector_time=hours_before_sector_time,
            hours_after_sector_time=hours_after_sector_time)

        # numpy arrays fail if numpy_array is None, and xarrays fail if x_array == None
        if sect_xarray is None:
            if verbose:
                LOG.info('No coverage - skipping dataset')
            continue

        from geoips2.sector_utils.utils import is_sector_type
        if is_sector_type(area_def, 'atcf'):
            from geoips2.sector_utils.utils import check_center_coverage
            has_covg, covg_xarray = check_center_coverage(
                sect_xarray,
                area_def,
                varlist=vars_to_sect,
                covg_varname=vars_to_sect[0],
                width_degrees=8,
                height_degrees=8,
                verbose=verbose)

            if not has_covg:
                LOG.info('SKIPPING NO COVERAGE IN center box - NOT PROCESSING')
                continue

            # If the time within the box is > 50 min, we have two overpasses. ALL PMW sensors are polar orbiters.
            # BUGFIX: use total_seconds() - timedelta.seconds is only the
            # seconds *component* and wraps for spans of a day or more.
            if (covg_xarray.end_datetime -
                    covg_xarray.start_datetime).total_seconds() > 3000:
                LOG.info(
                    'Original sectored xarray contains more than one overpass - switching to start/datetime in center'
                )
                sect_xarray.attrs[
                    'start_datetime'] = covg_xarray.start_datetime
                sect_xarray.attrs['end_datetime'] = covg_xarray.end_datetime

        sect_xarray.attrs[
            'area_def'] = area_def  # add name of this sector to sector attribute
        if hasattr(sect_xarray, 'timestamp'):
            from geoips2.xarray_utils.timestamp import get_min_from_xarray_timestamp
            from geoips2.xarray_utils.timestamp import get_max_from_xarray_timestamp
            sect_xarray.attrs[
                'start_datetime'] = get_min_from_xarray_timestamp(
                    sect_xarray, 'timestamp')
            sect_xarray.attrs['end_datetime'] = get_max_from_xarray_timestamp(
                sect_xarray, 'timestamp')
            # Note:  need to test whether above two lines can reselect min and max time_info for this sector

        LOG.info(
            'Sectored data start/end datetime: %s %s, %s points from var %s, all vars %s',
            sect_xarray.start_datetime, sect_xarray.end_datetime,
            numpy.ma.count(sect_xarray[vars_to_interp[0]].to_masked_array()),
            vars_to_interp[0], vars_to_interp)
        ret_xobjs += [sect_xarray]

    return ret_xobjs
Example #7
0
def sector_xarray_dataset(full_xarray,
                          area_def,
                          varnames,
                          lon_pad=3,
                          lat_pad=0,
                          verbose=False,
                          hours_before_sector_time=18,
                          hours_after_sector_time=6):
    ''' Use the xarray to appropriately sector out data by lat/lon and time '''
    from datetime import timedelta

    LOG.info('Full xarray start/end datetime: %s %s',
             full_xarray.start_datetime, full_xarray.end_datetime)

    if area_def is None:
        # No sector requested - return an unmodified copy of the full dataset.
        sector_xarray = full_xarray.copy()
    else:
        is_dynamic = hasattr(area_def, 'sector_start_datetime') and area_def.sector_start_datetime
        if is_dynamic:
            # Dynamic sector: trim temporally around the sector time so we use the appropriate data
            window_start = area_def.sector_start_datetime - timedelta(hours=hours_before_sector_time)
            window_end = area_def.sector_start_datetime + timedelta(hours=hours_after_sector_time)
            time_xarray = sector_xarray_temporal(full_xarray,
                                                 window_start,
                                                 window_end,
                                                 varnames,
                                                 verbose=verbose)
        else:
            # Static sector: keep all times, only spatial coverage matters.
            time_xarray = full_xarray.copy()

        sector_xarray = sector_xarray_spatial(time_xarray,
                                              list(area_def.area_extent_ll),
                                              varnames,
                                              lon_pad,
                                              lat_pad,
                                              verbose=verbose)

        if sector_xarray is not None:
            sector_xarray.attrs['area_def'] = area_def
            if 'timestamp' in varnames and is_dynamic:
                # Recompute the time bounds from the sectored timestamps
                from geoips2.xarray_utils.timestamp import get_min_from_xarray_timestamp, get_max_from_xarray_timestamp
                sector_xarray.attrs['start_datetime'] = get_min_from_xarray_timestamp(sector_xarray, 'timestamp')
                sector_xarray.attrs['end_datetime'] = get_max_from_xarray_timestamp(sector_xarray, 'timestamp')
            else:
                # No per-point timestamps available - inherit the full dataset's bounds
                sector_xarray.attrs['start_datetime'] = full_xarray.start_datetime
                sector_xarray.attrs['end_datetime'] = full_xarray.end_datetime

    if sector_xarray is not None and verbose:
        LOG.info('Sectored data start/end datetime: %s %s',
                 sector_xarray.start_datetime, sector_xarray.end_datetime)

    return sector_xarray