def main(grb_ws=os.getcwd(), ancillary_ws=os.getcwd(), output_ws=os.getcwd(),
         variables=['pr'], landsat_ws=None, start_date=None, end_date=None,
         times_str='', extent_path=None, output_extent=None,
         stats_flag=True, overwrite_flag=False):
    """Extract NLDAS target variable(s)

    Args:
        grb_ws (str): folder of NLDAS GRB files
        ancillary_ws (str): folder of ancillary rasters
        output_ws (str): folder of output rasters
        variables (list): NLDAS variables to extract
            ('pr', 'srad', 'sph', 'tair', 'tmmn', 'tmmx', 'vs')
        landsat_ws (str): folder of Landsat scenes or tar.gz files
        start_date (str): ISO format date (YYYY-MM-DD)
        end_date (str): ISO format date (YYYY-MM-DD)
        times_str (str): comma separated values and/or ranges of UTC hours
            (i.e. "1, 2, 5-8")
            Parsed with python_common.parse_int_set()
        extent_path (str): file path defining the output extent
        output_extent (list): decimal degrees values defining output extent
        stats_flag (bool): if True, compute raster statistics.
            Default is True.
        overwrite_flag (bool): if True, overwrite existing files

    Returns:
        None
    """
    logging.info('\nExtract NLDAS target variable(s)')

    # input_fmt = 'NLDAS_FORA0125_H.A{:04d}{:02d}{:02d}.{}.002.grb'
    input_re = re.compile(
        r'NLDAS_FORA0125_H.A(?P<YEAR>\d{4})(?P<MONTH>\d{2})'
        r'(?P<DAY>\d{2}).(?P<TIME>\d{4}).002.grb$')

    output_fmt = '{}_{:04d}{:02d}{:02d}_hourly_nldas.img'
    # output_fmt = '{}_{:04d}{:02d}{:02d}_{:04d}_nldas.img'

    # If a date is not set, process 2017
    try:
        start_dt = dt.datetime.strptime(start_date, '%Y-%m-%d')
        logging.debug(' Start date: {}'.format(start_dt))
    except:
        start_dt = dt.datetime(2017, 1, 1)
        logging.info(' Start date: {}'.format(start_dt))
    try:
        end_dt = dt.datetime.strptime(end_date, '%Y-%m-%d')
        logging.debug(' End date: {}'.format(end_dt))
    except:
        end_dt = dt.datetime(2017, 12, 31)
        logging.info(' End date: {}'.format(end_dt))

    # Only process specific hours
    if not times_str:
        time_list = range(0, 24, 1)
    else:
        time_list = list(parse_int_set(times_str))
    time_list = ['{:02d}00'.format(t) for t in time_list]

    # Assume NLDAS is NAD83
    # input_epsg = 'EPSG:4269'

    # NLDAS rasters to extract
    data_full_list = ['pr', 'srad', 'sph', 'tair', 'tmmn', 'tmmx', 'vs']
    if not variables:
        logging.error('\nERROR: variables parameter is empty\n')
        sys.exit()
    elif type(variables) is not list:
        # DEADBEEF - I could try converting comma separated strings to lists?
        logging.error('\nERROR: variables parameter must be a list\n')
        sys.exit()
    elif not set(variables).issubset(set(data_full_list)):
        logging.error('\nERROR: variables parameter is invalid\n {}'.format(
            variables))
        sys.exit()

    # Ancillary raster paths
    mask_path = os.path.join(ancillary_ws, 'nldas_mask.img')

    # Build a date list from landsat_ws scene folders or tar.gz files
    date_list = []
    if landsat_ws is not None and os.path.isdir(landsat_ws):
        logging.info('\nReading dates from Landsat IDs')
        logging.info(' {}'.format(landsat_ws))
        landsat_re = re.compile(
            r'^(?:LT04|LT05|LE07|LC08)_(?:\d{3})(?:\d{3})_'
            r'(?P<year>\d{4})(?P<month>\d{2})(?P<day>\d{2})')
        for root, dirs, files in os.walk(landsat_ws, topdown=True):
            # If root matches, don't explore subfolders
            try:
                landsat_match = landsat_re.match(os.path.basename(root))
                date_list.append(dt.datetime.strptime(
                    '_'.join(landsat_match.groups()),
                    '%Y_%m_%d').date().isoformat())
                dirs[:] = []
            except:
                pass
            for file in files:
                try:
                    landsat_match = landsat_re.match(file)
                    date_list.append(dt.datetime.strptime(
                        '_'.join(landsat_match.groups()),
                        '%Y_%m_%d').date().isoformat())
                except:
                    pass
        date_list = sorted(list(set(date_list)))
    # elif landsat_ws is not None and os.path.isfile(landsat_ws):
    #     with open(landsat_ws) as landsat_f:

    # This allows GDAL to throw Python Exceptions
    # gdal.UseExceptions()
    # mem_driver = gdal.GetDriverByName('MEM')

    # Get the NLDAS spatial reference from the mask raster
    nldas_ds = gdal.Open(mask_path)
    nldas_osr = gdc.raster_ds_osr(nldas_ds)
    nldas_proj = gdc.osr_proj(nldas_osr)
    nldas_cs = gdc.raster_ds_cellsize(nldas_ds, x_only=True)
    nldas_extent = gdc.raster_ds_extent(nldas_ds)
    nldas_geo = nldas_extent.geo(nldas_cs)
    nldas_x, nldas_y = nldas_extent.origin()
    nldas_ds = None
    logging.debug(' Projection: {}'.format(nldas_proj))
    logging.debug(' Cellsize: {}'.format(nldas_cs))
    logging.debug(' Geo: {}'.format(nldas_geo))
    logging.debug(' Extent: {}'.format(nldas_extent))

    # Subset data to a smaller extent
    if output_extent is not None:
        logging.info('\nComputing subset extent & geo')
        logging.debug(' Extent: {}'.format(output_extent))
        nldas_extent = gdc.Extent(output_extent)
        nldas_extent.adjust_to_snap('EXPAND', nldas_x, nldas_y, nldas_cs)
        nldas_geo = nldas_extent.geo(nldas_cs)
        logging.debug(' Geo: {}'.format(nldas_geo))
        logging.debug(' Extent: {}'.format(nldas_extent))
    elif extent_path is not None:
        logging.info('\nComputing subset extent & geo')
        if extent_path.lower().endswith('.shp'):
            nldas_extent = gdc.feature_path_extent(extent_path)
            extent_osr = gdc.feature_path_osr(extent_path)
            extent_cs = None
        else:
            nldas_extent = gdc.raster_path_extent(extent_path)
            extent_osr = gdc.raster_path_osr(extent_path)
            extent_cs = gdc.raster_path_cellsize(extent_path, x_only=True)
        nldas_extent = gdc.project_extent(
            nldas_extent, extent_osr, nldas_osr, extent_cs)
        nldas_extent.adjust_to_snap('EXPAND', nldas_x, nldas_y, nldas_cs)
        nldas_geo = nldas_extent.geo(nldas_cs)
        logging.debug(' Geo: {}'.format(nldas_geo))
        logging.debug(' Extent: {}'.format(nldas_extent))
    logging.debug('')

    # Read the NLDAS mask array if present
    if mask_path and os.path.isfile(mask_path):
        mask_array, mask_nodata = gdc.raster_to_array(
            mask_path, mask_extent=nldas_extent, fill_value=0,
            return_nodata=True)
        mask_array = mask_array != mask_nodata
    else:
        mask_array = None

    # NLDAS band name dictionary
    nldas_band_dict = dict()
    nldas_band_dict['pr'] = 'Total precipitation [kg/m^2]'
    nldas_band_dict['srad'] = 'Downward shortwave radiation flux [W/m^2]'
    nldas_band_dict['sph'] = 'Specific humidity [kg/kg]'
    nldas_band_dict['tair'] = 'Temperature [C]'
    nldas_band_dict['tmmn'] = 'Temperature [C]'
    nldas_band_dict['tmmx'] = 'Temperature [C]'
    nldas_band_dict['vs'] = [
        'u-component of wind [m/s]', 'v-component of wind [m/s]']

    # NLDAS band name dictionary (GRIDMET style names)
    # nldas_band_dict = dict()
    # nldas_band_dict['pr'] = 'precipitation_amount'
    # nldas_band_dict['srad'] = 'surface_downwelling_shortwave_flux_in_air'
    # nldas_band_dict['sph'] = 'specific_humidity'
    # nldas_band_dict['tmmn'] = 'air_temperature'
    # nldas_band_dict['tmmx'] = 'air_temperature'
    # nldas_band_dict['vs'] = 'wind_speed'

    # NLDAS band name dictionary (EarthEngine keys, GRID_ELEMENT values)
    # nldas_band_dict = dict()
    # nldas_band_dict['total_precipitation'] = 'Total precipitation [kg/m^2]'
    # nldas_band_dict['shortwave_radiation'] = 'Downward shortwave radiation flux [W/m^2]'
    # nldas_band_dict['specific_humidity'] = 'Specific humidity [kg/kg]'
    # nldas_band_dict['pressure'] = 'Pressure [Pa]'
    # nldas_band_dict['temperature'] = 'Temperature [C]'
    # nldas_band_dict['wind_u'] = 'u-component of wind [m/s]'
    # nldas_band_dict['wind_v'] = 'v-component of wind [m/s]'

    # Process each variable
    logging.info('\nReading NLDAS GRIBs')
    for input_var in variables:
        logging.info("Variable: {}".format(input_var))

        # Build output folder
        var_ws = os.path.join(output_ws, input_var)
        if not os.path.isdir(var_ws):
            os.makedirs(var_ws)

        # Each sub folder in the main folder has all imagery for 1 day
        # The path for each subfolder is the /YYYY/DOY
        # This alternate approach would only process files for target dates
        # for input_dt in date_range(start_dt, end_dt + dt.timedelta(1)):
        #     logging.info(input_dt.date())

        # Iterate all available files and check dates if necessary
        for root, folders, files in os.walk(grb_ws):
            root_split = os.path.normpath(root).split(os.sep)

            # If the year/doy is outside the range, skip
            if (re.match(r'\d{4}', root_split[-2]) and
                    re.match(r'\d{3}', root_split[-1])):
                root_dt = dt.datetime.strptime('{}_{}'.format(
                    root_split[-2], root_split[-1]), '%Y_%j')
                logging.info('{}-{:02d}-{:02d}'.format(
                    root_dt.year, root_dt.month, root_dt.day))
                if ((start_dt is not None and root_dt < start_dt) or
                        (end_dt is not None and root_dt > end_dt)):
                    continue
                elif date_list and root_dt.date().isoformat() not in date_list:
                    continue
            # If the year is outside the range, don't search subfolders
            elif re.match(r'\d{4}', root_split[-1]):
                root_year = int(root_split[-1])
                logging.info('Year: {}'.format(root_year))
                if ((start_dt is not None and root_year < start_dt.year) or
                        (end_dt is not None and root_year > end_dt.year)):
                    folders[:] = []
                else:
                    folders[:] = sorted(folders)
                continue
            else:
                continue

            # Create a single raster for each day with 24 bands
            # Each time step will be stored in a separate band
            output_name = output_fmt.format(
                input_var, root_dt.year, root_dt.month, root_dt.day)
            output_path = os.path.join(
                var_ws, str(root_dt.year), output_name)
            logging.debug(' {}'.format(output_path))
            if os.path.isfile(output_path):
                if not overwrite_flag:
                    logging.debug(' File already exists, skipping')
                    continue
                else:
                    logging.debug(' File already exists, removing existing')
                    os.remove(output_path)
            logging.debug(' {}'.format(root))
            if not os.path.isdir(os.path.dirname(output_path)):
                os.makedirs(os.path.dirname(output_path))
            gdc.build_empty_raster(
                output_path, band_cnt=24, output_dtype=np.float32,
                output_proj=nldas_proj, output_cs=nldas_cs,
                output_extent=nldas_extent, output_fill_flag=True)

            # Iterate through hourly files
            for input_name in sorted(files):
                logging.info(' {}'.format(input_name))
                input_path = os.path.join(root, input_name)
                input_match = input_re.match(input_name)
                if input_match is None:
                    logging.debug(
                        ' Regular expression didn\'t match, skipping')
                    continue
                input_dt = dt.datetime(
                    int(input_match.group('YEAR')),
                    int(input_match.group('MONTH')),
                    int(input_match.group('DAY')))
                time_str = input_match.group('TIME')
                band_num = int(time_str[:2]) + 1
                # if start_dt is not None and input_dt < start_dt:
                #     continue
                # elif end_dt is not None and input_dt > end_dt:
                #     continue
                # elif date_list and input_dt.date().isoformat() not in date_list:
                #     continue
                if time_str not in time_list:
                    logging.debug(' Time not in list, skipping')
                    continue
                logging.debug(' Time: {} {}'.format(
                    input_dt.date(), time_str))
                logging.debug(' Band: {}'.format(band_num))

                # Determine band numbering/naming
                input_band_dict = grib_band_names(input_path)

                # Extract array and save
                input_ds = gdal.Open(input_path)

                # Convert Kelvin to Celsius (old NLDAS files were in K, I think)
                if input_var in ['tair', 'tmmx', 'tmmn']:
                    # Temperature should be in C for et_common.refet_hourly_func()
                    if 'Temperature [K]' in input_band_dict.keys():
                        temp_band_units = 'K'
                        output_array = gdc.raster_ds_to_array(
                            input_ds,
                            band=input_band_dict['Temperature [K]'],
                            mask_extent=nldas_extent, return_nodata=False)
                    elif 'Temperature [C]' in input_band_dict.keys():
                        temp_band_units = 'C'
                        output_array = gdc.raster_ds_to_array(
                            input_ds,
                            band=input_band_dict['Temperature [C]'],
                            mask_extent=nldas_extent, return_nodata=False)
                    else:
                        logging.error('Unknown Temperature units, skipping')
                        logging.error(' {}'.format(input_band_dict.keys()))
                        continue

                    # DEADBEEF - Having issue with T appearing to be C
                    #   but labeled as K
                    # Try to determine temperature units from values
                    temp_mean = float(np.nanmean(output_array))
                    temp_units_dict = {20: 'C', 293: 'K'}
                    temp_array_units = temp_units_dict[
                        min(temp_units_dict, key=lambda x: abs(x - temp_mean))]
                    if temp_array_units == 'K' and temp_band_units == 'K':
                        logging.debug(' Converting temperature from K to C')
                        output_array -= 273.15
                    elif temp_array_units == 'C' and temp_band_units == 'C':
                        pass
                    elif temp_array_units == 'C' and temp_band_units == 'K':
                        logging.debug(
                            (' Temperature units are K in the GRB band name, '
                             'but values appear to be C\n'
                             ' Mean temperature: {:.2f}\n'
                             ' Values will NOT be adjusted').format(temp_mean))
                    elif temp_array_units == 'K' and temp_band_units == 'C':
                        logging.debug(
                            (' Temperature units are C in the GRB band name, '
                             'but values appear to be K\n'
                             ' Mean temperature: {:.2f}\n'
                             ' Values will be adjusted from K to C').format(
                                temp_mean))
                        output_array -= 273.15
                # Compute wind speed from vectors
                elif input_var == 'vs':
                    wind_u_array = gdc.raster_ds_to_array(
                        input_ds,
                        band=input_band_dict['u-component of wind [m/s]'],
                        mask_extent=nldas_extent, return_nodata=False)
                    wind_v_array = gdc.raster_ds_to_array(
                        input_ds,
                        band=input_band_dict['v-component of wind [m/s]'],
                        mask_extent=nldas_extent, return_nodata=False)
                    output_array = np.sqrt(
                        wind_u_array ** 2 + wind_v_array ** 2)
                # Read all other variables directly
                else:
                    output_array = gdc.raster_ds_to_array(
                        input_ds,
                        band=input_band_dict[nldas_band_dict[input_var]],
                        mask_extent=nldas_extent, return_nodata=False)

                # Save the projected array as 32-bit floats
                gdc.array_to_comp_raster(
                    output_array.astype(np.float32), output_path,
                    band=band_num)
                # gdc.block_to_raster(
                #     ea_array.astype(np.float32), output_path, band=band)
                # gdc.array_to_raster(
                #     output_array.astype(np.float32), output_path,
                #     output_geo=nldas_geo, output_proj=nldas_proj,
                #     stats_flag=stats_flag)
                del output_array
                input_ds = None

            if stats_flag:
                gdc.raster_statistics(output_path)

    logging.debug('\nScript Complete')
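
# The scripts in this section call parse_int_set() to expand strings like
# "1, 2, 5-8" into a set of hours. The helper lives in python_common in the
# original project; the following is only a minimal sketch of the assumed
# behavior, not the project's actual implementation.
def parse_int_set(nputstr=''):
    """Return the set of ints described by comma separated values/ranges"""
    int_set = set()
    for token in nputstr.split(','):
        token = token.strip()
        if not token:
            continue
        if '-' in token:
            # Expand an inclusive range token such as "5-8"
            first, last = token.split('-')
            int_set.update(range(int(first), int(last) + 1))
        else:
            int_set.add(int(token))
    return int_set
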
def main(netcdf_ws=os.getcwd(), ancillary_ws=os.getcwd(),
         output_ws=os.getcwd(), start_date=None, end_date=None,
         extent_path=None, output_extent=None,
         stats_flag=True, overwrite_flag=False):
    """Extract GRIDMET temperature

    Args:
        netcdf_ws (str): folder of GRIDMET netcdf files
        ancillary_ws (str): folder of ancillary rasters
        output_ws (str): folder of output rasters
        start_date (str): ISO format date (YYYY-MM-DD)
        end_date (str): ISO format date (YYYY-MM-DD)
        extent_path (str): file path defining the output extent
        output_extent (list): decimal degrees values defining output extent
        stats_flag (bool): if True, compute raster statistics.
            Default is True.
        overwrite_flag (bool): if True, overwrite existing files

    Returns:
        None
    """
    logging.info('\nExtracting GRIDMET temperature')

    # If a date is not set, process 2017
    try:
        start_dt = dt.datetime.strptime(start_date, '%Y-%m-%d')
        logging.debug(' Start date: {}'.format(start_dt))
    except:
        start_dt = dt.datetime(2017, 1, 1)
        logging.info(' Start date: {}'.format(start_dt))
    try:
        end_dt = dt.datetime.strptime(end_date, '%Y-%m-%d')
        logging.debug(' End date: {}'.format(end_dt))
    except:
        end_dt = dt.datetime(2017, 12, 31)
        logging.info(' End date: {}'.format(end_dt))

    # Ancillary raster paths
    elev_raster = os.path.join(ancillary_ws, 'gridmet_elev.img')

    output_fmt = '{}_{}_daily_gridmet.img'
    gridmet_re = re.compile(r'(?P<VAR>\w+)_(?P<YEAR>\d{4}).nc$')

    # GRIDMET band name dictionary
    gridmet_band_dict = dict()
    gridmet_band_dict['pr'] = 'precipitation_amount'
    gridmet_band_dict['srad'] = 'surface_downwelling_shortwave_flux_in_air'
    gridmet_band_dict['sph'] = 'specific_humidity'
    gridmet_band_dict['tmmn'] = 'air_temperature'
    gridmet_band_dict['tmmx'] = 'air_temperature'
    gridmet_band_dict['vs'] = 'wind_speed'

    # Get extent/geo from elevation raster
    gridmet_ds = gdal.Open(elev_raster)
    gridmet_osr = gdc.raster_ds_osr(gridmet_ds)
    gridmet_proj = gdc.osr_proj(gridmet_osr)
    gridmet_cs = gdc.raster_ds_cellsize(gridmet_ds, x_only=True)
    gridmet_extent = gdc.raster_ds_extent(gridmet_ds)
    gridmet_full_geo = gridmet_extent.geo(gridmet_cs)
    gridmet_x, gridmet_y = gridmet_extent.origin()
    gridmet_ds = None
    logging.debug(' Projection: {}'.format(gridmet_proj))
    logging.debug(' Cellsize: {}'.format(gridmet_cs))
    logging.debug(' Geo: {}'.format(gridmet_full_geo))
    logging.debug(' Extent: {}'.format(gridmet_extent))

    # Subset data to a smaller extent
    if output_extent is not None:
        logging.info('\nComputing subset extent & geo')
        logging.debug(' Extent: {}'.format(output_extent))
        gridmet_extent = gdc.Extent(output_extent)
        gridmet_extent.adjust_to_snap(
            'EXPAND', gridmet_x, gridmet_y, gridmet_cs)
        gridmet_geo = gridmet_extent.geo(gridmet_cs)
        logging.debug(' Geo: {}'.format(gridmet_geo))
        logging.debug(' Extent: {}'.format(gridmet_extent))
    elif extent_path is not None:
        logging.info('\nComputing subset extent & geo')
        if extent_path.lower().endswith('.shp'):
            gridmet_extent = gdc.feature_path_extent(extent_path)
            extent_osr = gdc.feature_path_osr(extent_path)
            extent_cs = None
        else:
            gridmet_extent = gdc.raster_path_extent(extent_path)
            extent_osr = gdc.raster_path_osr(extent_path)
            extent_cs = gdc.raster_path_cellsize(extent_path, x_only=True)
        gridmet_extent = gdc.project_extent(
            gridmet_extent, extent_osr, gridmet_osr, extent_cs)
        gridmet_extent.adjust_to_snap(
            'EXPAND', gridmet_x, gridmet_y, gridmet_cs)
        gridmet_geo = gridmet_extent.geo(gridmet_cs)
        logging.debug(' Geo: {}'.format(gridmet_geo))
        logging.debug(' Extent: {}'.format(gridmet_extent))
    else:
        gridmet_geo = gridmet_full_geo

    # Get indices for slicing/clipping input arrays
    g_i, g_j = gdc.array_geo_offsets(
        gridmet_full_geo, gridmet_geo, cs=gridmet_cs)
    g_rows, g_cols = gridmet_extent.shape(cs=gridmet_cs)

    # Process each variable
    logging.info("")
    variables = ['tmmn', 'tmmx']
    for input_var in variables:
        logging.info("\nVariable: {}".format(input_var))
        output_var = input_var

        # Build output folder
        var_ws = os.path.join(output_ws, output_var)
        if not os.path.isdir(var_ws):
            os.makedirs(var_ws)

        # Process each file in the input workspace
        for input_name in sorted(os.listdir(netcdf_ws)):
            input_match = gridmet_re.match(input_name)
            if not input_match:
                logging.debug("{}".format(input_name))
                logging.debug(
                    ' Regular expression didn\'t match, skipping')
                continue
            elif input_match.group('VAR') != input_var:
                logging.debug("{}".format(input_name))
                logging.debug(' Variable didn\'t match, skipping')
                continue
            else:
                logging.info("{}".format(input_name))

            year_str = input_match.group('YEAR')
            logging.info(" {}".format(year_str))
            year_int = int(year_str)
            year_days = int(dt.datetime(year_int, 12, 31).strftime('%j'))
            if start_dt is not None and year_int < start_dt.year:
                logging.debug(' Before start date, skipping')
                continue
            elif end_dt is not None and year_int > end_dt.year:
                logging.debug(' After end date, skipping')
                continue

            # Build input file path
            input_raster = os.path.join(netcdf_ws, input_name)
            # if not os.path.isfile(input_raster):
            #     logging.debug(
            #         ' Input NetCDF doesn\'t exist, skipping {}'.format(
            #             input_raster))
            #     continue

            # Create a single raster for each year with up to 366 bands
            # Each day will be stored in a separate band
            output_path = os.path.join(
                var_ws, output_fmt.format(output_var, year_str))
            logging.debug(' {}'.format(output_path))
            if os.path.isfile(output_path):
                logging.debug(' {}'.format(output_path))
                if not overwrite_flag:
                    logging.debug(' File already exists, skipping')
                    continue
                else:
                    logging.debug(' File already exists, removing existing')
                    os.remove(output_path)
            gdc.build_empty_raster(
                output_path, band_cnt=366, output_dtype=np.float32,
                output_proj=gridmet_proj, output_cs=gridmet_cs,
                output_extent=gridmet_extent, output_fill_flag=True)

            # Read in the GRIDMET NetCDF file
            # Immediately clip the input array to save memory
            input_nc_f = netCDF4.Dataset(input_raster, 'r')
            input_nc = input_nc_f.variables[
                gridmet_band_dict[input_var]][
                    :, g_i:g_i + g_cols, g_j:g_j + g_rows].copy()
            input_nc = np.transpose(input_nc, (0, 2, 1))

            # A numpy array is returned when slicing a masked array
            #   if there are no masked pixels
            # This is a hack to force the numpy array back to a masked array
            if type(input_nc) != np.ma.core.MaskedArray:
                input_nc = np.ma.core.MaskedArray(
                    input_nc, np.zeros(input_nc.shape, dtype=bool))

            # Check all valid dates in the year
            year_dates = date_range(
                dt.datetime(year_int, 1, 1), dt.datetime(year_int + 1, 1, 1))
            for date_dt in year_dates:
                if start_dt is not None and date_dt < start_dt:
                    logging.debug(' {} - before start date, skipping'.format(
                        date_dt.date()))
                    continue
                elif end_dt is not None and date_dt > end_dt:
                    logging.debug(' {} - after end date, skipping'.format(
                        date_dt.date()))
                    continue
                else:
                    logging.info(' {}'.format(date_dt.date()))

                doy = int(date_dt.strftime('%j'))
                doy_i = range(1, year_days + 1).index(doy)

                # Arrays are read as masked arrays with a fill value of -9999
                # Convert to basic numpy arrays with nan values
                try:
                    input_full_ma = input_nc[doy_i, :, :]
                except IndexError:
                    logging.info(' date not in netcdf, skipping')
                    continue
                input_full_array = input_full_ma.data.astype(np.float32)
                input_full_nodata = float(input_full_ma.fill_value)
                input_full_array[input_full_array == input_full_nodata] = np.nan

                # Since the inputs are netcdf, create in-memory GDAL raster
                #   datasets in order to use the gdal_common functions
                # Create an in memory dataset of the full input array
                input_full_ds = gdc.array_to_mem_ds(
                    input_full_array, output_geo=gridmet_full_geo,
                    output_proj=gridmet_proj)

                # Then extract the subset from the in memory dataset
                output_array = gdc.raster_ds_to_array(
                    input_full_ds, 1, mask_extent=gridmet_extent,
                    return_nodata=False)

                # Convert Kelvin to Celsius
                if input_var in ['tmmx', 'tmmn']:
                    output_array -= 273.15

                # Save the projected array as 32-bit floats
                gdc.array_to_comp_raster(
                    output_array.astype(np.float32), output_path,
                    band=doy, stats_flag=False)
                # gdc.array_to_raster(
                #     output_array.astype(np.float32), output_path,
                #     output_geo=gridmet_geo, output_proj=gridmet_proj,
                #     stats_flag=False)
                del output_array

            input_nc_f.close()
            del input_nc_f

            if stats_flag:
                gdc.raster_statistics(output_path)

    logging.debug('\nScript Complete')
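
# The yearly loops in these scripts iterate over date_range(), another helper
# assumed from python_common. A minimal sketch consistent with how it is
# called here (start inclusive, end exclusive, daily step); the delta
# parameter is an assumption for illustration:
def date_range(start_dt, end_dt, delta=dt.timedelta(days=1)):
    """Yield datetimes from start_dt up to (but not including) end_dt"""
    curr_dt = start_dt
    while curr_dt < end_dt:
        yield curr_dt
        curr_dt += delta
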
def main(netcdf_ws=os.getcwd(), ancillary_ws=os.getcwd(),
         output_ws=os.getcwd(), start_date=None, end_date=None,
         extent_path=None, output_extent=None,
         stats_flag=True, overwrite_flag=False):
    """Extract DAYMET precipitation

    Args:
        netcdf_ws (str): folder of DAYMET netcdf files
        ancillary_ws (str): folder of ancillary rasters
        output_ws (str): folder of output rasters
        start_date (str): ISO format date (YYYY-MM-DD)
        end_date (str): ISO format date (YYYY-MM-DD)
        extent_path (str): file path defining the output extent
        output_extent (list): decimal degrees values defining output extent
        stats_flag (bool): if True, compute raster statistics.
            Default is True.
        overwrite_flag (bool): if True, overwrite existing files

    Returns:
        None
    """
    logging.info('\nExtracting DAYMET precipitation')

    # If a date is not set, process 2015
    try:
        start_dt = dt.datetime.strptime(start_date, '%Y-%m-%d')
        logging.debug(' Start date: {}'.format(start_dt))
    except:
        start_dt = dt.datetime(2015, 1, 1)
        logging.info(' Start date: {}'.format(start_dt))
    try:
        end_dt = dt.datetime.strptime(end_date, '%Y-%m-%d')
        logging.debug(' End date: {}'.format(end_dt))
    except:
        end_dt = dt.datetime(2015, 12, 31)
        logging.info(' End date: {}'.format(end_dt))

    # Ancillary raster paths
    mask_raster = os.path.join(ancillary_ws, 'daymet_mask.img')

    daymet_re = re.compile(r'daymet_v3_(?P<VAR>\w+)_(?P<YEAR>\d{4})_na.nc4$')

    # DAYMET band name dictionary
    # daymet_band_dict = dict()
    # daymet_band_dict['prcp'] = 'precipitation_amount'
    # daymet_band_dict['srad'] = 'surface_downwelling_shortwave_flux_in_air'
    # daymet_band_dict['sph'] = 'specific_humidity'
    # daymet_band_dict['tmin'] = 'air_temperature'
    # daymet_band_dict['tmax'] = 'air_temperature'

    # Get extent/geo from mask raster
    daymet_ds = gdal.Open(mask_raster)
    daymet_osr = gdc.raster_ds_osr(daymet_ds)
    daymet_proj = gdc.osr_proj(daymet_osr)
    daymet_cs = gdc.raster_ds_cellsize(daymet_ds, x_only=True)
    daymet_extent = gdc.raster_ds_extent(daymet_ds)
    daymet_geo = daymet_extent.geo(daymet_cs)
    daymet_x, daymet_y = daymet_extent.origin()
    daymet_ds = None
    logging.debug(' Projection: {}'.format(daymet_proj))
    logging.debug(' Cellsize: {}'.format(daymet_cs))
    logging.debug(' Geo: {}'.format(daymet_geo))
    logging.debug(' Extent: {}'.format(daymet_extent))
    logging.debug(' Origin: {} {}'.format(daymet_x, daymet_y))

    # Subset data to a smaller extent
    if output_extent is not None:
        logging.info('\nComputing subset extent & geo')
        logging.debug(' Extent: {}'.format(output_extent))
        # Assume the input extent is in decimal degrees
        output_extent = gdc.project_extent(
            gdc.Extent(output_extent), gdc.epsg_osr(4326), daymet_osr, 0.001)
        output_extent = gdc.intersect_extents([daymet_extent, output_extent])
        output_extent.adjust_to_snap('EXPAND', daymet_x, daymet_y, daymet_cs)
        output_geo = output_extent.geo(daymet_cs)
        logging.debug(' Geo: {}'.format(output_geo))
        logging.debug(' Extent: {}'.format(output_extent))
    elif extent_path is not None:
        logging.info('\nComputing subset extent & geo')
        if extent_path.lower().endswith('.shp'):
            output_extent = gdc.feature_path_extent(extent_path)
            extent_osr = gdc.feature_path_osr(extent_path)
            extent_cs = None
        else:
            output_extent = gdc.raster_path_extent(extent_path)
            extent_osr = gdc.raster_path_osr(extent_path)
            extent_cs = gdc.raster_path_cellsize(extent_path, x_only=True)
        output_extent = gdc.project_extent(
            output_extent, extent_osr, daymet_osr, extent_cs)
        output_extent = gdc.intersect_extents([daymet_extent, output_extent])
        output_extent.adjust_to_snap('EXPAND', daymet_x, daymet_y, daymet_cs)
        output_geo = output_extent.geo(daymet_cs)
        logging.debug(' Geo: {}'.format(output_geo))
        logging.debug(' Extent: {}'.format(output_extent))
    else:
        output_extent = daymet_extent.copy()
        output_geo = daymet_geo[:]
    # output_shape = output_extent.shape(cs=daymet_cs)
    xi, yi = gdc.array_geo_offsets(daymet_geo, output_geo, daymet_cs)
    output_rows, output_cols = output_extent.shape(daymet_cs)
    logging.debug(' Shape: {} {}'.format(output_rows, output_cols))
    logging.debug(' Offsets: {} {} (x y)'.format(xi, yi))

    # Process the target variable
    input_var = 'prcp'
    output_var = 'ppt'
    logging.info("\nVariable: {}".format(input_var))

    # Build output folder
    var_ws = os.path.join(output_ws, output_var)
    if not os.path.isdir(var_ws):
        os.makedirs(var_ws)

    # Process each file in the input workspace
    for input_name in sorted(os.listdir(netcdf_ws)):
        logging.debug("{}".format(input_name))
        input_match = daymet_re.match(input_name)
        if not input_match:
            logging.debug(' Regular expression didn\'t match, skipping')
            continue
        elif input_match.group('VAR') != input_var:
            logging.debug(' Variable didn\'t match, skipping')
            continue

        year_str = input_match.group('YEAR')
        logging.info(" Year: {}".format(year_str))
        year_int = int(year_str)
        year_days = int(dt.datetime(year_int, 12, 31).strftime('%j'))
        if start_dt is not None and year_int < start_dt.year:
            logging.debug(' Before start date, skipping')
            continue
        elif end_dt is not None and year_int > end_dt.year:
            logging.debug(' After end date, skipping')
            continue

        # Build input file path
        input_raster = os.path.join(netcdf_ws, input_name)
        # if not os.path.isfile(input_raster):
        #     logging.debug(
        #         ' Input raster doesn\'t exist, skipping {}'.format(
        #             input_raster))
        #     continue

        # Build output folder
        output_year_ws = os.path.join(var_ws, year_str)
        if not os.path.isdir(output_year_ws):
            os.makedirs(output_year_ws)

        # Read in the DAYMET NetCDF file
        input_nc_f = netCDF4.Dataset(input_raster, 'r')
        # logging.debug(input_nc_f.variables)

        # Check all valid dates in the year
        year_dates = date_range(
            dt.datetime(year_int, 1, 1), dt.datetime(year_int + 1, 1, 1))
        for date_dt in year_dates:
            if start_dt is not None and date_dt < start_dt:
                logging.debug(' {} - before start date, skipping'.format(
                    date_dt.date()))
                continue
            elif end_dt is not None and date_dt > end_dt:
                logging.debug(' {} - after end date, skipping'.format(
                    date_dt.date()))
                continue
            else:
                logging.info(' {}'.format(date_dt.date()))

            output_path = os.path.join(
                output_year_ws, '{}_{}_daymet.img'.format(
                    output_var, date_dt.strftime('%Y%m%d')))
            if os.path.isfile(output_path):
                logging.debug(' {}'.format(output_path))
                if not overwrite_flag:
                    logging.debug(' File already exists, skipping')
                    continue
                else:
                    logging.debug(' File already exists, removing existing')
                    os.remove(output_path)

            doy = int(date_dt.strftime('%j'))
            doy_i = range(1, year_days + 1).index(doy)

            # Arrays are read as masked arrays with a fill value of -9999
            # Convert to basic numpy arrays with nan values
            try:
                input_ma = input_nc_f.variables[input_var][
                    doy_i, yi: yi + output_rows, xi: xi + output_cols]
            except IndexError:
                logging.info(' date not in netcdf, skipping')
                continue
            input_nodata = float(input_ma.fill_value)
            output_array = input_ma.data.astype(np.float32)
            output_array[output_array == input_nodata] = np.nan

            # Save the array as 32-bit floats
            gdc.array_to_raster(
                output_array.astype(np.float32), output_path,
                output_geo=output_geo, output_proj=daymet_proj,
                stats_flag=stats_flag)
            del input_ma, output_array

        input_nc_f.close()
        del input_nc_f

    logging.debug('\nScript Complete')
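
# Example call with hypothetical paths (in the original project these
# parameters are normally populated by an argparse wrapper, not shown here):
# main(netcdf_ws='/data/daymet/netcdf', ancillary_ws='/data/daymet/ancillary',
#      output_ws='/data/daymet', start_date='2015-04-01',
#      end_date='2015-10-31', overwrite_flag=False)
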
def main(grb_ws=os.getcwd(), ancillary_ws=os.getcwd(), output_ws=os.getcwd(),
         etr_flag=False, eto_flag=False, landsat_ws=None,
         start_date=None, end_date=None, times_str='',
         extent_path=None, output_extent=None,
         daily_flag=True, stats_flag=True, overwrite_flag=False):
    """Compute hourly ETr/ETo from NLDAS data

    Args:
        grb_ws (str): folder of NLDAS GRB files
        ancillary_ws (str): folder of ancillary rasters
        output_ws (str): folder of output rasters
        etr_flag (bool): if True, compute alfalfa reference ET (ETr)
        eto_flag (bool): if True, compute grass reference ET (ETo)
        landsat_ws (str): folder of Landsat scenes or tar.gz files
        start_date (str): ISO format date (YYYY-MM-DD)
        end_date (str): ISO format date (YYYY-MM-DD)
        times_str (str): comma separated values and/or ranges of UTC hours
            (i.e. "1, 2, 5-8")
            Parsed with python_common.parse_int_set()
        extent_path (str): file path defining the output extent
        output_extent (list): decimal degrees values defining output extent
        daily_flag (bool): if True, save daily ETr/ETo sum raster.
            Default is True
        stats_flag (bool): if True, compute raster statistics.
            Default is True.
        overwrite_flag (bool): if True, overwrite existing files

    Returns:
        None
    """
    logging.info('\nComputing NLDAS hourly ETr/ETo')
    np.seterr(invalid='ignore')

    # Compute ETr and/or ETo
    if not etr_flag and not eto_flag:
        logging.info(' ETo/ETr flag(s) not set, defaulting to ETr')
        etr_flag = True

    # If a date is not set, process 2017
    try:
        start_dt = dt.datetime.strptime(start_date, '%Y-%m-%d')
        logging.debug(' Start date: {}'.format(start_dt))
    except:
        start_dt = dt.datetime(2017, 1, 1)
        logging.info(' Start date: {}'.format(start_dt))
    try:
        end_dt = dt.datetime.strptime(end_date, '%Y-%m-%d')
        logging.debug(' End date: {}'.format(end_dt))
    except:
        end_dt = dt.datetime(2017, 12, 31)
        logging.info(' End date: {}'.format(end_dt))

    # Only process specific hours
    if not times_str:
        time_list = range(0, 24, 1)
    else:
        time_list = list(parse_int_set(times_str))
    time_list = ['{:02d}00'.format(t) for t in time_list]

    etr_folder = 'etr'
    eto_folder = 'eto'
    hour_fmt = '{}_{:04d}{:02d}{:02d}_hourly_nldas.img'
    # hour_fmt = '{}_{:04d}{:02d}{:02d}_{:04d}_nldas.img'
    day_fmt = '{}_{:04d}{:02d}{:02d}_nldas.img'
    # input_fmt = 'NLDAS_FORA0125_H.A{:04d}{:02d}{:02d}.{}.002.grb'
    input_re = re.compile(
        r'NLDAS_FORA0125_H.A(?P<YEAR>\d{4})(?P<MONTH>\d{2})'
        r'(?P<DAY>\d{2}).(?P<TIME>\d{4}).002.grb$')

    # Assume NLDAS is NAD83
    # input_epsg = 'EPSG:4269'

    # Ancillary raster paths
    mask_path = os.path.join(ancillary_ws, 'nldas_mask.img')
    elev_path = os.path.join(ancillary_ws, 'nldas_elev.img')
    lat_path = os.path.join(ancillary_ws, 'nldas_lat.img')
    lon_path = os.path.join(ancillary_ws, 'nldas_lon.img')

    # Build a date list from landsat_ws scene folders or tar.gz files
    date_list = []
    if landsat_ws is not None and os.path.isdir(landsat_ws):
        logging.info('\nReading dates from Landsat IDs')
        logging.info(' {}'.format(landsat_ws))
        landsat_re = re.compile(
            r'^(?:LT04|LT05|LE07|LC08)_(?:\d{3})(?:\d{3})_'
            r'(?P<year>\d{4})(?P<month>\d{2})(?P<day>\d{2})')
        for root, dirs, files in os.walk(landsat_ws, topdown=True):
            # If root matches, don't explore subfolders
            try:
                landsat_match = landsat_re.match(os.path.basename(root))
                date_list.append(dt.datetime.strptime(
                    '_'.join(landsat_match.groups()),
                    '%Y_%m_%d').date().isoformat())
                dirs[:] = []
            except:
                pass
            for file in files:
                try:
                    landsat_match = landsat_re.match(file)
                    date_list.append(dt.datetime.strptime(
                        '_'.join(landsat_match.groups()),
                        '%Y_%m_%d').date().isoformat())
                except:
                    pass
        date_list = sorted(list(set(date_list)))
    # elif landsat_ws is not None and os.path.isfile(landsat_ws):
    #     with open(landsat_ws) as landsat_f:

    # This allows GDAL to throw Python Exceptions
    # gdal.UseExceptions()
    # mem_driver = gdal.GetDriverByName('MEM')

    # Get the NLDAS spatial reference from the mask raster
    nldas_ds = gdal.Open(mask_path)
    nldas_osr = gdc.raster_ds_osr(nldas_ds)
    nldas_proj = gdc.osr_proj(nldas_osr)
    nldas_cs = gdc.raster_ds_cellsize(nldas_ds, x_only=True)
    nldas_extent = gdc.raster_ds_extent(nldas_ds)
    nldas_geo = nldas_extent.geo(nldas_cs)
    nldas_x, nldas_y = nldas_extent.origin()
    nldas_ds = None
    logging.debug(' Projection: {}'.format(nldas_proj))
    logging.debug(' Cellsize: {}'.format(nldas_cs))
    logging.debug(' Geo: {}'.format(nldas_geo))
    logging.debug(' Extent: {}'.format(nldas_extent))

    # Subset data to a smaller extent
    if output_extent is not None:
        logging.info('\nComputing subset extent & geo')
        logging.debug(' Extent: {}'.format(output_extent))
        nldas_extent = gdc.Extent(output_extent)
        nldas_extent.adjust_to_snap('EXPAND', nldas_x, nldas_y, nldas_cs)
        nldas_geo = nldas_extent.geo(nldas_cs)
        logging.debug(' Geo: {}'.format(nldas_geo))
        logging.debug(' Extent: {}'.format(nldas_extent))
    elif extent_path is not None:
        logging.info('\nComputing subset extent & geo')
        if extent_path.lower().endswith('.shp'):
            nldas_extent = gdc.feature_path_extent(extent_path)
            extent_osr = gdc.feature_path_osr(extent_path)
            extent_cs = None
        else:
            nldas_extent = gdc.raster_path_extent(extent_path)
            extent_osr = gdc.raster_path_osr(extent_path)
            extent_cs = gdc.raster_path_cellsize(extent_path, x_only=True)
        nldas_extent = gdc.project_extent(
            nldas_extent, extent_osr, nldas_osr, extent_cs)
        nldas_extent.adjust_to_snap('EXPAND', nldas_x, nldas_y, nldas_cs)
        nldas_geo = nldas_extent.geo(nldas_cs)
        logging.debug(' Geo: {}'.format(nldas_geo))
        logging.debug(' Extent: {}'.format(nldas_extent))
    logging.debug('')

    # Read the NLDAS mask array if present
    if mask_path and os.path.isfile(mask_path):
        mask_array, mask_nodata = gdc.raster_to_array(
            mask_path, mask_extent=nldas_extent, fill_value=0,
            return_nodata=True)
        mask_array = mask_array != mask_nodata
    else:
        mask_array = None

    # Read ancillary arrays (or subsets?)
    elev_array = gdc.raster_to_array(
        elev_path, mask_extent=nldas_extent, return_nodata=False)
    # pair_array = et_common.air_pressure_func(elev_array)
    lat_array = gdc.raster_to_array(
        lat_path, mask_extent=nldas_extent, return_nodata=False)
    lon_array = gdc.raster_to_array(
        lon_path, mask_extent=nldas_extent, return_nodata=False)

    # The hourly RefET function expects lat/lon in radians
    lat_array *= (math.pi / 180)
    lon_array *= (math.pi / 180)

    # Build output folders
    etr_ws = os.path.join(output_ws, etr_folder)
    eto_ws = os.path.join(output_ws, eto_folder)
    if etr_flag and not os.path.isdir(etr_ws):
        os.makedirs(etr_ws)
    if eto_flag and not os.path.isdir(eto_ws):
        os.makedirs(eto_ws)

    # DEADBEEF - Instead of processing all available files, the following
    #   code would only process files for target dates
    # for input_dt in date_range(start_dt, end_dt + dt.timedelta(1)):
    #     logging.info(input_dt.date())

    # Iterate all available files and check dates if necessary
    # Each sub folder in the main folder has all imagery for 1 day
    #   (in UTC time)
    # The path for each subfolder is the /YYYY/DOY
    errors = defaultdict(list)
    for root, folders, files in os.walk(grb_ws):
        root_split = os.path.normpath(root).split(os.sep)

        # If the year/doy is outside the range, skip
        if (re.match(r'\d{4}', root_split[-2]) and
                re.match(r'\d{3}', root_split[-1])):
            root_dt = dt.datetime.strptime(
                '{}_{}'.format(root_split[-2], root_split[-1]), '%Y_%j')
            logging.info('{}'.format(root_dt.date()))
            if ((start_dt is not None and root_dt < start_dt) or
                    (end_dt is not None and root_dt > end_dt)):
                continue
            elif date_list and root_dt.date().isoformat() not in date_list:
                continue
        # If the year is outside the range, don't search subfolders
        elif re.match(r'\d{4}', root_split[-1]):
            root_year = int(root_split[-1])
            logging.info('Year: {}'.format(root_year))
            if ((start_dt is not None and root_year < start_dt.year) or
                    (end_dt is not None and root_year > end_dt.year)):
                folders[:] = []
            else:
                folders[:] = sorted(folders)
            continue
        else:
            continue
        logging.debug(' {}'.format(root))

        # Start off assuming every file needs to be processed
        day_skip_flag = False

        # Build output folders if necessary
        etr_year_ws = os.path.join(etr_ws, str(root_dt.year))
        eto_year_ws = os.path.join(eto_ws, str(root_dt.year))
        if etr_flag and not os.path.isdir(etr_year_ws):
            os.makedirs(etr_year_ws)
        if eto_flag and not os.path.isdir(eto_year_ws):
            os.makedirs(eto_year_ws)

        # Build daily total paths
        etr_day_path = os.path.join(etr_year_ws, day_fmt.format(
            'etr', root_dt.year, root_dt.month, root_dt.day))
        eto_day_path = os.path.join(eto_year_ws, day_fmt.format(
            'eto', root_dt.year, root_dt.month, root_dt.day))
        etr_hour_path = os.path.join(etr_year_ws, hour_fmt.format(
            'etr', root_dt.year, root_dt.month, root_dt.day))
        eto_hour_path = os.path.join(eto_year_ws, hour_fmt.format(
            'eto', root_dt.year, root_dt.month, root_dt.day))
        # logging.debug(' {}'.format(etr_hour_path))

        # If daily ETr/ETo files are present, the day can be skipped
        if not overwrite_flag and daily_flag:
            if etr_flag and not os.path.isfile(etr_day_path):
                pass
            elif eto_flag and not os.path.isfile(eto_day_path):
                pass
            else:
                day_skip_flag = True

        # If the hourly and daily files don't need to be made, skip the day
        if not overwrite_flag:
            if etr_flag and not os.path.isfile(etr_hour_path):
                pass
            elif eto_flag and not os.path.isfile(eto_hour_path):
                pass
            elif day_skip_flag:
                logging.debug(' File(s) already exist, skipping')
                continue

        # Create a single raster for each day with 24 bands
        # Each time step will be stored in a separate band
        if etr_flag:
            logging.debug(' {}'.format(etr_hour_path))
            gdc.build_empty_raster(
                etr_hour_path, band_cnt=24, output_dtype=np.float32,
                output_proj=nldas_proj, output_cs=nldas_cs,
                output_extent=nldas_extent, output_fill_flag=True)
        if eto_flag:
            logging.debug(' {}'.format(eto_hour_path))
            gdc.build_empty_raster(
                eto_hour_path, band_cnt=24, output_dtype=np.float32,
                output_proj=nldas_proj, output_cs=nldas_cs,
                output_extent=nldas_extent, output_fill_flag=True)

        # Sum all ETr/ETo images in each folder to generate a UTC day total
        etr_day_array = 0
        eto_day_array = 0

        # Process each hourly file
        for input_name in sorted(files):
            logging.info(' {}'.format(input_name))
            input_match = input_re.match(input_name)
            if input_match is None:
                logging.debug(' Regular expression didn\'t match, skipping')
                continue
            input_dt = dt.datetime(
                int(input_match.group('YEAR')),
                int(input_match.group('MONTH')),
                int(input_match.group('DAY')))
            input_doy = int(input_dt.strftime('%j'))
            time_str = input_match.group('TIME')
            band_num = int(time_str[:2]) + 1
            # if start_dt is not None and input_dt < start_dt:
            #     continue
            # elif end_dt is not None and input_dt > end_dt:
            #     continue
            # elif date_list and input_dt.date().isoformat() not in date_list:
            #     continue
            if not daily_flag and time_str not in time_list:
                logging.debug(' Time not in list and not daily, skipping')
                continue
            input_path = os.path.join(root, input_name)
            logging.debug(' Time: {} {}'.format(input_dt.date(), time_str))
            logging.debug(' Band: {}'.format(band_num))

            # Determine band numbering/naming
            try:
                input_band_dict = grib_band_names(input_path)
            except RuntimeError as e:
                errors[input_path].append(e)
                logging.error(' RuntimeError: {} Skipping: {}'.format(
                    e, input_path))
                continue

            # Read input bands
            input_ds = gdal.Open(input_path)

            # Temperature should be in C for et_common.refet_hourly_func()
            if 'Temperature [K]' in input_band_dict.keys():
                temp_band_units = 'K'
                temp_array = gdc.raster_ds_to_array(
                    input_ds, band=input_band_dict['Temperature [K]'],
                    mask_extent=nldas_extent, return_nodata=False)
            elif 'Temperature [C]' in input_band_dict.keys():
                temp_band_units = 'C'
                temp_array = gdc.raster_ds_to_array(
                    input_ds, band=input_band_dict['Temperature [C]'],
                    mask_extent=nldas_extent, return_nodata=False)
            else:
                logging.error('Unknown Temperature units, skipping')
                logging.error(' {}'.format(input_band_dict.keys()))
                continue

            # DEADBEEF - Having issue with T appearing to be C but labeled as K
            # Try to determine temperature units from values
            temp_mean = float(np.nanmean(temp_array))
            temp_units_dict = {20: 'C', 293: 'K'}
            temp_array_units = temp_units_dict[min(
                temp_units_dict, key=lambda x: abs(x - temp_mean))]
            if temp_array_units == 'K' and temp_band_units == 'K':
                logging.debug(' Converting temperature from K to C')
                temp_array -= 273.15
            elif temp_array_units == 'C' and temp_band_units == 'C':
                pass
            elif temp_array_units == 'C' and temp_band_units == 'K':
                logging.debug(
                    (' Temperature units are K in the GRB band name, '
                     'but values appear to be C\n'
                     ' Mean temperature: {:.2f}\n'
                     ' Values will NOT be adjusted').format(temp_mean))
            elif temp_array_units == 'K' and temp_band_units == 'C':
                logging.debug(
                    (' Temperature units are C in the GRB band name, '
                     'but values appear to be K\n'
                     ' Mean temperature: {:.2f}\n'
                     ' Values will be adjusted from K to C').format(temp_mean))
                temp_array -= 273.15

            try:
                sph_array = gdc.raster_ds_to_array(
                    input_ds,
                    band=input_band_dict['Specific humidity [kg/kg]'],
                    mask_extent=nldas_extent, return_nodata=False)
                rs_array = gdc.raster_ds_to_array(
                    input_ds,
                    band=input_band_dict[
                        'Downward shortwave radiation flux [W/m^2]'],
                    mask_extent=nldas_extent, return_nodata=False)
                wind_u_array = gdc.raster_ds_to_array(
                    input_ds,
                    band=input_band_dict['u-component of wind [m/s]'],
                    mask_extent=nldas_extent, return_nodata=False)
                wind_v_array = gdc.raster_ds_to_array(
                    input_ds,
                    band=input_band_dict['v-component of wind [m/s]'],
                    mask_extent=nldas_extent, return_nodata=False)
                input_ds = None
            except KeyError as e:
                errors[input_path].append(e)
                logging.error(' KeyError: {} Skipping: {}'.format(
                    e, input_ds.GetDescription()))
                continue

            rs_array *= 0.0036  # W m-2 to MJ m-2 hr-1
            wind_array = np.sqrt(wind_u_array ** 2 + wind_v_array ** 2)
            del wind_u_array, wind_v_array

            # ETr
            if etr_flag:
                etr_array = et_common.refet_hourly_func(
                    temp_array, sph_array, rs_array, wind_array,
                    zw=10, elev=elev_array, lat=lat_array, lon=lon_array,
                    doy=input_doy, time=int(time_str) / 100, ref_type='ETR')
                if daily_flag:
                    etr_day_array += etr_array
                if time_str in time_list:
                    gdc.array_to_comp_raster(
                        etr_array.astype(np.float32), etr_hour_path,
                        band=band_num, stats_flag=False)
                del etr_array

            # ETo
            if eto_flag:
                eto_array = et_common.refet_hourly_func(
                    temp_array, sph_array, rs_array, wind_array,
                    zw=10, elev=elev_array, lat=lat_array, lon=lon_array,
                    doy=input_doy, time=int(time_str) / 100, ref_type='ETO')
                if daily_flag:
                    eto_day_array += eto_array
                if time_str in time_list:
                    gdc.array_to_comp_raster(
                        eto_array.astype(np.float32), eto_hour_path,
                        band=band_num, stats_flag=False)
                del eto_array

            del temp_array, sph_array, rs_array, wind_array

        if stats_flag and etr_flag:
            gdc.raster_statistics(etr_hour_path)
        if stats_flag and eto_flag:
            gdc.raster_statistics(eto_hour_path)

        # Save the projected daily ETr/ETo sums as 32-bit floats
        if not day_skip_flag and daily_flag:
            if etr_flag:
                try:
                    gdc.array_to_raster(
                        etr_day_array.astype(np.float32), etr_day_path,
                        output_geo=nldas_geo, output_proj=nldas_proj,
                        stats_flag=stats_flag)
                except AttributeError:
                    # etr_day_array is still the int 0 if no hourly
                    #   files were processed
                    pass
            if eto_flag:
                try:
                    gdc.array_to_raster(
                        eto_day_array.astype(np.float32), eto_day_path,
                        output_geo=nldas_geo, output_proj=nldas_proj,
                        stats_flag=stats_flag)
                except AttributeError:
                    pass
        del etr_day_array, eto_day_array

    if len(errors) > 0:
        logging.info('\nThe following errors were encountered:')
        for key, value in errors.items():
            logging.error(' Filepath: {}, error: {}'.format(key, value))

    logging.debug('\nScript Complete')
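
# grib_band_names() is called by the NLDAS scripts above but defined elsewhere
# in the project. This is only a minimal sketch of the assumed behavior,
# mapping GDAL GRIB band comments (e.g. 'Temperature [K]') to 1-based band
# numbers, consistent with how the call sites index the returned dict. It
# assumes the GDAL GRIB driver exposes the GRIB_COMMENT metadata item and
# relies on the module level gdal import.
def grib_band_names(grb_path):
    """Return a dict mapping GRIB band comments to band numbers"""
    band_dict = dict()
    grb_ds = gdal.Open(grb_path)
    for band_i in range(1, grb_ds.RasterCount + 1):
        band = grb_ds.GetRasterBand(band_i)
        comment = band.GetMetadataItem('GRIB_COMMENT')
        if comment is not None:
            # If a comment repeats, the last band with that comment wins
            band_dict[comment] = band_i
    grb_ds = None
    return band_dict
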
def main(grb_ws=os.getcwd(), ancillary_ws=os.getcwd(), output_ws=os.getcwd(),
         landsat_ws=None, start_date=None, end_date=None, times_str='',
         extent_path=None, output_extent=None,
         stats_flag=True, overwrite_flag=False):
    """Extract hourly NLDAS vapour pressure rasters

    Args:
        grb_ws (str): folder of NLDAS GRB files
        ancillary_ws (str): folder of ancillary rasters
        output_ws (str): folder of output rasters
        landsat_ws (str): folder of Landsat scenes or tar.gz files
        start_date (str): ISO format date (YYYY-MM-DD)
        end_date (str): ISO format date (YYYY-MM-DD)
        times_str (str): comma separated values and/or ranges of UTC hours
            (i.e. "1, 2, 5-8")
            Parsed with python_common.parse_int_set()
        extent_path (str): file path defining the output extent
        output_extent (list): decimal degrees values defining output extent
        stats_flag (bool): if True, compute raster statistics.
            Default is True.
        overwrite_flag (bool): if True, overwrite existing files

    Returns:
        None
    """
    logging.info('\nExtracting NLDAS vapour pressure rasters')

    # input_fmt = 'NLDAS_FORA0125_H.A{:04d}{:02d}{:02d}.{}.002.grb'
    input_re = re.compile(
        r'NLDAS_FORA0125_H.A(?P<YEAR>\d{4})(?P<MONTH>\d{2})'
        r'(?P<DAY>\d{2}).(?P<TIME>\d{4}).002.grb$')

    output_folder = 'ea'
    output_fmt = 'ea_{:04d}{:02d}{:02d}_hourly_nldas.img'
    # output_fmt = 'ea_{:04d}{:02d}{:02d}_{:04d}_nldas.img'

    # If a date is not set, process 2017
    try:
        start_dt = dt.datetime.strptime(start_date, '%Y-%m-%d')
        logging.debug(' Start date: {}'.format(start_dt))
    except:
        start_dt = dt.datetime(2017, 1, 1)
        logging.info(' Start date: {}'.format(start_dt))
    try:
        end_dt = dt.datetime.strptime(end_date, '%Y-%m-%d')
        logging.debug(' End date: {}'.format(end_dt))
    except:
        end_dt = dt.datetime(2017, 12, 31)
        logging.info(' End date: {}'.format(end_dt))

    # Only process specific hours
    if not times_str:
        time_list = range(0, 24, 1)
    else:
        time_list = list(parse_int_set(times_str))
    time_list = ['{:02d}00'.format(t) for t in time_list]

    # Assume NLDAS is NAD83
    # input_epsg = 'EPSG:4269'

    # Ancillary raster paths
    mask_path = os.path.join(ancillary_ws, 'nldas_mask.img')
    elev_path = os.path.join(ancillary_ws, 'nldas_elev.img')

    # Build a date list from landsat_ws scene folders or tar.gz files
    date_list = []
    if landsat_ws is not None and os.path.isdir(landsat_ws):
        logging.info('\nReading dates from Landsat IDs')
        logging.info(' {}'.format(landsat_ws))
        landsat_re = re.compile(
            r'^(?:LT04|LT05|LE07|LC08)_(?:\d{3})(?:\d{3})_'
            r'(?P<year>\d{4})(?P<month>\d{2})(?P<day>\d{2})')
        for root, dirs, files in os.walk(landsat_ws, topdown=True):
            # If root matches, don't explore subfolders
            try:
                landsat_match = landsat_re.match(os.path.basename(root))
                date_list.append(dt.datetime.strptime(
                    '_'.join(landsat_match.groups()),
                    '%Y_%m_%d').date().isoformat())
                dirs[:] = []
            except:
                pass
            for file in files:
                try:
                    landsat_match = landsat_re.match(file)
                    date_list.append(dt.datetime.strptime(
                        '_'.join(landsat_match.groups()),
                        '%Y_%m_%d').date().isoformat())
                except:
                    pass
        date_list = sorted(list(set(date_list)))
    # elif landsat_ws is not None and os.path.isfile(landsat_ws):
    #     with open(landsat_ws) as landsat_f:

    # This allows GDAL to throw Python Exceptions
    # gdal.UseExceptions()
    # mem_driver = gdal.GetDriverByName('MEM')

    # Get the NLDAS spatial reference from the mask raster
    nldas_ds = gdal.Open(mask_path)
    nldas_osr = gdc.raster_ds_osr(nldas_ds)
    nldas_proj = gdc.osr_proj(nldas_osr)
    nldas_cs = gdc.raster_ds_cellsize(nldas_ds, x_only=True)
    nldas_extent = gdc.raster_ds_extent(nldas_ds)
    nldas_geo = nldas_extent.geo(nldas_cs)
    nldas_x, nldas_y = nldas_extent.origin()
    nldas_ds = None
    logging.debug(' Projection: {}'.format(nldas_proj))
    logging.debug(' Cellsize: {}'.format(nldas_cs))
    logging.debug(' Geo: {}'.format(nldas_geo))
    logging.debug(' Extent: {}'.format(nldas_extent))

    # Subset data to a smaller extent
    if output_extent is not None:
        logging.info('\nComputing subset extent & geo')
        logging.debug(' Extent: {}'.format(output_extent))
        nldas_extent = gdc.Extent(output_extent)
        nldas_extent.adjust_to_snap('EXPAND', nldas_x, nldas_y, nldas_cs)
        nldas_geo = nldas_extent.geo(nldas_cs)
        logging.debug(' Geo: {}'.format(nldas_geo))
        logging.debug(' Extent: {}'.format(nldas_extent))
    elif extent_path is not None:
        logging.info('\nComputing subset extent & geo')
        if extent_path.lower().endswith('.shp'):
            nldas_extent = gdc.feature_path_extent(extent_path)
            extent_osr = gdc.feature_path_osr(extent_path)
            extent_cs = None
        else:
            nldas_extent = gdc.raster_path_extent(extent_path)
            extent_osr = gdc.raster_path_osr(extent_path)
            extent_cs = gdc.raster_path_cellsize(extent_path, x_only=True)
        nldas_extent = gdc.project_extent(
            nldas_extent, extent_osr, nldas_osr, extent_cs)
        nldas_extent.adjust_to_snap('EXPAND', nldas_x, nldas_y, nldas_cs)
        nldas_geo = nldas_extent.geo(nldas_cs)
        logging.debug(' Geo: {}'.format(nldas_geo))
        logging.debug(' Extent: {}'.format(nldas_extent))
    logging.debug('')

    # Read the NLDAS mask array if present
    if mask_path and os.path.isfile(mask_path):
        mask_array, mask_nodata = gdc.raster_to_array(
            mask_path, mask_extent=nldas_extent, fill_value=0,
            return_nodata=True)
        mask_array = mask_array != mask_nodata
    else:
        mask_array = None

    # Read the elevation array (or subset?) and compute air pressure
    elev_array = gdc.raster_to_array(
        elev_path, mask_extent=nldas_extent, return_nodata=False)
    pair_array = et_common.air_pressure_func(elev_array)

    # Build output folder
    var_ws = os.path.join(output_ws, output_folder)
    if not os.path.isdir(var_ws):
        os.makedirs(var_ws)

    # Each sub folder in the main folder has all imagery for 1 day
    # The path for each subfolder is the /YYYY/DOY
    # This alternate approach would only process files for target dates
    # for input_dt in date_range(start_dt, end_dt + dt.timedelta(1)):
    #     logging.info(input_dt.date())

    # Iterate all available files and check dates if necessary
    for root, folders, files in os.walk(grb_ws):
        root_split = os.path.normpath(root).split(os.sep)

        # If the year/doy is outside the range, skip
        if (re.match(r'\d{4}', root_split[-2]) and
                re.match(r'\d{3}', root_split[-1])):
            root_dt = dt.datetime.strptime(
                '{}_{}'.format(root_split[-2], root_split[-1]), '%Y_%j')
            logging.info('{}'.format(root_dt.date()))
            if ((start_dt is not None and root_dt < start_dt) or
                    (end_dt is not None and root_dt > end_dt)):
                continue
            elif date_list and root_dt.date().isoformat() not in date_list:
                continue
        # If the year is outside the range, don't search subfolders
        elif re.match(r'\d{4}', root_split[-1]):
            root_year = int(root_split[-1])
            logging.info('Year: {}'.format(root_year))
            if ((start_dt is not None and root_year < start_dt.year) or
                    (end_dt is not None and root_year > end_dt.year)):
                folders[:] = []
            else:
                folders[:] = sorted(folders)
            continue
        else:
            continue

        # Create a single raster for each day with 24 bands
        # Each time step will be stored in a separate band
        output_name = output_fmt.format(
            root_dt.year, root_dt.month, root_dt.day)
        output_path = os.path.join(var_ws, str(root_dt.year), output_name)
        logging.debug(' {}'.format(output_path))
        if os.path.isfile(output_path):
            if not overwrite_flag:
                logging.debug(' File already exists, skipping')
                continue
            else:
                logging.debug(' File already exists, removing existing')
                os.remove(output_path)
        logging.debug(' {}'.format(root))
        if not os.path.isdir(os.path.dirname(output_path)):
            os.makedirs(os.path.dirname(output_path))
        gdc.build_empty_raster(
            output_path, band_cnt=24, output_dtype=np.float32,
            output_proj=nldas_proj, output_cs=nldas_cs,
            output_extent=nldas_extent, output_fill_flag=True)

        # Iterate through hourly files
        for input_name in sorted(files):
            logging.info(' {}'.format(input_name))
            input_path = os.path.join(root, input_name)
            input_match = input_re.match(input_name)
            if input_match is None:
                logging.debug(' Regular expression didn\'t match, skipping')
                continue
            input_dt = dt.datetime(
                int(input_match.group('YEAR')),
                int(input_match.group('MONTH')),
                int(input_match.group('DAY')))
            time_str = input_match.group('TIME')
            band_num = int(time_str[:2]) + 1
            # if start_dt is not None and input_dt < start_dt:
            #     continue
            # elif end_dt is not None and input_dt > end_dt:
            #     continue
            # elif date_list and input_dt.date().isoformat() not in date_list:
            #     continue
            if time_str not in time_list:
                logging.debug(' Time not in list, skipping')
                continue
            logging.debug(' Time: {} {}'.format(input_dt.date(), time_str))
            logging.debug(' Band: {}'.format(band_num))

            # Determine band numbering/naming
            input_band_dict = grib_band_names(input_path)

            # Compute vapour pressure from specific humidity
            input_ds = gdal.Open(input_path)
            sph_array = gdc.raster_ds_to_array(
                input_ds, band=input_band_dict['Specific humidity [kg/kg]'],
                mask_extent=nldas_extent, return_nodata=False)
            ea_array = (sph_array * pair_array) / (0.622 + 0.378 * sph_array)

            # Save the projected array as 32-bit floats
            gdc.array_to_comp_raster(
                ea_array.astype(np.float32), output_path, band=band_num)
            # gdc.block_to_raster(
            #     ea_array.astype(np.float32), output_path, band=band)
            # gdc.array_to_raster(
            #     ea_array.astype(np.float32), output_path,
            #     output_geo=nldas_geo, output_proj=nldas_proj,
            #     stats_flag=stats_flag)
            del sph_array
            input_ds = None

        if stats_flag:
            gdc.raster_statistics(output_path)

    logging.debug('\nScript Complete')
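
# Worked example of the vapour pressure conversion used above:
#   ea = q * P / (0.622 + 0.378 * q)
# where q is specific humidity [kg/kg] and P is air pressure (in kPa here,
# assuming et_common.air_pressure_func() follows the ASCE standard atmosphere
# formulation). A quick scalar check of the formula, with assumed values:
#   q, p = 0.008, 92.0  # P ~ 92 kPa at roughly 800 m elevation
#   ea = (q * p) / (0.622 + 0.378 * q)  # ~1.18 kPa
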
def main(netcdf_ws=os.getcwd(), ancillary_ws=os.getcwd(), output_ws=os.getcwd(),
         etr_flag=False, eto_flag=False, start_date=None, end_date=None,
         extent_path=None, output_extent=None, stats_flag=True,
         overwrite_flag=False):
    """Compute daily ETr/ETo from GRIDMET data

    Args:
        netcdf_ws (str): folder of GRIDMET netcdf files
        ancillary_ws (str): folder of ancillary rasters
        output_ws (str): folder of output rasters
        etr_flag (bool): if True, compute alfalfa reference ET (ETr)
        eto_flag (bool): if True, compute grass reference ET (ETo)
        start_date (str): ISO format date (YYYY-MM-DD)
        end_date (str): ISO format date (YYYY-MM-DD)
        extent_path (str): file path defining the output extent
        output_extent (list): decimal degrees values defining output extent
        stats_flag (bool): if True, compute raster statistics.
            Default is True.
        overwrite_flag (bool): if True, overwrite existing files

    Returns:
        None
    """
    logging.info('\nComputing GRIDMET ETo/ETr')
    np.seterr(invalid='ignore')

    # Compute ETr and/or ETo
    if not etr_flag and not eto_flag:
        logging.info('  ETo/ETr flag(s) not set, defaulting to ETr')
        etr_flag = True

    # If a date is not set, process 2017
    try:
        start_dt = dt.datetime.strptime(start_date, '%Y-%m-%d')
        logging.debug('  Start date: {}'.format(start_dt))
    except:
        start_dt = dt.datetime(2017, 1, 1)
        logging.info('  Start date: {}'.format(start_dt))
    try:
        end_dt = dt.datetime.strptime(end_date, '%Y-%m-%d')
        logging.debug('  End date: {}'.format(end_dt))
    except:
        end_dt = dt.datetime(2017, 12, 31)
        logging.info('  End date: {}'.format(end_dt))

    # GRIDMET ancillary elevation and latitude rasters
    elev_raster = os.path.join(ancillary_ws, 'gridmet_elev.img')
    lat_raster = os.path.join(ancillary_ws, 'gridmet_lat.img')

    # GRIDMET wind speed is measured at 10m
    zw = 10

    etr_fmt = 'etr_{}_daily_gridmet.img'
    eto_fmt = 'eto_{}_daily_gridmet.img'
    # gridmet_re = re.compile('(?P<VAR>\w+)_(?P<YEAR>\d{4}).nc')

    # GRIDMET band name dictionary
    gridmet_band_dict = dict()
    gridmet_band_dict['pr'] = 'precipitation_amount'
    gridmet_band_dict['srad'] = 'surface_downwelling_shortwave_flux_in_air'
    gridmet_band_dict['sph'] = 'specific_humidity'
    gridmet_band_dict['tmmn'] = 'air_temperature'
    gridmet_band_dict['tmmx'] = 'air_temperature'
    gridmet_band_dict['vs'] = 'wind_speed'

    # Get extent/geo from the elevation raster
    gridmet_ds = gdal.Open(elev_raster)
    gridmet_osr = gdc.raster_ds_osr(gridmet_ds)
    gridmet_proj = gdc.osr_proj(gridmet_osr)
    gridmet_cs = gdc.raster_ds_cellsize(gridmet_ds, x_only=True)
    gridmet_extent = gdc.raster_ds_extent(gridmet_ds)
    gridmet_full_geo = gridmet_extent.geo(gridmet_cs)
    gridmet_x, gridmet_y = gridmet_extent.origin()
    gridmet_ds = None
    logging.debug('  Projection: {}'.format(gridmet_proj))
    logging.debug('  Cellsize: {}'.format(gridmet_cs))
    logging.debug('  Geo: {}'.format(gridmet_full_geo))
    logging.debug('  Extent: {}'.format(gridmet_extent))

    # Subset data to a smaller extent
    if output_extent is not None:
        logging.info('\nComputing subset extent & geo')
        logging.debug('  Extent: {}'.format(output_extent))
        gridmet_extent = gdc.Extent(output_extent)
        gridmet_extent.adjust_to_snap(
            'EXPAND', gridmet_x, gridmet_y, gridmet_cs)
        gridmet_geo = gridmet_extent.geo(gridmet_cs)
        logging.debug('  Geo: {}'.format(gridmet_geo))
        logging.debug('  Extent: {}'.format(output_extent))
    elif extent_path is not None:
        logging.info('\nComputing subset extent & geo')
        if extent_path.lower().endswith('.shp'):
            gridmet_extent = gdc.feature_path_extent(extent_path)
            extent_osr = gdc.feature_path_osr(extent_path)
            extent_cs = None
        else:
            gridmet_extent = gdc.raster_path_extent(extent_path)
            extent_osr = gdc.raster_path_osr(extent_path)
            extent_cs = gdc.raster_path_cellsize(extent_path, x_only=True)
        gridmet_extent = gdc.project_extent(
            gridmet_extent, extent_osr, gridmet_osr, extent_cs)
        gridmet_extent.adjust_to_snap(
            'EXPAND', gridmet_x, gridmet_y, gridmet_cs)
        gridmet_geo = gridmet_extent.geo(gridmet_cs)
        logging.debug('  Geo: {}'.format(gridmet_geo))
        logging.debug('  Extent: {}'.format(gridmet_extent))
    else:
        gridmet_geo = gridmet_full_geo

    # Get indices for slicing/clipping the input arrays
    g_i, g_j = gdc.array_geo_offsets(
        gridmet_full_geo, gridmet_geo, cs=gridmet_cs)
    g_rows, g_cols = gridmet_extent.shape(cs=gridmet_cs)

    # Read the elevation and latitude arrays
    elev_array = gdc.raster_to_array(
        elev_raster, mask_extent=gridmet_extent, return_nodata=False)
    lat_array = gdc.raster_to_array(
        lat_raster, mask_extent=gridmet_extent, return_nodata=False)
    lat_array *= math.pi / 180  # degrees -> radians

    # Check the elevation and latitude arrays
    if np.all(np.isnan(elev_array)):
        logging.error('\nERROR: The elevation array is all nodata, exiting\n')
        sys.exit()
    elif np.all(np.isnan(lat_array)):
        logging.error('\nERROR: The latitude array is all nodata, exiting\n')
        sys.exit()

    # Build output folders
    etr_ws = os.path.join(output_ws, 'etr')
    eto_ws = os.path.join(output_ws, 'eto')
    if etr_flag and not os.path.isdir(etr_ws):
        os.makedirs(etr_ws)
    if eto_flag and not os.path.isdir(eto_ws):
        os.makedirs(eto_ws)

    # By default, try to process all possible years
    if start_dt.year == end_dt.year:
        year_list = [str(start_dt.year)]
    else:
        year_list = sorted(map(str, range(start_dt.year, end_dt.year + 1)))

    # Process each year separately
    for year_str in year_list:
        logging.info("\nYear: {}".format(year_str))
        year_int = int(year_str)
        year_days = int(dt.datetime(year_int, 12, 31).strftime('%j'))
        if start_dt is not None and year_int < start_dt.year:
            logging.debug('  Before start date, skipping')
            continue
        elif end_dt is not None and year_int > end_dt.year:
            logging.debug('  After end date, skipping')
            continue

        # Build input file paths
        tmin_path = os.path.join(netcdf_ws, 'tmmn_{}.nc'.format(year_str))
        tmax_path = os.path.join(netcdf_ws, 'tmmx_{}.nc'.format(year_str))
        sph_path = os.path.join(netcdf_ws, 'sph_{}.nc'.format(year_str))
        rs_path = os.path.join(netcdf_ws, 'srad_{}.nc'.format(year_str))
        wind_path = os.path.join(netcdf_ws, 'vs_{}.nc'.format(year_str))

        # Check that all input files are present
        missing_flag = False
        for input_path in [tmin_path, tmax_path, sph_path,
                           rs_path, wind_path]:
            if not os.path.isfile(input_path):
                logging.debug(
                    '  Input NetCDF doesn\'t exist\n  {}'.format(input_path))
                missing_flag = True
        if missing_flag:
            logging.debug('  skipping')
            continue
        logging.debug("  {}".format(tmin_path))
        logging.debug("  {}".format(tmax_path))
        logging.debug("  {}".format(sph_path))
        logging.debug("  {}".format(rs_path))
        logging.debug("  {}".format(wind_path))

        # Create a single raster for each year with 366 bands
        # Each day will be stored in a separate band
        etr_raster = os.path.join(etr_ws, etr_fmt.format(year_str))
        eto_raster = os.path.join(eto_ws, eto_fmt.format(year_str))
        if etr_flag and (overwrite_flag or not os.path.isfile(etr_raster)):
            logging.debug('  {}'.format(etr_raster))
            gdc.build_empty_raster(
                etr_raster, band_cnt=366, output_dtype=np.float32,
                output_proj=gridmet_proj, output_cs=gridmet_cs,
                output_extent=gridmet_extent, output_fill_flag=True)
        if eto_flag and (overwrite_flag or not os.path.isfile(eto_raster)):
            logging.debug('  {}'.format(eto_raster))
            gdc.build_empty_raster(
                eto_raster, band_cnt=366, output_dtype=np.float32,
                output_proj=gridmet_proj, output_cs=gridmet_cs,
                output_extent=gridmet_extent, output_fill_flag=True)
        # DEADBEEF - Need to find a way to test if both of these conditionals
        #   did not pass and pass a logging debug message to the user

        # Read in the GRIDMET NetCDF files
        tmin_nc_f = netCDF4.Dataset(tmin_path, 'r')
        tmax_nc_f = netCDF4.Dataset(tmax_path, 'r')
        sph_nc_f = netCDF4.Dataset(sph_path, 'r')
        rs_nc_f = netCDF4.Dataset(rs_path, 'r')
        wind_nc_f = netCDF4.Dataset(wind_path, 'r')

        logging.info('  Reading NetCDFs into memory')
        # Immediately clip the input arrays to save memory
        tmin_nc = tmin_nc_f.variables[gridmet_band_dict['tmmn']][
            :, g_i:g_i + g_cols, g_j:g_j + g_rows].copy()
        tmax_nc = tmax_nc_f.variables[gridmet_band_dict['tmmx']][
            :, g_i:g_i + g_cols, g_j:g_j + g_rows].copy()
        sph_nc = sph_nc_f.variables[gridmet_band_dict['sph']][
            :, g_i:g_i + g_cols, g_j:g_j + g_rows].copy()
        rs_nc = rs_nc_f.variables[gridmet_band_dict['srad']][
            :, g_i:g_i + g_cols, g_j:g_j + g_rows].copy()
        wind_nc = wind_nc_f.variables[gridmet_band_dict['vs']][
            :, g_i:g_i + g_cols, g_j:g_j + g_rows].copy()
        # tmin_nc = tmin_nc_f.variables[gridmet_band_dict['tmmn']][:]
        # tmax_nc = tmax_nc_f.variables[gridmet_band_dict['tmmx']][:]
        # sph_nc = sph_nc_f.variables[gridmet_band_dict['sph']][:]
        # rs_nc = rs_nc_f.variables[gridmet_band_dict['srad']][:]
        # wind_nc = wind_nc_f.variables[gridmet_band_dict['vs']][:]

        # Transpose the arrays back to row x col
        tmin_nc = np.transpose(tmin_nc, (0, 2, 1))
        tmax_nc = np.transpose(tmax_nc, (0, 2, 1))
        sph_nc = np.transpose(sph_nc, (0, 2, 1))
        rs_nc = np.transpose(rs_nc, (0, 2, 1))
        wind_nc = np.transpose(wind_nc, (0, 2, 1))

        # A numpy array is returned when slicing a masked array
        #   if there are no masked pixels
        # This is a hack to force the numpy array back to a masked array
        # For now assume all arrays need to be converted
        if type(tmin_nc) != np.ma.core.MaskedArray:
            tmin_nc = np.ma.core.MaskedArray(
                tmin_nc, np.zeros(tmin_nc.shape, dtype=bool))
        if type(tmax_nc) != np.ma.core.MaskedArray:
            tmax_nc = np.ma.core.MaskedArray(
                tmax_nc, np.zeros(tmax_nc.shape, dtype=bool))
        if type(sph_nc) != np.ma.core.MaskedArray:
            sph_nc = np.ma.core.MaskedArray(
                sph_nc, np.zeros(sph_nc.shape, dtype=bool))
        if type(rs_nc) != np.ma.core.MaskedArray:
            rs_nc = np.ma.core.MaskedArray(
                rs_nc, np.zeros(rs_nc.shape, dtype=bool))
        if type(wind_nc) != np.ma.core.MaskedArray:
            wind_nc = np.ma.core.MaskedArray(
                wind_nc, np.zeros(wind_nc.shape, dtype=bool))

        # Check all valid dates in the year
        year_dates = date_range(
            dt.datetime(year_int, 1, 1), dt.datetime(year_int + 1, 1, 1))
        for date_dt in year_dates:
            if start_dt is not None and date_dt < start_dt:
                logging.debug('  {} - before start date, skipping'.format(
                    date_dt.date()))
                continue
            elif end_dt is not None and date_dt > end_dt:
                logging.debug('  {} - after end date, skipping'.format(
                    date_dt.date()))
                continue
            else:
                logging.info('  {}'.format(date_dt.date()))

            doy = int(date_dt.strftime('%j'))
            doy_i = range(1, year_days + 1).index(doy)

            # Arrays are read as masked arrays with a fill value of -9999
            # Convert to basic numpy arrays with nan values
            try:
                tmin_ma = tmin_nc[doy_i, :, :]
            except IndexError:
                logging.info('  date not in netcdf, skipping')
                continue
            tmin_array = tmin_ma.data.astype(np.float32)
            tmin_nodata = float(tmin_ma.fill_value)
            tmin_array[tmin_array == tmin_nodata] = np.nan
            try:
                tmax_ma = tmax_nc[doy_i, :, :]
            except IndexError:
                logging.info('  date not in netcdf, skipping')
                continue
            tmax_array = tmax_ma.data.astype(np.float32)
            tmax_nodata = float(tmax_ma.fill_value)
            tmax_array[tmax_array == tmax_nodata] = np.nan
            try:
                sph_ma = sph_nc[doy_i, :, :]
            except IndexError:
                logging.info('  date not in netcdf, skipping')
                continue
            sph_array = sph_ma.data.astype(np.float32)
            sph_nodata = float(sph_ma.fill_value)
            sph_array[sph_array == sph_nodata] = np.nan
            try:
                rs_ma = rs_nc[doy_i, :, :]
            except IndexError:
                logging.info('  date not in netcdf, skipping')
                continue
            rs_array = rs_ma.data.astype(np.float32)
            rs_nodata = float(rs_ma.fill_value)
            rs_array[rs_array == rs_nodata] = np.nan
            try:
                wind_ma = wind_nc[doy_i, :, :]
            except IndexError:
                logging.info('  date not in netcdf, skipping')
                continue
            wind_array = wind_ma.data.astype(np.float32)
            wind_nodata = float(wind_ma.fill_value)
            wind_array[wind_array == wind_nodata] = np.nan
            del tmin_ma, tmax_ma, sph_ma, rs_ma, wind_ma

            # Since the inputs are netcdf, need to create GDAL raster
            #   datasets in order to use the gdal_common functions
            # Create an in-memory dataset of the full array
            tmin_ds = gdc.array_to_mem_ds(
                tmin_array, output_geo=gridmet_geo,
                # tmin_array, output_geo=gridmet_full_geo,
                output_proj=gridmet_proj)
            tmax_ds = gdc.array_to_mem_ds(
                tmax_array, output_geo=gridmet_geo,
                # tmax_array, output_geo=gridmet_full_geo,
                output_proj=gridmet_proj)
            sph_ds = gdc.array_to_mem_ds(
                sph_array, output_geo=gridmet_geo,
                # sph_array, output_geo=gridmet_full_geo,
                output_proj=gridmet_proj)
            rs_ds = gdc.array_to_mem_ds(
                rs_array, output_geo=gridmet_geo,
                # rs_array, output_geo=gridmet_full_geo,
                output_proj=gridmet_proj)
            wind_ds = gdc.array_to_mem_ds(
                wind_array, output_geo=gridmet_geo,
                # wind_array, output_geo=gridmet_full_geo,
                output_proj=gridmet_proj)

            # Then extract the subset from the in-memory dataset
            tmin_array = gdc.raster_ds_to_array(
                tmin_ds, 1, mask_extent=gridmet_extent, return_nodata=False)
            tmax_array = gdc.raster_ds_to_array(
                tmax_ds, 1, mask_extent=gridmet_extent, return_nodata=False)
            sph_array = gdc.raster_ds_to_array(
                sph_ds, 1, mask_extent=gridmet_extent, return_nodata=False)
            rs_array = gdc.raster_ds_to_array(
                rs_ds, 1, mask_extent=gridmet_extent, return_nodata=False)
            wind_array = gdc.raster_ds_to_array(
                wind_ds, 1, mask_extent=gridmet_extent, return_nodata=False)
            del tmin_ds, tmax_ds, sph_ds, rs_ds, wind_ds

            # Adjust units
            tmin_array -= 273.15  # K -> C
            tmax_array -= 273.15  # K -> C
            rs_array *= 0.0864    # W m-2 -> MJ m-2 day-1 (86400 / 1e6)

            # ETr/ETo
            if etr_flag:
                etr_array = et_common.refet_daily_func(
                    tmin_array, tmax_array, sph_array, rs_array,
                    wind_array, zw, elev_array, lat_array, doy, 'ETR')
            if eto_flag:
                eto_array = et_common.refet_daily_func(
                    tmin_array, tmax_array, sph_array, rs_array,
                    wind_array, zw, elev_array, lat_array, doy, 'ETO')
            # del tmin_array, tmax_array, sph_array, rs_array, wind_array

            # Save the projected array as 32-bit floats
            if etr_flag:
                gdc.array_to_comp_raster(
                    etr_array.astype(np.float32), etr_raster,
                    band=doy, stats_flag=False)
                # gdc.array_to_raster(
                #     etr_array.astype(np.float32), etr_raster,
                #     output_geo=gridmet_geo, output_proj=gridmet_proj,
                #     stats_flag=stats_flag)
                del etr_array
            if eto_flag:
                gdc.array_to_comp_raster(
                    eto_array.astype(np.float32), eto_raster,
                    band=doy, stats_flag=False)
                # gdc.array_to_raster(
                #     eto_array.astype(np.float32), eto_raster,
                #     output_geo=gridmet_geo, output_proj=gridmet_proj,
                #     stats_flag=stats_flag)
                del eto_array

        del tmin_nc, tmax_nc, sph_nc, rs_nc, wind_nc
        tmin_nc_f.close()
        tmax_nc_f.close()
        sph_nc_f.close()
        rs_nc_f.close()
        wind_nc_f.close()
        del tmin_nc_f, tmax_nc_f, sph_nc_f, rs_nc_f, wind_nc_f

        if stats_flag and etr_flag:
            gdc.raster_statistics(etr_raster)
        if stats_flag and eto_flag:
            gdc.raster_statistics(eto_raster)

    logging.debug('\nScript Complete')
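# ----------------------------------------------------------------------------
# Hedged sketch (not part of the original module): the per-variable blocks in
# the date loop above repeat the same masked-array -> float32/NaN conversion
# five times.  Assuming the nodata pixels carry the netCDF fill value in
# .data (as they do when netCDF4 applies _FillValue), np.ma.filled() produces
# the same result as copying .data and replacing the fill value, so a small
# hypothetical helper could collapse the repetition:
import numpy as np


def _masked_to_nan(ma_slice):
    """Return a float32 copy of a masked-array slice with NaN nodata."""
    return np.ma.filled(ma_slice.astype(np.float32), np.nan)

# Example usage with the names from the loop above:
#   tmin_array = _masked_to_nan(tmin_nc[doy_i, :, :])
# ----------------------------------------------------------------------------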
def main(gis_ws, cdl_ws, cdl_year, study_area_path, study_area_buffer=None,
         overwrite_flag=False, pyramids_flag=False, stats_flag=False):
    """Build study area raster from a target extent and rebuild color table

    Args:
        gis_ws (str): Folder/workspace path of the GIS data for the project
        cdl_ws (str): Folder/workspace path of the Cropland Data Layer rasters
        cdl_year (str): Cropland Data Layer year
        study_area_path (str): File path to the study area shapefile
        study_area_buffer (float): Distance to buffer the input extent
            Units will be the same as the extent spatial reference
        overwrite_flag (bool): If True, overwrite the output raster
        pyramids_flag (bool): If True, build pyramids/overviews
            for the output raster
        stats_flag (bool): If True, compute statistics for the output raster

    Returns:
        None
    """
    scratch_ws = os.path.join(gis_ws, 'scratch')
    zone_raster_path = os.path.join(scratch_ws, 'zone_raster.img')
    zone_polygon_path = os.path.join(scratch_ws, 'zone_polygon.shp')

    # If multiple years were passed in, only use the first one
    cdl_year = list(util.parse_int_set(cdl_year))[0]

    cdl_format = '{0}_30m_cdls.img'
    cdl_path = os.path.join(cdl_ws, cdl_format.format(cdl_year))

    # Check input folders
    if not os.path.isdir(gis_ws):
        logging.error(('\nERROR: The GIS workspace {} ' +
                       'does not exist').format(gis_ws))
        sys.exit()
    elif not os.path.isfile(cdl_path):
        logging.error(('\nERROR: The input CDL raster {} ' +
                       'does not exist').format(cdl_path))
        sys.exit()
    elif not os.path.isfile(study_area_path):
        logging.error(('\nERROR: The extent shapefile {} ' +
                       'does not exist').format(study_area_path))
        sys.exit()
    if not os.path.isdir(scratch_ws):
        os.makedirs(scratch_ws)
    logging.info('\nGIS Workspace: {}'.format(gis_ws))
    logging.info('Scratch Workspace: {}'.format(scratch_ws))

    # Reference all output rasters to the CDL raster
    # output_osr = gdc.raster_path_osr(cdl_path)
    output_proj = gdc.raster_path_proj(cdl_path)
    output_cs = gdc.raster_path_cellsize(cdl_path)[0]
    output_x, output_y = gdc.raster_path_origin(cdl_path)
    # output_osr = gdc.proj4_osr(
    #     "+proj=aea +lat_1=29.5 +lat_2=45.5 +lat_0=23 +lon_0=-96 " +
    #     "+x_0=0 +y_0=0 +ellps=GRS80 +towgs84=0,0,0,0,0,0,0 +units=m " +
    #     "+no_defs")
    # output_cs = 30
    # output_x, output_y = 15, 15

    if pyramids_flag:
        levels = '2 4 8 16 32 64 128'
        # gdal.SetConfigOption('USE_RRD', 'YES')
        # gdal.SetConfigOption('HFA_USE_RRD', 'YES')

    # Overwrite
    if os.path.isfile(zone_raster_path) and overwrite_flag:
        subprocess.call(['gdalmanage', 'delete', zone_raster_path])
    if os.path.isfile(zone_polygon_path) and overwrite_flag:
        remove_file(zone_polygon_path)
        # subprocess.call(['gdalmanage', 'delete', zone_polygon_path])

    # Project the study area extent shapefile to the CDL spatial reference
    if not os.path.isfile(zone_polygon_path):
        logging.info('Projecting extent shapefile')
        subprocess.call([
            'ogr2ogr', '-overwrite', '-preserve_fid',
            '-t_srs', str(output_proj),
            zone_polygon_path, study_area_path])

    # Get the study area extent from the projected shapefile
    clip_extent = gdc.feature_path_extent(zone_polygon_path)
    logging.debug('Clip Extent: {}'.format(clip_extent))

    # This will buffer in the CDL spatial reference & units
    if study_area_buffer is not None:
        logging.debug('Buffering: {}'.format(study_area_buffer))
        clip_extent.buffer_extent(study_area_buffer)
        logging.debug('Clip Extent: {}'.format(clip_extent))
    clip_extent.adjust_to_snap('EXPAND', output_x, output_y, output_cs)
    logging.debug('Clip Extent: {}'.format(clip_extent))

    # gdal_translate uses ul/lr corners, not the full extent
    clip_ullr = clip_extent.ul_lr_swap()
    logging.debug('Clip UL/LR: {}'.format(clip_ullr))

    # Rasterize the extent shapefile for masking in other scripts
    if (not os.path.isfile(zone_raster_path) and
            os.path.isfile(zone_polygon_path)):
        logging.info('Rasterizing shapefile')
        subprocess.call(
            ['gdal_rasterize', '-of', 'HFA', '-ot', 'Byte',
             '-burn', '1', '-init', '0', '-a_nodata', '255',
             '-co', 'COMPRESSED=YES'] +
            ['-te'] + str(clip_extent).split() +
            ['-tr', str(output_cs), str(output_cs),
             zone_polygon_path, zone_raster_path])
        # remove_file(zone_polygon_path)

    # Statistics
    if stats_flag and os.path.isfile(zone_raster_path):
        logging.info('Computing statistics')
        logging.debug('  {}'.format(zone_raster_path))
        subprocess.call(
            ['gdalinfo', '-stats', '-nomd', '-noct', '-norat',
             zone_raster_path])

    # Pyramids
    if pyramids_flag and os.path.isfile(zone_raster_path):
        logging.info('Building pyramids')
        logging.debug('  {}'.format(zone_raster_path))
        subprocess.call(
            ['gdaladdo', '-ro', zone_raster_path] + levels.split())
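# ----------------------------------------------------------------------------
# Hedged sketch (not part of the original module) of what the
# gdc.Extent.adjust_to_snap('EXPAND', ...) calls above do conceptually:
# each extent edge is moved outward onto the snap grid defined by the CDL
# raster origin (output_x, output_y) and cellsize.  The project's gdc Extent
# class is authoritative; this is plain arithmetic for illustration only.
import math


def snap_extent_expand(xmin, ymin, xmax, ymax, snap_x, snap_y, cs):
    """Expand an extent outward so its edges land on the snap grid."""
    xmin = snap_x + math.floor((xmin - snap_x) / cs) * cs
    ymin = snap_y + math.floor((ymin - snap_y) / cs) * cs
    xmax = snap_x + math.ceil((xmax - snap_x) / cs) * cs
    ymax = snap_y + math.ceil((ymax - snap_y) / cs) * cs
    return xmin, ymin, xmax, ymax

# Example with a 30 m grid anchored at (15, 15), matching the commented
# defaults above:
#   snap_extent_expand(101.0, 7.0, 289.0, 223.0, 15, 15, 30)
#   -> (75.0, -15.0, 315.0, 225.0)
# ----------------------------------------------------------------------------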
def main(img_ws=os.getcwd(), ancillary_ws=os.getcwd(), output_ws=os.getcwd(),
         etr_flag=False, eto_flag=False, start_date=None, end_date=None,
         extent_path=None, output_extent=None, stats_flag=True,
         overwrite_flag=False, use_cimis_eto_flag=False):
    """Compute daily ETr/ETo from CIMIS data

    Args:
        img_ws (str): root folder of CIMIS data
        ancillary_ws (str): folder of ancillary rasters
        output_ws (str): folder of output rasters
        etr_flag (bool): if True, compute alfalfa reference ET (ETr)
        eto_flag (bool): if True, compute grass reference ET (ETo)
        start_date (str): ISO format date (YYYY-MM-DD)
        end_date (str): ISO format date (YYYY-MM-DD)
        extent_path (str): file path defining the output extent
        output_extent (list): decimal degrees values defining output extent
        stats_flag (bool): if True, compute raster statistics.
            Default is True.
        overwrite_flag (bool): if True, overwrite existing files
        use_cimis_eto_flag (bool): if True, use the CIMIS ETo raster if one
            of the component rasters is missing and ETo/ETr cannot be computed

    Returns:
        None
    """
    logging.info('\nComputing CIMIS ETo/ETr')
    np.seterr(invalid='ignore')

    # Use the CIMIS ETo raster directly instead of computing from components
    # Currently this will only be applied if one of the inputs is missing
    # NOTE: this hardcoded value overrides the use_cimis_eto_flag parameter
    use_cimis_eto_flag = True

    # Compute ETr and/or ETo
    if not etr_flag and not eto_flag:
        logging.info('  ETo/ETr flag(s) not set, defaulting to ETr')
        etr_flag = True

    # If a date is not set, process 2017
    try:
        start_dt = dt.datetime.strptime(start_date, '%Y-%m-%d')
        logging.debug('  Start date: {}'.format(start_dt))
    except:
        start_dt = dt.datetime(2017, 1, 1)
        logging.info('  Start date: {}'.format(start_dt))
    try:
        end_dt = dt.datetime.strptime(end_date, '%Y-%m-%d')
        logging.debug('  End date: {}'.format(end_dt))
    except:
        end_dt = dt.datetime(2017, 12, 31)
        logging.info('  End date: {}'.format(end_dt))

    etr_folder = 'etr'
    eto_folder = 'eto'
    etr_fmt = 'etr_{}_daily_cimis.img'
    eto_fmt = 'eto_{}_daily_cimis.img'

    # Ancillary rasters (the DEM is used for the air pressure calculation)
    mask_raster = os.path.join(ancillary_ws, 'cimis_mask.img')
    dem_raster = os.path.join(ancillary_ws, 'cimis_elev.img')
    lat_raster = os.path.join(ancillary_ws, 'cimis_lat.img')
    # lon_raster = os.path.join(ancillary_ws, 'cimis_lon.img')

    # Interpolate zero windspeed pixels
    # interpolate_zero_u2_flag = False

    # Interpolate edge and coastal cells
    # interpolate_edge_flag = False

    # Resample type
    # 0 = GRA_NearestNeighbour, Nearest neighbour (select on one input pixel)
    # 1 = GRA_Bilinear, Bilinear (2x2 kernel)
    # 2 = GRA_Cubic, Cubic Convolution Approximation (4x4 kernel)
    # 3 = GRA_CubicSpline, Cubic B-Spline Approximation (4x4 kernel)
    # 4 = GRA_Lanczos, Lanczos windowed sinc interpolation (6x6 kernel)
    # 5 = GRA_Average, Average (computes the average of all non-NODATA
    #     contributing pixels)
    # 6 = GRA_Mode, Mode (selects the value which appears most often
    #     of all the sampled points)
    resample_type = gdal.GRA_CubicSpline

    # CIMIS wind speed is measured at 2m
    zw = 2

    # Output workspaces
    etr_ws = os.path.join(output_ws, etr_folder)
    eto_ws = os.path.join(output_ws, eto_folder)
    if etr_flag and not os.path.isdir(etr_ws):
        os.makedirs(etr_ws)
    if eto_flag and not os.path.isdir(eto_ws):
        os.makedirs(eto_ws)

    # Check the ETr/ETo functions
    test_flag = False

    # Check that the daily_refet_func produces the correct values
    if test_flag:
        doy_test = 245
        elev_test = 1050.0
        lat_test = 39.9396 * math.pi / 180
        tmin_test = 11.07
        tmax_test = 34.69
        rs_test = 22.38
        u2_test = 1.94
        zw_test = 2.5
        tdew_test = -3.22
        ea_test = et_common.saturation_vapor_pressure_func(tdew_test)
        pair_test = 101.3 * np.power((285 - 0.0065 * elev_test) / 285, 5.26)
        q_test = 0.622 * ea_test / (pair_test - (0.378 * ea_test))
        etr = float(et_common.daily_refet_func(
            tmin_test, tmax_test, q_test, rs_test, u2_test, zw_test,
            elev_test, doy_test, lat_test, 'ETR'))
        eto = float(et_common.daily_refet_func(
            tmin_test, tmax_test, q_test, rs_test, u2_test, zw_test,
            elev_test, doy_test, lat_test, 'ETO'))
        print('ETr: 8.89', etr)
        print('ETo: 6.16', eto)
        sys.exit()

    # Get the CIMIS grid properties from the mask raster
    cimis_mask_ds = gdal.Open(mask_raster)
    cimis_osr = gdc.raster_ds_osr(cimis_mask_ds)
    cimis_proj = gdc.osr_proj(cimis_osr)
    cimis_cs = gdc.raster_ds_cellsize(cimis_mask_ds, x_only=True)
    cimis_extent = gdc.raster_ds_extent(cimis_mask_ds)
    cimis_full_geo = cimis_extent.geo(cimis_cs)
    cimis_x, cimis_y = cimis_extent.origin()
    cimis_mask_ds = None
    logging.debug('  Projection: {}'.format(cimis_proj))
    logging.debug('  Cellsize: {}'.format(cimis_cs))
    logging.debug('  Geo: {}'.format(cimis_full_geo))
    logging.debug('  Extent: {}'.format(cimis_extent))

    # Manually set the CIMIS grid properties
    # cimis_extent = gdc.Extent((-400000, -650000, 600000, 454000))
    # cimis_cs = 2000
    # cimis_geo = gdc.extent_geo(cimis_extent, cellsize)
    # cimis_epsg = 3310  # NAD_1983_California_Teale_Albers
    # cimis_x, cimis_y = (0, 0)

    # Subset data to a smaller extent
    if output_extent is not None:
        logging.info('\nComputing subset extent & geo')
        logging.debug('  Extent: {}'.format(output_extent))
        cimis_extent = gdc.Extent(output_extent)
        cimis_extent.adjust_to_snap('EXPAND', cimis_x, cimis_y, cimis_cs)
        cimis_geo = cimis_extent.geo(cimis_cs)
        logging.debug('  Geo: {}'.format(cimis_geo))
        logging.debug('  Extent: {}'.format(output_extent))
    elif extent_path is not None:
        logging.info('\nComputing subset extent & geo')
        if extent_path.lower().endswith('.shp'):
            cimis_extent = gdc.feature_path_extent(extent_path)
            extent_osr = gdc.feature_path_osr(extent_path)
            extent_cs = None
        else:
            cimis_extent = gdc.raster_path_extent(extent_path)
            extent_osr = gdc.raster_path_osr(extent_path)
            extent_cs = gdc.raster_path_cellsize(extent_path, x_only=True)
        cimis_extent = gdc.project_extent(
            cimis_extent, extent_osr, cimis_osr, extent_cs)
        cimis_extent.adjust_to_snap('EXPAND', cimis_x, cimis_y, cimis_cs)
        cimis_geo = cimis_extent.geo(cimis_cs)
        logging.debug('  Geo: {}'.format(cimis_geo))
        logging.debug('  Extent: {}'.format(cimis_extent))
    else:
        cimis_geo = cimis_full_geo

    # Latitude
    lat_array = gdc.raster_to_array(
        lat_raster, mask_extent=cimis_extent, return_nodata=False)
    lat_array = lat_array.astype(np.float32)
    lat_array *= math.pi / 180  # degrees -> radians

    # Elevation data
    elev_array = gdc.raster_to_array(
        dem_raster, mask_extent=cimis_extent, return_nodata=False)
    elev_array = elev_array.astype(np.float32)

    # Process each year in the input workspace
    logging.info("")
    for year_str in sorted(os.listdir(img_ws)):
        logging.debug('{}'.format(year_str))
        if not re.match('^\d{4}$', year_str):
            logging.debug('  Not a 4 digit year folder, skipping')
            continue
        year_ws = os.path.join(img_ws, year_str)
        year_int = int(year_str)
        # year_days = int(dt.datetime(year_int, 12, 31).strftime('%j'))
        if start_dt is not None and year_int < start_dt.year:
            logging.debug('  Before start date, skipping')
            continue
        elif end_dt is not None and year_int > end_dt.year:
            logging.debug('  After end date, skipping')
            continue
        logging.info('{}'.format(year_str))

        # Output paths
        etr_raster = os.path.join(etr_ws, etr_fmt.format(year_str))
        eto_raster = os.path.join(eto_ws, eto_fmt.format(year_str))
        if etr_flag and (overwrite_flag or not os.path.isfile(etr_raster)):
            logging.debug('  {}'.format(etr_raster))
            gdc.build_empty_raster(
                etr_raster, band_cnt=366, output_dtype=np.float32,
                output_proj=cimis_proj, output_cs=cimis_cs,
                output_extent=cimis_extent, output_fill_flag=True)
        if eto_flag and (overwrite_flag or not os.path.isfile(eto_raster)):
            logging.debug('  {}'.format(eto_raster))
            gdc.build_empty_raster(
                eto_raster, band_cnt=366, output_dtype=np.float32,
                output_proj=cimis_proj, output_cs=cimis_cs,
                output_extent=cimis_extent, output_fill_flag=True)

        # Process each date in the year
        for date_str in sorted(os.listdir(year_ws)):
            logging.debug('{}'.format(date_str))
            try:
                date_dt = dt.datetime.strptime(date_str, '%Y_%m_%d')
            except ValueError:
                logging.debug(
                    '  Invalid folder date format (YYYY_MM_DD), skipping')
                continue
            if start_dt is not None and date_dt < start_dt:
                logging.debug('  Before start date, skipping')
                continue
            elif end_dt is not None and date_dt > end_dt:
                logging.debug('  After end date, skipping')
                continue
            logging.info(date_str)
            date_ws = os.path.join(year_ws, date_str)
            doy = int(date_dt.strftime('%j'))

            # Set file paths
            tmax_path = os.path.join(date_ws, 'Tx.img')
            tmin_path = os.path.join(date_ws, 'Tn.img')
            tdew_path = os.path.join(date_ws, 'Tdew.img')
            rso_path = os.path.join(date_ws, 'Rso.img')
            rs_path = os.path.join(date_ws, 'Rs.img')
            u2_path = os.path.join(date_ws, 'U2.img')
            eto_path = os.path.join(date_ws, 'ETo.img')
            # k_path = os.path.join(date_ws, 'K.img')
            # rnl_path = os.path.join(date_ws, 'Rnl.img')
            input_list = [
                tmin_path, tmax_path, tdew_path, u2_path, rs_path, rso_path]

            # If any input raster is missing, skip the day,
            #   unless ETo is present (and use_cimis_eto_flag is True)
            day_skip_flag = False
            for t_path in input_list:
                if not os.path.isfile(t_path):
                    logging.info('  {} is missing'.format(t_path))
                    day_skip_flag = True

            if (day_skip_flag and use_cimis_eto_flag and
                    os.path.isfile(eto_path)):
                logging.info('  Using CIMIS ETo directly')
                eto_array = gdc.raster_to_array(
                    eto_path, 1, cimis_extent, return_nodata=False)
                eto_array = eto_array.astype(np.float32)
                if not np.any(eto_array):
                    logging.info('  {} is empty or missing'.format(eto_path))
                    logging.info('  Skipping date')
                    continue
                # ETr is approximated as 1.2 * ETo
                if etr_flag:
                    gdc.array_to_comp_raster(
                        1.2 * eto_array, etr_raster, band=doy,
                        stats_flag=False)
                    # gdc.array_to_raster(
                    #     1.2 * eto_array, etr_raster,
                    #     output_geo=cimis_geo, output_proj=cimis_proj,
                    #     stats_flag=stats_flag)
                # ETo
                if eto_flag:
                    gdc.array_to_comp_raster(
                        eto_array, eto_raster, band=doy, stats_flag=False)
                    # gdc.array_to_raster(
                    #     eto_array, eto_raster,
                    #     output_geo=cimis_geo, output_proj=cimis_proj,
                    #     stats_flag=stats_flag)
                del eto_array
                continue
            elif not day_skip_flag:
                # Read in the rasters
                # DEADBEEF - Read with extent since some arrays are too big
                #   i.e. 2012-03-21, 2013-03-20, 2014-02-27
                tmin_array = gdc.raster_to_array(
                    tmin_path, 1, cimis_extent, return_nodata=False)
                tmax_array = gdc.raster_to_array(
                    tmax_path, 1, cimis_extent, return_nodata=False)
                tdew_array = gdc.raster_to_array(
                    tdew_path, 1, cimis_extent, return_nodata=False)
                rso_array = gdc.raster_to_array(
                    rso_path, 1, cimis_extent, return_nodata=False)
                rs_array = gdc.raster_to_array(
                    rs_path, 1, cimis_extent, return_nodata=False)
                u2_array = gdc.raster_to_array(
                    u2_path, 1, cimis_extent, return_nodata=False)
                # k_array = gdc.raster_to_array(
                #     k_path, 1, cimis_extent, return_nodata=False)
                # rnl_array = gdc.raster_to_array(
                #     rnl_path, 1, cimis_extent, return_nodata=False)

                # Check that all the input arrays have data
                for t_name, t_array in [
                        [tmin_path, tmin_array], [tmax_path, tmax_array],
                        [tdew_path, tdew_array], [u2_path, u2_array],
                        [rs_path, rs_array]]:
                    if not np.any(t_array):
                        logging.warning(
                            '  {} is empty or missing'.format(t_name))
                        day_skip_flag = True
                if day_skip_flag:
                    logging.warning('  Skipping date')
                    continue

                # DEADBEEF - Some arrays have a 500m cellsize
                #   i.e. 2011-07-25, 2010-01-01 -> 2010-07-27
                tmin_array = rescale_array_func(tmin_array, elev_array, 'tmin')
                tmax_array = rescale_array_func(tmax_array, elev_array, 'tmax')
                tdew_array = rescale_array_func(tdew_array, elev_array, 'tdew')
                rso_array = rescale_array_func(rso_array, elev_array, 'rso')
                rs_array = rescale_array_func(rs_array, elev_array, 'rs')
                u2_array = rescale_array_func(u2_array, elev_array, 'u2')
                # k_array = rescale_array_func(k_array, elev_array, 'k')
                # rnl_array = rescale_array_func(rnl_array, elev_array, 'rnl')

                # Back calculate q from tdew by first calculating ea from tdew
                es_array = et_common.saturation_vapor_pressure_func(tdew_array)
                pair_array = et_common.air_pressure_func(elev_array)
                q_array = 0.622 * es_array / (pair_array - (0.378 * es_array))
                del es_array, pair_array, tdew_array

                # Back calculate rhmin/rhmax from tdew
                # ea_tmax = et_common.saturation_vapor_pressure_func(tmax_array)
                # ea_tmin = et_common.saturation_vapor_pressure_func(tmin_array)
                # rhmin = ea_tdew * 2 / (ea_tmax + ea_tmin)
                # rhmax = ea_tdew * 2 / (ea_tmax + ea_tmin)
                # del ea_tmax, ea_tmin

                # ETr
                if etr_flag:
                    etr_array = et_common.refet_daily_func(
                        tmin_array, tmax_array, q_array, rs_array,
                        u2_array, zw, elev_array, lat_array, doy,
                        ref_type='ETR', rso_type='ARRAY', rso=rso_array)
                    gdc.array_to_comp_raster(
                        etr_array.astype(np.float32), etr_raster,
                        band=doy, stats_flag=False)
                    # gdc.array_to_raster(
                    #     etr_array.astype(np.float32), etr_raster,
                    #     output_geo=cimis_geo, output_proj=cimis_proj,
                    #     stats_flag=stats_flag)
                    del etr_array
                # ETo
                if eto_flag:
                    eto_array = et_common.refet_daily_func(
                        tmin_array, tmax_array, q_array, rs_array,
                        u2_array, zw, elev_array, lat_array, doy,
                        ref_type='ETO', rso_type='ARRAY', rso=rso_array)
                    gdc.array_to_comp_raster(
                        eto_array.astype(np.float32), eto_raster,
                        band=doy, stats_flag=False)
                    # gdc.array_to_raster(
                    #     eto_array.astype(np.float32), eto_raster,
                    #     output_geo=cimis_geo, output_proj=cimis_proj,
                    #     stats_flag=stats_flag)
                    del eto_array

                # Cleanup
                del tmin_array, tmax_array, u2_array, rs_array, q_array
                # del rnl, rs, rso
            else:
                logging.info('  Skipping date')
                continue

        if stats_flag and etr_flag:
            gdc.raster_statistics(etr_raster)
        if stats_flag and eto_flag:
            gdc.raster_statistics(eto_raster)

    logging.debug('\nScript Complete')
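# ----------------------------------------------------------------------------
# Hedged sketch (not part of the original module) of the Tdew -> q
# back-calculation used above, written out with the standard FAO-56/ASCE
# formulas.  The project's et_common.saturation_vapor_pressure_func and
# et_common.air_pressure_func are authoritative; the constants below are the
# usual published values and may differ slightly from those helpers.
import numpy as np


def tdew_to_q(tdew_c, elev_m):
    """Specific humidity [kg/kg] from dewpoint [C] and elevation [m]."""
    # Actual vapor pressure = saturation vapor pressure at the dewpoint [kPa]
    ea = 0.6108 * np.exp(17.27 * tdew_c / (tdew_c + 237.3))
    # Mean air pressure from elevation [kPa]
    pair = 101.3 * np.power((293.0 - 0.0065 * elev_m) / 293.0, 5.26)
    # Specific humidity from the ratio of vapor to moist air
    return 0.622 * ea / (pair - 0.378 * ea)
# ----------------------------------------------------------------------------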