def finish_nc(self, file_nc):
    """
    Add final global metadata to an output netCDF file, then sync and
    close it.

    Args:
        file_nc: open netCDF4 Dataset to finalize; it is closed on
            return, so the caller must not use it afterwards.

    Note:
        The projection is hard-coded to EPSG 26711 (NAD27 / UTM zone
        11N) -- TODO: derive the EPSG code from the topo/config instead
        of hard-coding it here.
        TODO: the 'blank' attribute below is a placeholder string that
        still needs to be filled in.
    """
    file_nc.setncattr_string('Conventions', 'CF-1.6')
    file_nc.setncattr_string(
        'institution',
        'USDA Agricultural Research Service, Northwest Watershed Research Center'
    )
    # placeholder citation attribute (see TODO in docstring)
    file_nc.setncattr_string('blank', 'from Hedrick WRR2018 FILL IN STRING')

    # attach CF projection/grid-mapping info (hard-coded EPSG, see docstring)
    file_nc = add_proj(file_nc, 26711)

    file_nc.sync()
    file_nc.close()
def _open_output_nc(netcdfFile, meta, init, start_date, myawsm, cs, fmt):
    """
    Open an existing Snobal output netCDF in append mode, or create it
    with the given variable metadata.

    Args:
        netcdfFile: full path of the netCDF file to open or create
        meta:       dict with parallel lists 'name', 'units',
                    'description' for the output variables
        init:       dictionary of Snobal initialization images
                    (provides the 'x' and 'y' coordinate vectors)
        start_date: tz-aware date used for the time units string
        myawsm:     awsm class (logger, tzinfo, topo config, output vars)
        cs:         chunk sizes passed to createVariable (None = default)
        fmt:        strftime format for the 'last_modified' attribute

    Returns:
        The open netCDF4 Dataset (caller is responsible for closing it).
    """
    if os.path.isfile(netcdfFile):
        # appending: only refresh metadata, variables already exist
        myawsm._logger.warning(
            'Opening {}, data may be overwritten!'.format(netcdfFile))
        ds = nc.Dataset(netcdfFile, 'a')
        h = '[{}] Data added or updated'.format(datetime.now().strftime(fmt))
        setattr(ds, 'last_modified', h)
        # older files may be missing the projection variable
        if 'projection' not in ds.variables.keys():
            ds = add_proj(ds, None, myawsm.topo.topoConfig['filename'])
    else:
        ds = nc.Dataset(netcdfFile, 'w')
        dimensions = ('time', 'y', 'x')

        # create the dimensions
        ds.createDimension('time', None)
        ds.createDimension('y', len(init['y']))
        ds.createDimension('x', len(init['x']))

        # create the coordinate variables
        ds.createVariable('time', 'f', dimensions[0])
        ds.createVariable('y', 'f', dimensions[1])
        ds.createVariable('x', 'f', dimensions[2])

        # time units must be naive (no tz) for CF compliance
        setattr(ds.variables['time'], 'units',
                'hours since %s' % start_date.tz_localize(None))
        setattr(ds.variables['time'], 'time_zone',
                str(myawsm.tzinfo).lower())
        setattr(ds.variables['time'], 'calendar', 'standard')

        ds.variables['x'][:] = init['x']
        ds.variables['y'][:] = init['y']

        # data variables, only those requested in the config
        for i, v in enumerate(meta['name']):
            if v.lower() in myawsm.pysnobal_output_vars:
                ds.createVariable(v, 'f', dimensions[:3], chunksizes=cs)
                setattr(ds.variables[v], 'units', meta['units'][i])
                setattr(ds.variables[v], 'description',
                        meta['description'][i])

        # add projection info
        ds = add_proj(ds, None, myawsm.topo.topoConfig['filename'])

    return ds


def output_files(options, init, start_date, myawsm):
    """
    Create the snow and em output netCDF files.

    The open datasets are stored in ``options['output']['em']`` and
    ``options['output']['snow']``; the shared open-or-create logic lives
    in :func:`_open_output_nc`.

    Args:
        options: dictionary of Snobal options
        init: dictionary of Snobal initialization images
        start_date: date for time units in files
        myawsm: awsm class
    """
    fmt = '%Y-%m-%d %H:%M:%S'
    # chunk size (None lets netCDF4 pick defaults)
    cs = None

    # ------------------------------------------------------------------------
    # EM (energy balance) netCDF
    m = {
        'name': [
            'net_rad', 'sensible_heat', 'latent_heat', 'snow_soil',
            'precip_advected', 'sum_EB', 'evaporation', 'snowmelt',
            'SWI', 'cold_content'
        ],
        'units': [
            'W m-2', 'W m-2', 'W m-2', 'W m-2', 'W m-2', 'W m-2',
            'kg m-2', 'kg m-2', 'kg or mm m-2', 'J m-2'
        ],
        'description': [
            'Average net all-wave radiation',
            'Average sensible heat transfer',
            'Average latent heat exchange',
            'Average snow/soil heat exchange',
            'Average advected heat from precipitation',
            'Average sum of EB terms for snowcover',
            'Total evaporation',
            'Total snowmelt',
            'Total runoff',
            'Snowcover cold content'
        ],
    }

    emname = myawsm.em_name + '.nc'
    netcdfFile = os.path.join(options['output']['location'], emname)
    options['output']['em'] = _open_output_nc(
        netcdfFile, m, init, start_date, myawsm, cs, fmt)

    # ------------------------------------------------------------------------
    # SNOW netCDF
    s = {
        'name': [
            'thickness', 'snow_density', 'specific_mass', 'liquid_water',
            'temp_surf', 'temp_lower', 'temp_snowcover', 'thickness_lower',
            'water_saturation'
        ],
        'units': [
            'm', 'kg m-3', 'kg m-2', 'kg m-2', 'C', 'C', 'C', 'm', 'percent'
        ],
        'description': [
            'Predicted thickness of the snowcover',
            'Predicted average snow density',
            'Predicted specific mass of the snowcover',
            'Predicted mass of liquid water in the snowcover',
            'Predicted temperature of the surface layer',
            'Predicted temperature of the lower layer',
            'Predicted temperature of the snowcover',
            'Predicted thickness of the lower layer',
            'Predicted percentage of liquid water saturation of the snowcover'
        ],
    }

    snowname = myawsm.snow_name + '.nc'
    netcdfFile = os.path.join(options['output']['location'], snowname)
    options['output']['snow'] = _open_output_nc(
        netcdfFile, s, init, start_date, myawsm, cs, fmt)
def extract_data(self, fname, upload_type='modeled', espg=None, mask=None):
    """
    Copy a (possibly trimmed and masked) version of a netCDF file for
    upload, renaming it with a parsed date.

    Args:
        fname: String path to a local file.
        upload_type: specifies whether to name a file differently,
            either 'modeled' or 'topo'.
        espg: Projection code to use if projection information not found.
            If None, the user will be prompted.
        mask: Optional path to a mask file applied to the copy.

    Returns:
        fname: New name of file where data was extracted.

    Raises:
        ValueError: if upload_type is not 'modeled' or 'topo'.
    """
    # Check for netcdfs; non-netCDF files are returned untouched
    if fname.split('.')[-1] == 'nc':
        # AWSM related items should have a variable called projection
        ds = Dataset(fname, 'r')

        # Base file name
        bname = os.path.basename(fname)

        if upload_type == 'modeled':
            # Add a parsed date to the string to avoid overwriting snow.nc
            self.log.info("Retrieving date from netcdf...")
            time = ds.variables['time']
            dates = num2date(time[:], units=time.units,
                             calendar=time.calendar)
            self.date = dates[0].isoformat().split('T')[0]
            cleaned_date = "".join([c for c in self.date if c not in ':-'])
            bname = bname.split(".")[0] + "_{}.nc".format(cleaned_date)
            fname = bname

            # Only copy some of the variables
            keep_vars = [
                'x', 'y', 'time', 'snow_density', 'specific_mass',
                'thickness', 'projection'
            ]
            exclude_vars = [
                v for v in ds.variables.keys() if v not in keep_vars
            ]
            mask_exclude = []

        elif upload_type == 'topo':
            self.date = dt.today().isoformat().split('T')[0]
            # BUG FIX: this previously read `date.isoformat()` where
            # `date` was an undefined name (NameError); use the date
            # string just assigned above.
            # NOTE(review): the trailing [:-2] drops the day, leaving
            # YYYYMM -- presumably intentional for topo files; confirm.
            cleaned_date = "".join(
                [c for c in self.date if c not in ':-'])[:-2]
            bname = bname.split(".")[0] + "_{}.nc".format(cleaned_date)
            fname = bname

            # BUG FIX: keep_vars/exclude_vars were previously never
            # assigned on this path, raising NameError at copy_nc() and
            # in the ranges loop below. Topo files keep every variable
            # and report ranges for all data layers.
            keep_vars = list(ds.variables.keys())
            exclude_vars = []
            mask_exclude = ['mask']

        else:
            # previously an unknown type fell through to a NameError
            raise ValueError(
                "upload_type must be 'modeled' or 'topo', got "
                "{!r}".format(upload_type))

        # Create a copy
        self.log.info("Copying netcdf...")
        new_ds = copy_nc(ds, fname, exclude=exclude_vars)

        # Calculate mins and maxes of the data layers (skip coordinates)
        for lyr in [
            l for l in keep_vars
            if l not in ['x', 'y', 'time', 'projection']
        ]:
            self.ranges[lyr] = [
                np.min(new_ds.variables[lyr][:]),
                np.max(new_ds.variables[lyr][:])
            ]

        # Optional Masking
        if mask is not None:
            self.log.info("Masking netcdf using {}...".format(mask))
            new_ds.close()  # close the last one
            new_ds = mask_nc(fname, mask, exclude=mask_exclude)

        # Check for missing projection
        if 'projection' not in new_ds.variables:
            self.log.info("Netcdf is missing projection information...")

            # Missing ESPG from args
            if espg is None:
                espg = input(
                    "No projection detected. Enter the ESPG code for the data:\n"
                )

            self.log.info(
                "Adding projection information using ESPG code {}...".
                format(espg))
            new_ds = add_proj(new_ds, espg)

        # Clean up
        new_ds.close()
        ds.close()

    return fname
def __init__(self, variable_list, topo, time, outConfig):
    """
    Initialize the output_netcdf() class

    For every variable, either reopen an existing output file in append
    mode (refreshing its 'last_modified' and projection metadata) or
    create a new NETCDF4 file with time/y/x dimensions, coordinate
    variables, CF global attributes, and projection information.

    Args:
        variable_list: list of dicts, one for each variable; each entry
            is extended in place with a 'file_name' key
        topo: loadTopo instance
        time: dict-like with 'time_step', 'start_date' and 'time_zone'
        outConfig: output config section; must provide 'frequency'
    """
    self._logger = logging.getLogger(__name__)

    # go through the variable list and make full file names
    # NOTE: mutates the caller's variable_list entries in place
    for v in variable_list:
        variable_list[v]['file_name'] = \
            str(variable_list[v]['out_location'] + '.nc')

    self.variable_list = variable_list

    # process the time section
    self.run_time_step = int(time['time_step'])
    self.out_frequency = int(outConfig['frequency'])
    self.outConfig = outConfig

    # determine the x,y vectors for the netCDF file
    x = topo.x
    y = topo.y
    self.mask = topo.mask
    dimensions = ('time', 'y', 'x')
    self.date_time = {}

    # timestamp reused for all history/last_modified attributes
    # (self.fmt is expected to be a class attribute -- not visible here)
    now_str = datetime.now().strftime(self.fmt)

    # Retrieve projection information from topo
    map_meta = add_proj_from_file(topo.topoConfig['filename'])

    for v in self.variable_list:
        f = self.variable_list[v]

        if os.path.isfile(f['file_name']):
            self._logger.warning(
                'Opening {}, data may be overwritten!'.format(
                    f['file_name']))

            # open in append mode
            s = nc.Dataset(f['file_name'], 'a')
            h = '[{}] Data added or updated'.format(now_str)
            setattr(s, 'last_modified', h)

            # older files may be missing the projection variable
            if 'projection' not in s.variables.keys():
                s = add_proj(s, map_meta=map_meta)

        else:
            self._logger.debug('Creating %s' % f['file_name'])
            s = nc.Dataset(f['file_name'], 'w',
                           format='NETCDF4', clobber=True)

            # add dimensions (time is unlimited)
            s.createDimension(dimensions[0], None)
            s.createDimension(dimensions[1], y.shape[0])
            s.createDimension(dimensions[2], x.shape[0])

            # create the variables
            s.createVariable('time', 'f', (dimensions[0]))
            s.createVariable('y', 'f', dimensions[1])
            s.createVariable('x', 'f', dimensions[2])
            # self.cs: chunk sizes, expected class attribute (not
            # visible here)
            s.createVariable(f['variable'], 'f',
                             (dimensions[0], dimensions[1],
                              dimensions[2]),
                             chunksizes=self.cs)

            # define some attributes
            s.variables['time'].setncattr(
                'units',
                'hours since {}'.format(time['start_date']))
            s.variables['time'].setncattr('calendar', 'standard')
            s.variables['time'].setncattr('time_zone',
                                          time['time_zone'])
            s.variables['time'].setncattr('long_name', 'time')

            # the y variable attributes
            s.variables['y'].setncattr('units', 'meters')
            s.variables['y'].setncattr('description', 'UTM, north south')
            s.variables['y'].setncattr('long_name', 'y coordinate')

            # the x variable attributes
            s.variables['x'].setncattr('units', 'meters')
            s.variables['x'].setncattr('description', 'UTM, east west')
            s.variables['x'].setncattr('long_name', 'x coordinate')

            # the variable attributes
            s.variables[f['variable']].setncattr('module', f['module'])
            s.variables[f['variable']].setncattr('units',
                                                 f['info']['units'])
            s.variables[f['variable']].setncattr('long_name',
                                                 f['info']['long_name'])

            # define some global attribute
            s.setncattr_string('Conventions', 'CF-1.6')
            s.setncattr_string('dateCreated', now_str)
            s.setncattr_string('title', 'Distributed {0} data from SMRF'
                                        ''.format(f['info']['long_name']))
            s.setncattr_string('history', ('[{}] Create netCDF4 file'
                                           '').format(now_str))
            s.setncattr_string('institution',
                               ('USDA Agricultural Research'
                                ' Service, Northwest'
                                ' Watershed Research'
                                ' Center'))
            s = add_proj(s, map_meta=map_meta)
            s.setncattr_string(
                'references',
                'Online documentation smrf.readthedocs.io;'
                ' https://doi.org/10.1016/j.cageo.2017.08.016')

        # NOTE(review): original indentation was lost; these final
        # statements appear to run for both the append and create
        # branches -- confirm against upstream SMRF source
        s.variables['y'][:] = y
        s.variables['x'][:] = x
        s.setncattr_string('source',
                           'SMRF {}'.format(utils.getgitinfo()))
        # close now; the dataset is reopened when data is written
        s.close()