class Rvar(object):
    """ Creates an RVIC structure """

    # ---------------------------------------------------------------- #
    # Initialize
    def __init__(self, param_file, case_name, calendar, out_dir, file_format,
                 zlib=True, complevel=4, least_significant_digit=None):
        self.param_file = param_file
        f = Dataset(param_file, 'r')
        self.n_sources = len(f.dimensions['sources'])
        self.n_outlets = len(f.dimensions['outlets'])
        self.subset_length = int(f.variables['subset_length'][:])
        self.full_time_length = int(f.variables['full_time_length'][:])
        self.unit_hydrograph_dt = f.variables['unit_hydrograph_dt'][:]
        self.source_lon = f.variables['source_lon'][:]
        self.source_lat = f.variables['source_lat'][:]
        self.source_x_ind = f.variables['source_x_ind'][:]
        self.source_y_ind = f.variables['source_y_ind'][:]
        self.source_time_offset = f.variables['source_time_offset'][:]
        self.source2outlet_ind = f.variables['source2outlet_ind'][:]
        self.outlet_x_ind = f.variables['outlet_x_ind'][:]
        self.outlet_y_ind = f.variables['outlet_y_ind'][:]
        self.outlet_lon = f.variables['outlet_lon'][:]
        self.outlet_lat = f.variables['outlet_lat'][:]
        self.outlet_mask = f.variables['outlet_mask'][:]
        self.outlet_decomp_ind = f.variables['outlet_decomp_ind'][:]
        self.unit_hydrograph = {}
        for tracer in RVIC_TRACERS:
            tname = 'unit_hydrograph_{0}'.format(tracer)
            try:
                self.unit_hydrograph[tracer] = f.variables[tname][:]
            except KeyError:
                log.warning('Could not find unit hydrograph var %s', tname)
                log.warning('trying var name unit_hydrograph')
                self.unit_hydrograph[tracer] = \
                    f.variables['unit_hydrograph'][:]
            except:
                raise ValueError('Cannot find unit hydrograph variable')
        self.outlet_name = f.variables['outlet_name'][:]
        self.RvicDomainFile = f.RvicDomainFile
        self.RvicPourPointsFile = f.RvicPourPointsFile
        self.RvicUHFile = f.RvicUHFile
        self.RvicFdrFile = f.RvicFdrFile
        self.file_format = file_format

        try:
            self.outlet_upstream_area = f.variables['outlet_upstream_area'][:]
        except:
            self.outlet_upstream_area = None

        self.glob_atts = NcGlobals(title='RVIC restart file',
                                   RvicPourPointsFile=f.RvicPourPointsFile,
                                   RvicUHFile=f.RvicUHFile,
                                   RvicFdrFile=f.RvicFdrFile,
                                   RvicDomainFile=f.RvicDomainFile,
                                   casename=case_name)
        f.close()
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Initialize state variables
        self.ring = {}
        for tracer in RVIC_TRACERS:
            self.ring[tracer] = np.zeros((self.full_time_length,
                                          self.n_outlets,),
                                         dtype=np.float64)
        # ------------------------------------------------------------ #

        self._calendar = calendar
        self.__fname_format = os.path.join(
            out_dir, "%s.r.%%Y-%%m-%%d-%%H-%%M-%%S.nc" % (case_name))

        # ------------------------------------------------------------ #
        # CESM calendar key (only NO_LEAP_C, GREGORIAN are supported in CESM)
        self._calendar_key = 0
        for key, cals in CALENDAR_KEYS.iteritems():
            if self._calendar in cals:
                self._calendar_key = key
                break
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # netCDF variable options
        self.ncvaropts = {'zlib': zlib,
                          'complevel': complevel,
                          'least_significant_digit': least_significant_digit}
        # ------------------------------------------------------------ #
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Check that domain file matches
    def _check_domain_file(self, domain_file):
        """
        Confirm that the domain files match in the parameter and domain files
        """
        input_file = os.path.split(domain_file)[1]
        log.info('domain_file: %s', input_file)
        log.info('Parameter RvicDomainFile: %s', self.RvicDomainFile)
        if input_file == self.RvicDomainFile:
            log.info('dom files match in parameter and domain file')
        else:
            raise ValueError('dom files do not match in parameter and '
                             'domain file')
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    def set_domain(self, dom_data, domain, lat0_is_min):
        """ Set the domain size """
        self._check_domain_file(domain['FILE_NAME'])

        self.domain_shape = dom_data[domain['LAND_MASK_VAR']].shape
        self.ysize = self.domain_shape[0]
        self.xsize = self.domain_shape[1]

        if self.source_y_ind.max() >= self.ysize:
            raise ValueError('source_y_ind.max() ({0}) >= domain ysize '
                             '({1})'.format(self.source_y_ind.max(),
                                            self.ysize))
        if self.source_x_ind.max() >= self.xsize:
            raise ValueError('source_x_ind.max() ({0}) >= domain xsize '
                             '({1})'.format(self.source_x_ind.max(),
                                            self.xsize))
        log.info('set domain')

        if lat0_is_min:
            log.info('Flipping Parameter File Y inds...')
            self._flip_y_inds()
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Flip the y index order
    def _flip_y_inds(self):
        """ Flip the y index order """
        self.source_y_ind = self.ysize - self.source_y_ind - 1
        self.outlet_y_ind = self.ysize - self.outlet_y_ind - 1
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Initialize State
    def init_state(self, state_file, run_type, timestamp):
        if run_type in ['startup', 'restart']:
            log.info('reading state_file: %s', state_file)
            f = Dataset(state_file, 'r')
            for tracer in RVIC_TRACERS:
                self.ring[tracer] = f.variables['{0}_ring'.format(tracer)][:]
            file_timestamp = ord_to_datetime(
                f.variables['time'][:], f.variables['time'].units,
                calendar=f.variables['time'].calendar)

            if run_type == 'restart':
                self.timestamp = file_timestamp
            elif run_type == 'startup':
                self.timestamp = timestamp
                if timestamp != file_timestamp:
                    log.warning('restart timestamps do not match (%s, %s)',
                                file_timestamp, self.timestamp)
                    log.warning('Runtype is startup so model will continue')
            else:
                raise ValueError('unknown run_type: {0}'.format(run_type))

            # Check that timestep and outlet_decomp_ids match ParamFile
            if f.variables['unit_hydrograph_dt'][:] != self.unit_hydrograph_dt:
                raise ValueError('Timestep in Statefile does not match '
                                 'timestep in ParamFile')

            if not np.array_equal(f.variables['outlet_decomp_ind'][:],
                                  self.outlet_decomp_ind):
                raise ValueError('outlet_decomp_ind in Statefile does not '
                                 'match ParamFile')

            if f.RvicDomainFile != self.RvicDomainFile:
                raise ValueError('RvicDomainFile in StateFile does not match '
                                 'ParamFile')

            f.close()

        elif run_type == 'drystart':
            log.info('run_type is drystart so no state_file will be read')
            self.timestamp = timestamp

        self.time_ord = date2num(self.timestamp, TIMEUNITS,
                                 calendar=self._calendar)
        self._start_date = self.timestamp
        self._start_ord = self.time_ord
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Convolve
    def convolve(self, aggrunin, time_ord):
        """
        This convolution function works by looping over all points and doing
        the convolution one timestep at a time.  This is accomplished by
        creating a convolution ring.  Contributing flow from each timestep is
        added to the convolution ring.  The convolution ring is saved as the
        state.  The first column of values in the ring is the current runoff.
        """
        # ------------------------------------------------------------ #
        # Check that the time_ord is in sync
        # This is the time at the start of the current step (end of last step)
        if self.time_ord != time_ord:
            log.error('rout_var.time_ord = %s, time_ord = %s',
                      self.time_ord, time_ord)
            raise ValueError('rout_var.time_ord does not match the time_ord '
                             'passed in by the convolution call')
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Do the convolution
        log.debug('convolving')
        for tracer in RVIC_TRACERS:
            # -------------------------------------------------------- #
            # First update the ring
            log.debug('rolling the ring')

            # Zero out current ring
            self.ring[tracer][0, :] = 0.

            # Equivalent to Fortran 90 cshift function
            self.ring[tracer] = np.roll(self.ring[tracer], -1, axis=0)
            # -------------------------------------------------------- #

            # -------------------------------------------------------- #
            # C convolution call
            rvic_convolve(self.n_sources,
                          self.n_outlets,
                          self.subset_length,
                          self.xsize,
                          self.source2outlet_ind,
                          self.source_y_ind,
                          self.source_x_ind,
                          self.source_time_offset,
                          self.unit_hydrograph[tracer][:, :],
                          aggrunin[tracer],
                          self.ring[tracer][:, :])
            # -------------------------------------------------------- #
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # move the time_ord forward
        self.time_ord += self.unit_hydrograph_dt / SECSPERDAY
        self.timestamp = ord_to_datetime(self.time_ord, TIMEUNITS,
                                         calendar=self._calendar)

        return self.timestamp
        # ------------------------------------------------------------ #
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    def get_time_mode(self, cpl_secs_per_step):
        """
        Determine the relationship between the coupling period and the
        unit-hydrograph period.  In cases where they do not match, the model
        will aggregate the appropriate quantities before/after the
        convolution step.
        """
        log.info('Coupling Timestep is (seconds): %s', cpl_secs_per_step)
        log.info('RVIC Timestep is (seconds): %s', self.unit_hydrograph_dt)

        if (self.unit_hydrograph_dt % cpl_secs_per_step == 0) and \
                (self.unit_hydrograph_dt >= cpl_secs_per_step):
            self.agg_tsteps = self.unit_hydrograph_dt / cpl_secs_per_step
        else:
            log.error('unit_hydrograph_dt must be a multiple of the '
                      'cpl_secs_per_step')
            raise ValueError("Stopped due to error in determining agg_tsteps")

        log.info('RVIC will run 1 time for every %i coupling periods',
                 self.agg_tsteps)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    def get_rof(self):
        """Extract the current rof"""
        rof = {}
        for tracer in RVIC_TRACERS:
            # Current timestep flux (units=kg m-2 s-1)
            rof[tracer] = self.ring[tracer][0, :]
        return rof
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    def get_storage(self):
        """Extract the current storage"""
        storage = {}
        for tracer in RVIC_TRACERS:
            storage[tracer] = self.ring[tracer].sum(axis=1)
        return storage
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    def write_initial(self):
        """write initial flux"""
        pass
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Write the current state
    def write_restart(self, current_history_files, history_restart_files):
        # ------------------------------------------------------------ #
        # Open file
        filename = self.timestamp.strftime(self.__fname_format)
        f = Dataset(filename, 'w', self.file_format)
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Time Variables

        # Current time
        time = f.createDimension('time', 1)
        time = f.createVariable('time', NC_DOUBLE, ('time', ),
                                **self.ncvaropts)
        time[:] = date2num(self.timestamp, TIMEUNITS, calendar=self._calendar)
        for key, val in share.time.__dict__.iteritems():
            if val:
                setattr(time, key, val)
        time.calendar = self._calendar

        # Timesteps
        timesteps = f.createDimension('timesteps', self.full_time_length)
        timesteps = f.createVariable('timesteps', NC_DOUBLE, ('timesteps', ),
                                     **self.ncvaropts)
        timesteps[:] = np.arange(self.full_time_length)
        for key, val in share.timesteps.__dict__.iteritems():
            if val:
                setattr(timesteps, key, val)
        timesteps.timestep_length = 'unit_hydrograph_dt'

        # UH timestep
        unit_hydrograph_dt = f.createVariable('unit_hydrograph_dt',
                                              NC_DOUBLE, (), **self.ncvaropts)
        unit_hydrograph_dt[:] = self.unit_hydrograph_dt
        for key, val in share.unit_hydrograph_dt.__dict__.iteritems():
            if val:
                setattr(unit_hydrograph_dt, key, val)

        timemgr_rst_type = f.createVariable('timemgr_rst_type', NC_DOUBLE, (),
                                            **self.ncvaropts)
        timemgr_rst_type[:] = self._calendar_key
        for key, val in share.timemgr_rst_type.__dict__.iteritems():
            if val:
                setattr(timemgr_rst_type, key, val)

        timemgr_rst_step_sec = f.createVariable('timemgr_rst_step_sec',
                                                NC_DOUBLE, (),
                                                **self.ncvaropts)
        timemgr_rst_step_sec[:] = self.unit_hydrograph_dt
        for key, val in share.timemgr_rst_step_sec.__dict__.iteritems():
            if val:
                setattr(timemgr_rst_step_sec, key, val)

        timemgr_rst_start_ymd = f.createVariable('timemgr_rst_start_ymd',
                                                 NC_DOUBLE, (),
                                                 **self.ncvaropts)
        timemgr_rst_start_ymd[:] = self._start_date.year * 10000 \
            + self._start_date.month * 100 + self._start_date.day
        for key, val in share.timemgr_rst_start_ymd.__dict__.iteritems():
            if val:
                setattr(timemgr_rst_start_ymd, key, val)

        timemgr_rst_start_tod = f.createVariable('timemgr_rst_start_tod',
                                                 NC_DOUBLE, (),
                                                 **self.ncvaropts)
        timemgr_rst_start_tod[:] = (self._start_ord % 1) * SECSPERDAY
        for key, val in share.timemgr_rst_start_tod.__dict__.iteritems():
            if val:
                setattr(timemgr_rst_start_tod, key, val)

        timemgr_rst_ref_ymd = f.createVariable('timemgr_rst_ref_ymd',
                                               NC_DOUBLE, (),
                                               **self.ncvaropts)
        timemgr_rst_ref_ymd[:] = REFERENCE_DATE
        for key, val in share.timemgr_rst_ref_ymd.__dict__.iteritems():
            if val:
                setattr(timemgr_rst_ref_ymd, key, val)

        timemgr_rst_ref_tod = f.createVariable('timemgr_rst_ref_tod',
                                               NC_DOUBLE, (),
                                               **self.ncvaropts)
        timemgr_rst_ref_tod[:] = REFERENCE_TIME
        for key, val in share.timemgr_rst_ref_tod.__dict__.iteritems():
            if val:
                setattr(timemgr_rst_ref_tod, key, val)

        timemgr_rst_curr_ymd = f.createVariable('timemgr_rst_curr_ymd',
                                                NC_DOUBLE, (),
                                                **self.ncvaropts)
        timemgr_rst_curr_ymd[:] = self.timestamp.year * 10000 + \
            self.timestamp.month * 100 + self.timestamp.day
        for key, val in share.timemgr_rst_curr_ymd.__dict__.iteritems():
            if val:
                setattr(timemgr_rst_curr_ymd, key, val)

        timemgr_rst_curr_tod = f.createVariable('timemgr_rst_curr_tod',
                                                NC_DOUBLE, (),
                                                **self.ncvaropts)
        timemgr_rst_curr_tod[:] = (self.time_ord % 1) * SECSPERDAY
        for key, val in share.timemgr_rst_curr_tod.__dict__.iteritems():
            if val:
                setattr(timemgr_rst_curr_tod, key, val)
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Setup Tape Dimensions
        coords = ('tapes', 'max_chars')
        ntapes = f.createDimension(coords[0], len(history_restart_files))
        ntapes = f.createDimension(coords[1], MAX_NC_CHARS)
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Write Fields
        locfnh = f.createVariable('locfnh', NC_CHAR, coords, **self.ncvaropts)
        for i, string in enumerate(current_history_files):
            locfnh[i, :] = stringtochar(np.array(string.ljust(MAX_NC_CHARS)))
        locfnh.long_name = 'History filename'
        locfnh.comment = 'This variable is NOT needed for startup or ' \
                         'branch simulations'

        locfnhr = f.createVariable('locfnhr', NC_CHAR, coords,
                                   **self.ncvaropts)
        for i, string in enumerate(history_restart_files):
            locfnhr[i, :] = stringtochar(np.array(string.ljust(MAX_NC_CHARS)))
        locfnhr.long_name = 'History restart filename'
        locfnhr.comment = 'This variable is NOT needed for startup or ' \
                          'branch simulations'
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Setup Point Dimensions
        coords = ('outlets', )
        outlets = f.createDimension(coords[0], self.n_outlets)
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Write Fields
        oyi = f.createVariable('outlet_y_ind', NC_INT, coords[0],
                               **self.ncvaropts)
        oyi[:] = self.outlet_y_ind
        for key, val in share.outlet_y_ind.__dict__.iteritems():
            if val:
                setattr(oyi, key, val)

        oxi = f.createVariable('outlet_x_ind', NC_INT, coords[0],
                               **self.ncvaropts)
        oxi[:] = self.outlet_x_ind
        for key, val in share.outlet_x_ind.__dict__.iteritems():
            if val:
                setattr(oxi, key, val)

        odi = f.createVariable('outlet_decomp_ind', NC_INT, coords[0],
                               **self.ncvaropts)
        odi[:] = self.outlet_decomp_ind
        for key, val in share.outlet_decomp_ind.__dict__.iteritems():
            if val:
                setattr(odi, key, val)

        tcoords = ('timesteps', ) + coords

        for tracer in RVIC_TRACERS:
            ring = f.createVariable('{0}_ring'.format(tracer),
                                    NC_DOUBLE, tcoords, **self.ncvaropts)
            ring[:, :] = self.ring[tracer][:, :]

            for key, val in share.ring.__dict__.iteritems():
                if val:
                    setattr(ring, key, val)
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # write global attributes
        self.glob_atts.update()
        for key, val in self.glob_atts.atts.iteritems():
            if val:
                setattr(f, key, val)
        # ------------------------------------------------------------ #

        f.close()
        log.info('Finished writing %s', filename)

        return filename
    # ---------------------------------------------------------------- #
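# ---------------------------------------------------------------------- #
# Illustrative sketch (not part of the model): the convolve() docstring above
# describes the convolution ring, but the actual accumulation is delegated to
# the compiled rvic_convolve call.  The helper below reproduces the same
# bookkeeping in plain NumPy for a single tracer so the ring mechanics are
# easier to follow.  All sizes, offsets, and runoff values are made up for
# illustration; this is not a substitute for the C extension.
def _example_convolution_ring():
    import numpy as np

    full_time_length, n_outlets = 5, 2
    subset_length = 3
    source2outlet_ind = np.array([0, 0, 1])    # outlet fed by each source
    source_time_offset = np.array([0, 1, 0])   # travel-time lag in timesteps
    unit_hydrograph = np.ones((subset_length, 3)) / subset_length
    runoff_at_sources = np.array([1.0, 2.0, 0.5])

    ring = np.zeros((full_time_length, n_outlets), dtype=np.float64)

    # discard the flux that was just exported and rotate the ring forward
    ring[0, :] = 0.
    ring = np.roll(ring, -1, axis=0)

    # add each source's contribution at the timesteps it reaches its outlet
    for s, outlet in enumerate(source2outlet_ind):
        j = source_time_offset[s]
        ring[j:j + subset_length, outlet] += (unit_hydrograph[:, s] *
                                              runoff_at_sources[s])

    return ring[0, :]   # current-timestep flux, analogous to get_rof()
# ---------------------------------------------------------------------- #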
def finish_params(outlets, dom_data, config_dict, directories):
    """
    Adjust the unit hydrographs and pack for parameter file
    """
    options = config_dict['OPTIONS']
    routing = config_dict['ROUTING']
    domain = config_dict['DOMAIN']
    dom_area = domain['AREA_VAR']
    dom_frac = domain['FRACTION_VAR']

    # ------------------------------------------------------------ #
    # netCDF variable options
    ncvaropts = {}
    if 'NETCDF_ZLIB' in options:
        ncvaropts['zlib'] = options['NETCDF_ZLIB']
    if 'NETCDF_COMPLEVEL' in options:
        ncvaropts['complevel'] = options['NETCDF_COMPLEVEL']
    if 'NETCDF_SIGFIGS' in options:
        ncvaropts['least_significant_digit'] = options['NETCDF_SIGFIGS']
    # ------------------------------------------------------------ #

    # ---------------------------------------------------------------- #
    # subset (shorten time base)
    if options['SUBSET_DAYS'] and \
            options['SUBSET_DAYS'] < routing['BASIN_FLOWDAYS']:
        subset_length = (options['SUBSET_DAYS'] * SECSPERDAY /
                         routing['OUTPUT_INTERVAL'])
        outlets, full_time_length, \
            before, after = subset(outlets, subset_length=subset_length)

        slc = slice(min(len(before), 1000))

        log.debug('plotting unit hydrograph timeseries now for before/after '
                  'subsetting')

        title = 'UHS before subset'
        pfname = plots.uhs(before[slc], title, options['CASEID'],
                           directories['plots'])
        log.info('%s Plot: %s', title, pfname)

        title = 'UHS after subset'
        pfname = plots.uhs(after[slc], title, options['CASEID'],
                           directories['plots'])
        log.info('%s Plot: %s', title, pfname)
    else:
        log.info('Not subsetting because either SUBSET_DAYS is null or '
                 'SUBSET_DAYS >= BASIN_FLOWDAYS')
        for outlet in outlets.itervalues():
            outlet.offset = np.zeros(outlet.unit_hydrograph.shape[1],
                                     dtype=np.int16)
            full_time_length = outlet.unit_hydrograph.shape[0]
        subset_length = full_time_length
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # adjust fractions
    if options['CONSTRAIN_FRACTIONS']:
        adjust = True
        log.info('Adjusting Fractions to be less than or equal to '
                 'domain fractions')
    else:
        adjust = False
    outlets, plot_dict = adjust_fractions(outlets,
                                          dom_data[domain['FRACTION_VAR']],
                                          adjust=adjust)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Calculate the upstream area and upstream grid cells
    # The upstream_area must be calculated after adjust_fractions
    for i, outlet in outlets.iteritems():
        outlet.upstream_gridcells = len(outlet.y_source)
        outlet.upstream_area = np.sum(
            dom_data[dom_area][outlet.y_source, outlet.x_source] *
            dom_data[dom_frac][outlet.y_source, outlet.x_source])
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Group
    grouped_data = group(outlets, subset_length)

    # unpack grouped data
    unit_hydrograph = grouped_data['unit_hydrograph']
    frac_sources = grouped_data['frac_sources']
    source_lon = grouped_data['source_lon']
    source_lat = grouped_data['source_lat']
    source_x_ind = grouped_data['source_x_ind']
    source_y_ind = grouped_data['source_y_ind']
    source_decomp_ind = grouped_data['source_decomp_ind']
    source_time_offset = grouped_data['source_time_offset']
    source2outlet_ind = grouped_data['source2outlet_ind']
    outlet_lon = grouped_data['outlet_lon']
    outlet_lat = grouped_data['outlet_lat']
    outlet_x_ind = grouped_data['outlet_x_ind']
    outlet_y_ind = grouped_data['outlet_y_ind']
    outlet_decomp_ind = grouped_data['outlet_decomp_ind']
    outlet_number = grouped_data['outlet_number']
    outlet_name = grouped_data['outlet_name']
    outlet_upstream_area = grouped_data['outlet_upstream_area']
    outlet_upstream_gridcells = grouped_data['outlet_upstream_gridcells']
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Adjust Unit Hydrographs for differences in source/outlet areas and
    # fractions
    area = dom_data[domain['AREA_VAR']]

    if outlet_y_ind.ndim == 0 or outlet_x_ind.ndim == 0:
        for source, outlet in enumerate(source2outlet_ind):
            unit_hydrograph[:, source] *= area[source_y_ind[source],
                                               source_x_ind[source]]
            unit_hydrograph[:, source] /= area[outlet_y_ind[()],
                                               outlet_x_ind[()]]
            unit_hydrograph[:, source] *= frac_sources[source]
    else:
        for source, outlet in enumerate(source2outlet_ind):
            unit_hydrograph[:, source] *= area[source_y_ind[source],
                                               source_x_ind[source]]
            unit_hydrograph[:, source] /= area[outlet_y_ind[outlet],
                                               outlet_x_ind[outlet]]
            unit_hydrograph[:, source] *= frac_sources[source]
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Make diagnostic plots
    sum_after = np.zeros(dom_data[domain['FRACTION_VAR']].shape)
    for i, (y, x) in enumerate(zip(source_y_ind, source_x_ind)):
        sum_after[y, x] += unit_hydrograph[:, i].sum()
    plot_dict['Sum UH Final'] = sum_after

    dom_y = dom_data[domain['LATITUDE_VAR']]
    dom_x = dom_data[domain['LONGITUDE_VAR']]

    for title, data in plot_dict.iteritems():
        pfname = plots.fractions(data, dom_x, dom_y, title,
                                 options['CASEID'], directories['plots'])
        log.info('%s Plot: %s', title, pfname)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # fill in some misc arrays
    if outlet_y_ind.ndim == 0:
        numoutlets = 1
    else:
        numoutlets = len(outlet_lon)
    outlet_mask = np.zeros(numoutlets)
    newshape = unit_hydrograph.shape + (1, )
    unit_hydrograph = unit_hydrograph.reshape(newshape)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Write parameter file
    today = date.today().strftime('%Y%m%d')
    param_file = os.path.join(directories['params'],
                              '{0}.rvic.prm.{1}.{2}.nc'.format(
                                  options['CASEID'], options['GRIDID'],
                                  today))

    if 'NEW_DOMAIN' in config_dict.keys():
        dom_file_name = config_dict['NEW_DOMAIN']['FILE_NAME']
    else:
        dom_file_name = config_dict['DOMAIN']['FILE_NAME']

    glob_atts = NcGlobals(
        title='RVIC parameter file',
        RvicPourPointsFile=os.path.split(
            config_dict['POUR_POINTS']['FILE_NAME'])[1],
        RvicUHFile=os.path.split(config_dict['UH_BOX']['FILE_NAME'])[1],
        RvicFdrFile=os.path.split(routing['FILE_NAME'])[1],
        RvicDomainFile=os.path.split(dom_file_name)[1])

    write_param_file(param_file,
                     nc_format=options['NETCDF_FORMAT'],
                     glob_atts=glob_atts,
                     full_time_length=full_time_length,
                     subset_length=subset_length,
                     unit_hydrograph_dt=routing['OUTPUT_INTERVAL'],
                     outlet_lon=outlet_lon,
                     outlet_lat=outlet_lat,
                     outlet_x_ind=outlet_x_ind,
                     outlet_y_ind=outlet_y_ind,
                     outlet_decomp_ind=outlet_decomp_ind,
                     outlet_number=outlet_number,
                     outlet_mask=outlet_mask,
                     outlet_name=outlet_name,
                     outlet_upstream_gridcells=outlet_upstream_gridcells,
                     outlet_upstream_area=outlet_upstream_area,
                     source_lon=source_lon,
                     source_lat=source_lat,
                     source_x_ind=source_x_ind,
                     source_y_ind=source_y_ind,
                     source_decomp_ind=source_decomp_ind,
                     source_time_offset=source_time_offset,
                     source2outlet_ind=source2outlet_ind,
                     unit_hydrograph=unit_hydrograph,
                     **ncvaropts)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # write a summary of what was done to the log file.
    log.info('Parameter file includes %i outlets', len(outlets))
    log.info('Parameter file includes %i Source Points', len(source_lon))
    # ---------------------------------------------------------------- #

    return param_file, today
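# ---------------------------------------------------------------------- #
# Worked example of the subsetting arithmetic used in finish_params above.
# The values below are hypothetical configuration entries, not model
# defaults: subsetting only happens when SUBSET_DAYS is set and smaller
# than BASIN_FLOWDAYS, and subset_length is the number of routing
# timesteps kept from each unit hydrograph.
def _example_subset_length():
    secsperday = 86400
    subset_days = 10          # options['SUBSET_DAYS']
    output_interval = 3600    # routing['OUTPUT_INTERVAL'], seconds
    basin_flowdays = 100      # routing['BASIN_FLOWDAYS']

    if subset_days and subset_days < basin_flowdays:
        # 10 days of 3600-s timesteps -> 240 timesteps retained
        return subset_days * secsperday // output_interval
    return None
# ---------------------------------------------------------------------- #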
def write_param_file(file_name,
                     nc_format='NETCDF3_CLASSIC',
                     glob_atts=NcGlobals(),
                     full_time_length=None,
                     subset_length=None,
                     unit_hydrograph_dt=None,
                     outlet_lon=None,
                     outlet_lat=None,
                     outlet_x_ind=None,
                     outlet_y_ind=None,
                     outlet_decomp_ind=None,
                     outlet_number=None,
                     outlet_mask=None,
                     outlet_name=None,
                     outlet_upstream_gridcells=None,
                     outlet_upstream_area=None,
                     source_lon=None,
                     source_lat=None,
                     source_x_ind=None,
                     source_y_ind=None,
                     source_decomp_ind=None,
                     source_time_offset=None,
                     source2outlet_ind=None,
                     source_tracer=None,
                     unit_hydrograph=None,
                     zlib=True,
                     complevel=4,
                     least_significant_digit=None):
    """ Write a standard RVIC Parameter file """

    # ---------------------------------------------------------------- #
    # netCDF variable options
    ncvaropts = {'zlib': zlib,
                 'complevel': complevel,
                 'least_significant_digit': least_significant_digit}
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Open file
    f = Dataset(file_name, 'w', format=nc_format)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Time Variables

    # Timesteps
    timesteps = f.createDimension('timesteps', subset_length)
    timesteps = f.createVariable('timesteps', NC_DOUBLE, ('timesteps', ),
                                 **ncvaropts)
    timesteps[:] = np.arange(subset_length)
    for key, val in share.timesteps.__dict__.iteritems():
        if val:
            setattr(timesteps, key, val)
    timesteps.timestep_length = 'unit_hydrograph_dt'
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # write global attributes
    glob_atts.update()
    for key, val in glob_atts.atts.iteritems():
        if val:
            setattr(f, key, val)
    f.featureType = "timeSeries"
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # 0-D variables

    # Full time length (size of ring)
    ftl = f.createVariable('full_time_length', NC_INT, (), **ncvaropts)
    ftl[:] = full_time_length
    for key, val in share.full_time_length.__dict__.iteritems():
        if val:
            setattr(ftl, key, val)

    # Subset Length
    sl = f.createVariable('subset_length', NC_INT, (), **ncvaropts)
    sl[:] = subset_length
    for key, val in share.subset_length.__dict__.iteritems():
        if val:
            setattr(sl, key, val)

    # UH timestep
    uh_dt = f.createVariable('unit_hydrograph_dt', NC_DOUBLE, (), **ncvaropts)
    uh_dt[:] = unit_hydrograph_dt
    for key, val in share.unit_hydrograph_dt.__dict__.iteritems():
        if val:
            setattr(uh_dt, key, val)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Outlet Dimensions
    if outlet_y_ind.ndim == 0:
        numoutlets = 1
        outlet_name = np.array([outlet_name])
    else:
        numoutlets = len(outlet_lon)
    ocoords = ('outlets', )
    outlets = f.createDimension(ocoords[0], numoutlets)

    nocoords = ocoords + ('nc_chars', )
    char_names = stringtochar(outlet_name)
    chars = f.createDimension(nocoords[1], char_names.shape[1])
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # 1-D Outlet Variables

    # Outlet Cell Longitudes
    olon = f.createVariable('outlet_lon', NC_DOUBLE, ocoords, **ncvaropts)
    olon[:] = outlet_lon
    for key, val in share.outlet_lon.__dict__.iteritems():
        if val:
            setattr(olon, key, val)

    # Outlet Cell Latitudes
    olat = f.createVariable('outlet_lat', NC_DOUBLE, ocoords, **ncvaropts)
    olat[:] = outlet_lat
    for key, val in share.outlet_lat.__dict__.iteritems():
        if val:
            setattr(olat, key, val)

    # Outlet Cell X Indices
    oxi = f.createVariable('outlet_x_ind', NC_INT, ocoords, **ncvaropts)
    oxi[:] = outlet_x_ind
    for key, val in share.outlet_x_ind.__dict__.iteritems():
        if val:
            setattr(oxi, key, val)

    # Outlet Cell Y Indices
    oyi = f.createVariable('outlet_y_ind', NC_INT, ocoords, **ncvaropts)
    oyi[:] = outlet_y_ind
    for key, val in share.outlet_y_ind.__dict__.iteritems():
        if val:
            setattr(oyi, key, val)

    # Outlet Cell Decomp IDs
    odi = f.createVariable('outlet_decomp_ind', NC_INT, ocoords, **ncvaropts)
    odi[:] = outlet_decomp_ind
    for key, val in share.outlet_decomp_ind.__dict__.iteritems():
        if val:
            setattr(odi, key, val)

    # Outlet Cell Number
    on = f.createVariable('outlet_number', NC_INT, ocoords, **ncvaropts)
    on[:] = outlet_number
    for key, val in share.outlet_number.__dict__.iteritems():
        if val:
            setattr(on, key, val)

    # Outlet Mask
    om = f.createVariable('outlet_mask', NC_INT, ocoords, **ncvaropts)
    om[:] = outlet_mask
    for key, val in share.outlet_mask.__dict__.iteritems():
        if val:
            setattr(om, key, val)

    # Outlet Upstream area
    oua = f.createVariable('outlet_upstream_area', NC_DOUBLE, ocoords,
                           **ncvaropts)
    oua[:] = outlet_upstream_area
    for key, val in share.outlet_upstream_area.__dict__.iteritems():
        if val:
            setattr(oua, key, val)

    # Outlet Upstream grid cells
    oug = f.createVariable('outlet_upstream_gridcells', NC_INT, ocoords,
                           **ncvaropts)
    oug[:] = outlet_upstream_gridcells
    for key, val in share.outlet_upstream_gridcells.__dict__.iteritems():
        if val:
            setattr(oug, key, val)

    # Outlet Names
    onm = f.createVariable('outlet_name', NC_CHAR, nocoords)
    onm[:, :] = char_names
    for key, val in share.outlet_name.__dict__.iteritems():
        if val:
            setattr(onm, key, val)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Source Dimension
    scoords = ('sources', )
    sources = f.createDimension(scoords[0], len(source_lon))
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # 1D Source Variables

    # Source Cell Longitudes
    slon = f.createVariable('source_lon', NC_DOUBLE, scoords, **ncvaropts)
    slon[:] = source_lon
    for key, val in share.source_lon.__dict__.iteritems():
        if val:
            setattr(slon, key, val)

    # Source Cell Latitudes
    slat = f.createVariable('source_lat', NC_DOUBLE, scoords, **ncvaropts)
    slat[:] = source_lat
    for key, val in share.source_lat.__dict__.iteritems():
        if val:
            setattr(slat, key, val)

    # Source Cell X Indices
    sxi = f.createVariable('source_x_ind', NC_INT, scoords, **ncvaropts)
    sxi[:] = source_x_ind
    for key, val in share.source_x_ind.__dict__.iteritems():
        if val:
            setattr(sxi, key, val)

    # Source Cell Y Indices
    syi = f.createVariable('source_y_ind', NC_INT, scoords, **ncvaropts)
    syi[:] = source_y_ind
    for key, val in share.source_y_ind.__dict__.iteritems():
        if val:
            setattr(syi, key, val)

    # Source Cell Decomp IDs
    sdi = f.createVariable('source_decomp_ind', NC_INT, scoords, **ncvaropts)
    sdi[:] = source_decomp_ind
    for key, val in share.source_decomp_ind.__dict__.iteritems():
        if val:
            setattr(sdi, key, val)

    # Source Cell Time Offset
    sto = f.createVariable('source_time_offset', NC_INT, scoords, **ncvaropts)
    sto[:] = source_time_offset
    for key, val in share.source_time_offset.__dict__.iteritems():
        if val:
            setattr(sto, key, val)

    # Source to Outlet Index Mapping
    s2o = f.createVariable('source2outlet_ind', NC_INT, scoords, **ncvaropts)
    s2o[:] = source2outlet_ind
    for key, val in share.source2outlet_ind.__dict__.iteritems():
        if val:
            setattr(s2o, key, val)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # 3-D Source Variables
    uhcords = ('timesteps', ) + scoords + ('tracers', )
    tracers = f.createDimension(uhcords[2], 1)

    # Unit Hydrographs
    uhs = f.createVariable('unit_hydrograph', NC_DOUBLE, uhcords, **ncvaropts)
    uhs[:, :] = unit_hydrograph
    for key, val in share.unit_hydrograph.__dict__.iteritems():
        if val:
            setattr(uhs, key, val)
    # ---------------------------------------------------------------- #

    f.close()

    log.info('Finished writing %s', file_name)
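# ---------------------------------------------------------------------- #
# Minimal sketch of reading back a file produced by write_param_file with
# netCDF4, e.g. to spot-check dimensions and the packed unit hydrographs.
# The path is hypothetical and only variables created above are accessed.
def _example_read_param_file(fname='sample.rvic.prm.nc'):
    from netCDF4 import Dataset

    f = Dataset(fname, 'r')
    n_outlets = len(f.dimensions['outlets'])
    n_sources = len(f.dimensions['sources'])
    uh = f.variables['unit_hydrograph'][:]   # (timesteps, sources, tracers)
    dt = f.variables['unit_hydrograph_dt'][:]
    f.close()
    return n_outlets, n_sources, uh.shape, dt
# ---------------------------------------------------------------------- #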
class Rvar(object): """ Creates a RVIC structure """ # ---------------------------------------------------------------- # # Initialize def __init__(self, param_file, case_name, calendar, out_dir, file_format): self.param_file = param_file f = Dataset(param_file, 'r+') self.n_sources = len(f.dimensions['sources']) self.n_outlets = len(f.dimensions['outlets']) self.subset_length = f.variables['subset_length'][0] self.full_time_length = f.variables['full_time_length'][0] self.unit_hydrograph_dt = f.variables['unit_hydrograph_dt'][0] self.source_lon = f.variables['source_lon'][:] self.source_lat = f.variables['source_lat'][:] self.source_x_ind = f.variables['source_x_ind'][:] self.source_y_ind = f.variables['source_y_ind'][:] self.source_time_offset = f.variables['source_time_offset'][:] self.source2outlet_ind = f.variables['source2outlet_ind'][:] self.outlet_x_ind = f.variables['outlet_x_ind'][:] self.outlet_y_ind = f.variables['outlet_y_ind'][:] self.outlet_lon = f.variables['outlet_lon'][:] self.outlet_lat = f.variables['outlet_lat'][:] self.outlet_mask = f.variables['outlet_mask'][:] self.outlet_decomp_ind = f.variables['outlet_decomp_ind'][:] self.unit_hydrograph = f.variables['unit_hydrograph'][:] self.RvicDomainFile = f.RvicDomainFile self.RvicPourPointsFile = f.RvicPourPointsFile self.RvicUHFile = f.RvicUHFile self.RvicFdrFile = f.RvicFdrFile self.file_format = file_format self.glob_atts = NcGlobals(title='RVIC restart file', RvicPourPointsFile=f.RvicPourPointsFile, RvicUHFile=f.RvicUHFile, RvicFdrFile=f.RvicFdrFile, RvicDomainFile=f.RvicDomainFile, casename=case_name) f.close() # ------------------------------------------------------------ # # Initialize state variables self.ring = np.zeros((self.full_time_length, self.n_outlets, len(RVIC_TRACERS))) # ------------------------------------------------------------ # self._calendar = calendar self.__fname_format = os.path.join(out_dir, "%s.r.%%Y-%%m-%%d-%%H-%%M-%%S.nc" % (case_name)) # CESM calendar key (only NO_LEAP_C, GREGORIAN are supported in CESM) self._calendar_key = 0 for key, cals in CALENDAR_KEYS.iteritems(): if self._calendar in cals: self._calendar_key = key break # ---------------------------------------------------------------- # # ---------------------------------------------------------------- # # Check that grid file matches def check_grid_file(self, domain_file): """Confirm that the grid files match in the parameter and domain files""" input_file = os.path.split(domain_file)[1] log.info('domain_file: %s' % input_file) log.info('Parameter RvicDomainFile: %s' % self.RvicDomainFile) if input_file == self.RvicDomainFile: log.info('Grid files match in parameter and domain file') else: raise ValueError('Grid files do not match in parameter and domain file') # ---------------------------------------------------------------- # # ---------------------------------------------------------------- # # Initilize State def init_state(self, state_file, run_type, timestamp): if run_type in ['startup', 'restart']: log.info('reading state_file: %s' %state_file) f = Dataset(state_file, 'r+') self.ring = f.variables['ring'][:] file_timestamp = ord_to_datetime(f.variables['time'][:], f.variables['time'].units, calendar=f.variables['time'].calendar) if run_type == 'restart': self.timestamp = file_timestamp elif run_type == 'startup': self.timestamp = timestamp if timestamp != file_timestamp: log.warning('restart timestamps do not match (%s, %s', file_timestamp, self.timestamp) log.warning('Runtype is startup so model will continue') else: raise 
    # ---------------------------------------------------------------- #
    # Convolve
    def convolve(self, aggrunin, time_ord):
        """
        This convolution function works by looping over all points and doing
        the convolution one timestep at a time.  This is accomplished by
        creating a convolution ring.  Contributing flow from each timestep is
        added to the convolution ring.  The convolution ring is saved as the
        state.  The first column of values in the ring are the current runoff.
        """
        # ------------------------------------------------------------ #
        # Check that the time_ord is in sync
        # This is the time at the start of the current step (end of last step)
        if self.time_ord != time_ord:
            log.error('rout_var.time_ord = %s, time_ord = %s' %
                      (self.time_ord, time_ord))
            raise ValueError('rout_var.time_ord does not match the time_ord '
                             'passed in by the convolution call')
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # First update the ring
        log.debug('rolling the ring')
        self.ring[0, :, :] = 0  # Zero out current ring (all tracers)
        self.ring = np.roll(self.ring, 1, axis=0)  # Equivalent to Fortran 90 cshift function
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Do the convolution
        log.debug('convolving')
        # This matches the Fortran implementation; it may be faster to use
        # np.convolve, but that testing can be done later.
        # This is also where the parallelization will happen.
        for nt, tracer in enumerate(RVIC_TRACERS):
            for s, outlet in enumerate(self.source2outlet_ind):
                # loop over all source points
                y = self.source_y_ind[s]
                x = self.source_x_ind[s]
                for i in xrange(self.subset_length):
                    j = i + self.source_time_offset[s]
                    self.ring[j, outlet, nt] += (self.unit_hydrograph[i, s, nt] *
                                                 aggrunin[tracer][y, x])
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Move the time_ord forward
        self.time_ord += self.unit_hydrograph_dt / SECSPERDAY
        self.timestamp = ord_to_datetime(self.time_ord, TIMEUNITS,
                                         calendar=self._calendar)

        return self.timestamp
        # ------------------------------------------------------------ #
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Extract the current rof
    def get_rof(self):
        return self.ring[0, :, 0]  # Current timestep flux (units=kg m-2 s-1)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Extract the current storage
    def get_storage(self):
        return self.ring.sum(axis=1)
    # ---------------------------------------------------------------- #
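    # ---------------------------------------------------------------- #
    # Illustrative note (editor's sketch): the restart filename written by
    # write_restart() below comes from the strftime template set in
    # __init__, so for a hypothetical case named "sample_case" with a
    # timestamp of 1989-09-01 00:00:00 the file would be named
    #     sample_case.r.1989-09-01-00-00-00.nc
    # ---------------------------------------------------------------- #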
    # ---------------------------------------------------------------- #
    # Write the current state
    def write_restart(self, current_history_files, history_restart_files):
        # ------------------------------------------------------------ #
        # Open file
        filename = self.timestamp.strftime(self.__fname_format)
        f = Dataset(filename, 'w', self.file_format)
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Time Variables

        # Current time
        time = f.createDimension('time', 1)
        time = f.createVariable('time', NC_DOUBLE, ('time',))
        time[:] = date2num(self.timestamp, TIMEUNITS, calendar=self._calendar)
        for key, val in share.time.__dict__.iteritems():
            if val:
                setattr(time, key, val)
        time.calendar = self._calendar

        # Timesteps
        timesteps = f.createDimension('timesteps', self.full_time_length)
        timesteps = f.createVariable('timesteps', NC_DOUBLE, ('timesteps',))
        timesteps[:] = np.arange(self.full_time_length)
        for key, val in share.timesteps.__dict__.iteritems():
            if val:
                setattr(timesteps, key, val)
        timesteps.timestep_length = 'unit_hydrograph_dt'

        # UH timestep
        unit_hydrograph_dt = f.createVariable('unit_hydrograph_dt', NC_DOUBLE, ())
        unit_hydrograph_dt[:] = self.unit_hydrograph_dt
        for key, val in share.unit_hydrograph_dt.__dict__.iteritems():
            if val:
                setattr(unit_hydrograph_dt, key, val)

        timemgr_rst_type = f.createVariable('timemgr_rst_type', NC_DOUBLE, ())
        timemgr_rst_type[:] = self._calendar_key
        for key, val in share.timemgr_rst_type.__dict__.iteritems():
            if val:
                setattr(timemgr_rst_type, key, val)

        timemgr_rst_step_sec = f.createVariable('timemgr_rst_step_sec', NC_DOUBLE, ())
        timemgr_rst_step_sec[:] = self.unit_hydrograph_dt
        for key, val in share.timemgr_rst_step_sec.__dict__.iteritems():
            if val:
                setattr(timemgr_rst_step_sec, key, val)

        timemgr_rst_start_ymd = f.createVariable('timemgr_rst_start_ymd', NC_DOUBLE, ())
        timemgr_rst_start_ymd[:] = (self._start_date.year * 10000 +
                                    self._start_date.month * 100 +
                                    self._start_date.day)
        for key, val in share.timemgr_rst_start_ymd.__dict__.iteritems():
            if val:
                setattr(timemgr_rst_start_ymd, key, val)

        timemgr_rst_start_tod = f.createVariable('timemgr_rst_start_tod', NC_DOUBLE, ())
        timemgr_rst_start_tod[:] = (self._start_ord % 1) * SECSPERDAY
        for key, val in share.timemgr_rst_start_tod.__dict__.iteritems():
            if val:
                setattr(timemgr_rst_start_tod, key, val)

        timemgr_rst_ref_ymd = f.createVariable('timemgr_rst_ref_ymd', NC_DOUBLE, ())
        timemgr_rst_ref_ymd[:] = REFERENCE_DATE
        for key, val in share.timemgr_rst_ref_ymd.__dict__.iteritems():
            if val:
                setattr(timemgr_rst_ref_ymd, key, val)

        timemgr_rst_ref_tod = f.createVariable('timemgr_rst_ref_tod', NC_DOUBLE, ())
        timemgr_rst_ref_tod[:] = REFERENCE_TIME
        for key, val in share.timemgr_rst_ref_tod.__dict__.iteritems():
            if val:
                setattr(timemgr_rst_ref_tod, key, val)

        timemgr_rst_curr_ymd = f.createVariable('timemgr_rst_curr_ymd', NC_DOUBLE, ())
        timemgr_rst_curr_ymd[:] = (self.timestamp.year * 10000 +
                                   self.timestamp.month * 100 +
                                   self.timestamp.day)
        for key, val in share.timemgr_rst_curr_ymd.__dict__.iteritems():
            if val:
                setattr(timemgr_rst_curr_ymd, key, val)

        timemgr_rst_curr_tod = f.createVariable('timemgr_rst_curr_tod', NC_DOUBLE, ())
        timemgr_rst_curr_tod[:] = (self.time_ord % 1) * SECSPERDAY
        for key, val in share.timemgr_rst_curr_tod.__dict__.iteritems():
            if val:
                setattr(timemgr_rst_curr_tod, key, val)
        # ------------------------------------------------------------ #
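        # ------------------------------------------------------------ #
        # Illustrative note (editor's sketch): the *_ymd fields above pack
        # a date as year*10000 + month*100 + day, and the *_tod fields hold
        # the seconds elapsed in that day.  For example, a start date of
        # 1989-09-01 06:00:00 would be written as
        #     timemgr_rst_start_ymd = 19890901
        #     timemgr_rst_start_tod = 21600    # 0.25 day * SECSPERDAY
        # ------------------------------------------------------------ #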
        # ------------------------------------------------------------ #
        # Setup Tape Dimensions
        coords = ('tapes', 'max_chars')
        tapes = f.createDimension(coords[0], len(history_restart_files))
        max_chars = f.createDimension(coords[1], MAX_NC_CHARS)
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Write Fields
        locfnh = f.createVariable('locfnh', NC_CHAR, coords)
        locfnh[:] = current_history_files
        locfnh.long_name = 'History filename'
        locfnh.comment = 'This variable NOT needed for startup or branch simulations'

        locfnhr = f.createVariable('locfnhr', NC_CHAR, coords)
        locfnhr[:] = history_restart_files
        locfnhr.long_name = 'Restart history filename'
        locfnhr.comment = 'This variable NOT needed for startup or branch simulations'
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Setup Point Dimensions
        coords = ('outlets', 'tracers')
        outlets = f.createDimension(coords[0], self.n_outlets)
        tracers = f.createDimension(coords[1], len(RVIC_TRACERS))
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Write Fields
        oyi = f.createVariable('outlet_y_ind', NC_INT, coords[0])
        oyi[:] = self.outlet_y_ind
        for key, val in share.outlet_y_ind.__dict__.iteritems():
            if val:
                setattr(oyi, key, val)

        oxi = f.createVariable('outlet_x_ind', NC_INT, coords[0])
        oxi[:] = self.outlet_x_ind
        for key, val in share.outlet_x_ind.__dict__.iteritems():
            if val:
                setattr(oxi, key, val)

        odi = f.createVariable('outlet_decomp_ind', NC_INT, coords[0])
        odi[:] = self.outlet_decomp_ind
        for key, val in share.outlet_decomp_ind.__dict__.iteritems():
            if val:
                setattr(odi, key, val)

        tcoords = ('timesteps',) + coords

        ring = f.createVariable('ring', NC_DOUBLE, tcoords)
        ring[:, :, :] = self.ring
        for key, val in share.ring.__dict__.iteritems():
            if val:
                setattr(ring, key, val)
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # write global attributes
        self.glob_atts.update()
        for key, val in self.glob_atts.__dict__.iteritems():
            if val:
                setattr(f, key, val)
        # ------------------------------------------------------------ #

        f.close()
        log.info('Finished writing %s' % filename)

        return filename
    # ---------------------------------------------------------------- #
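# -------------------------------------------------------------------- #
# Illustrative usage sketch (editor's note): the names and values below
# are hypothetical and only show the intended call sequence of the class
# above -- construct from a parameter file, initialize state, convolve
# aggregated runoff each coupling step, and write a restart.
#
#     rvar = Rvar('rvic_params.nc', 'sample_case', 'noleap',
#                 './restarts', 'NETCDF4_CLASSIC')
#     rvar.check_grid_file('domain.lnd.nc')
#     rvar.init_state(None, 'drystart', start_timestamp)
#     for time_ord, aggrunin in runoff_steps:   # hypothetical driver loop
#         timestamp = rvar.convolve(aggrunin, time_ord)
#     restart_file = rvar.write_restart(current_history_files,
#                                       history_restart_files)
# -------------------------------------------------------------------- #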