def uhs2param_init(config_file):
    # ---------------------------------------------------------------- #
    # Read Configuration files
    config_dict = read_config(config_file)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Setup Directory Structures
    directories = make_directories(config_dict['OPTIONS']['CASE_DIR'],
                                   ['plots', 'logs', 'params', 'inputs'])
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # copy inputs to $case_dir/inputs and update configuration
    config_dict = copy_inputs(config_file, directories['inputs'])
    options = config_dict['OPTIONS']
    config_dict['POUR_POINTS'] = {
        'FILE_NAME': config_dict['UHS_FILES']['STATION_FILE']}
    config_dict['ROUTING']['FILE_NAME'] = 'unknown'
    config_dict['UH_BOX'] = {'FILE_NAME': 'unknown'}
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Start Logging
    log = init_logger(directories['logs'], options['LOG_LEVEL'],
                      options['VERBOSE'])

    for direc in directories:
        log.info('%s directory is %s', direc, directories[direc])
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Read domain file (if applicable)
    dom_data, DomVats, DomGats = read_domain(config_dict['DOMAIN'])
    log.info('Opened Domain File: %s', config_dict['DOMAIN']['FILE_NAME'])

    if 'NEW_DOMAIN' in config_dict:
        new_dom_data, new_DomVats, \
            new_DomGats = read_domain(config_dict['NEW_DOMAIN'])
        log.info('Opened New Domain File: %s',
                 config_dict['NEW_DOMAIN']['FILE_NAME'])
    else:
        new_dom_data = None
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Read station file
    outlets = read_station_file(config_dict['UHS_FILES']['STATION_FILE'],
                                dom_data, config_dict)
    # ---------------------------------------------------------------- #

    return dom_data, new_dom_data, outlets, config_dict, directories
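
# A minimal usage sketch (the config path below is hypothetical); all of
# uhs2param_init's inputs come from the configuration file, and the return
# tuple feeds the downstream uhs2param steps:
#
#     dom_data, new_dom_data, outlets, config_dict, directories = \
#         uhs2param_init('rvic_uhs2param.cfg')
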
def convolution_init(config_file):
    """
    - Read Grid File
    - Load the unit hydrograph files (put into point_dict)
    - Load the initial state file and put it in convolution rings
    """
    # ---------------------------------------------------------------- #
    # Read Configuration files
    config_dict = read_config(config_file)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Setup Directory Structure
    directories = make_directories(config_dict['OPTIONS']['CASE_DIR'],
                                   ['hist', 'logs', 'restarts'])
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Copy Inputs to $case_dir/inputs and update configuration
    # config_dict = copy_inputs(config_file, directories['inputs'])
    options = config_dict['OPTIONS']
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Setup Logging
    log = init_logger(directories['logs'], options['LOG_LEVEL'],
                      options['VERBOSE'])
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Initialize the data model
    forcings = config_dict['INPUT_FORCINGS']
    data_model = DataModel(forcings['DATL_PATH'],
                           forcings['DATL_FILE'],
                           forcings['TIME_VAR'],
                           forcings['LATITUDE_VAR'],
                           forcings['DATL_LIQ_FLDS'],
                           forcings['START'],
                           forcings['END'])
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Read Domain File
    domain = config_dict['DOMAIN']
    dom_data, dom_vatts, dom_gatts = read_domain(
        domain, lat0_is_min=data_model.lat0_is_min)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Read the Parameter File
    log.info('reading parameter file %s',
             config_dict['PARAM_FILE']['FILE_NAME'])
    rout_var = Rvar(config_dict['PARAM_FILE']['FILE_NAME'],
                    options['CASEID'], options['CALENDAR'],
                    directories['restarts'], options['REST_NCFORM'])
    rout_var.set_domain(dom_data, domain, data_model.lat0_is_min)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Determine the restart options
    restart_file = None

    if options['RUN_TYPE'] == 'restart':
        restart = read_config(os.path.join(directories['restarts'],
                                           'rpointer'))
        timestr = restart['RESTART']['TIMESTAMP']
        restart_file = restart['RESTART']['FILE_NAME']
    elif options['RUN_TYPE'] == 'startup':
        timestr = options['RUN_STARTDATE']
        restart_file = config_dict['INITIAL_STATE']['FILE_NAME']
    elif options['RUN_TYPE'] == 'drystart':
        timestr = options['RUN_STARTDATE']
    else:
        raise ValueError('RUN_TYPE option {0} is none of these: (restart, '
                         'startup, drystart)'.format(options['RUN_TYPE']))
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Setup time_handle
    time_handle = Dtime(timestr,
                        options['STOP_OPTION'], options['STOP_N'],
                        options['STOP_DATE'],
                        options['REST_OPTION'], options['REST_N'],
                        options['REST_DATE'],
                        options['CALENDAR'],
                        data_model.secs_per_step)
    time_handle.end = data_model.end
    data_model.start(time_handle.timestamp, rout_var)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Read initial state
    rout_var.init_state(restart_file, options['RUN_TYPE'],
                        time_handle.timestamp)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Determine the number of aggregation timesteps
    rout_var.get_time_mode(data_model.secs_per_step)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Setup history Tape(s) and Write Initial Outputs
    history = config_dict['HISTORY']
    numtapes = int(history['RVICHIST_NTAPES'])
    hist_tapes = OrderedDict()

    # make sure history file fields are all in list form
    if numtapes == 1:
        for var, value in history.iteritems():
            if not isinstance(value, list):
                history[var] = [value]

    global_atts = NcGlobals(
        title='RVIC history file',
        casename=options['CASEID'],
        casestr=options['CASESTR'],
        RvicPourPointsFile=os.path.split(rout_var.RvicPourPointsFile)[1],
        RvicUHFile=os.path.split(rout_var.RvicUHFile)[1],
        RvicFdrFile=os.path.split(rout_var.RvicFdrFile)[1],
        RvicDomainFile=os.path.split(domain['FILE_NAME'])[1])

    for j in xrange(numtapes):
        tapename = 'Tape.{0}'.format(j)
        log.info('setting up History %s', tapename)
        hist_tapes[tapename] = Tape(time_handle.time_ord,
                                    options['CASEID'],
                                    rout_var,
                                    tape_num=j,
                                    fincl=['streamflow'],
                                    mfilt=history['RVICHIST_MFILT'][j],
                                    ndens=int(history['RVICHIST_NDENS'][j]),
                                    nhtfrq=int(history['RVICHIST_NHTFRQ'][j]),
                                    avgflag=history['RVICHIST_AVGFLAG'][j],
                                    units=history['RVICHIST_UNITS'][j],
                                    file_format=history['RVICHIST_NCFORM'][j],
                                    outtype=history['RVICHIST_OUTTYPE'][j],
                                    grid_area=dom_data[domain['AREA_VAR']],
                                    grid_lons=dom_data['cord_lons'],
                                    grid_lats=dom_data['cord_lats'],
                                    out_dir=directories['hist'],
                                    calendar=time_handle.calendar,
                                    glob_ats=global_atts)

    # loop over again and print summary
    for tapename, tape in hist_tapes.iteritems():
        log.info('==========%s==========', tapename)
        log.info(tape)
        tape.write_initial()
    # ---------------------------------------------------------------- #

    return (hist_tapes, data_model, rout_var, dom_data, time_handle,
            directories, config_dict)
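
# For RUN_TYPE == 'restart', convolution_init reads an INI-style 'rpointer'
# file from the restarts directory. A hedged sketch of its contents,
# inferred from the keys read above (timestamp and file name are invented):
#
#     [RESTART]
#     TIMESTAMP: 1979-09-01-00000
#     FILE_NAME: my_case.rvic.r.1979-09-01-00000.nc
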
def gen_uh_init(config_file):
    """Initialize RVIC parameter"""
    # ---------------------------------------------------------------- #
    # Read Configuration files
    config_dict = read_config(config_file)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Import optional modules
    if config_dict['OPTIONS']['REMAP'] and not remap_available:
        raise ValueError('Problem importing remap module, '
                         'check to make sure cdo.py is available')
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Setup Directory Structures
    directories = make_directories(config_dict['OPTIONS']['CASE_DIR'],
                                   ['plots', 'logs', 'params', 'inputs'])
    directories.update(make_directories(config_dict['OPTIONS']['TEMP_DIR'],
                                        ['aggregated', 'remapped']))
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # copy inputs to $case_dir/inputs and update configuration
    config_dict = copy_inputs(config_file, directories['inputs'])
    options = config_dict['OPTIONS']
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Start Logging
    log = init_logger(directories['logs'], options['LOG_LEVEL'],
                      options['VERBOSE'])

    for direc in directories:
        log.info('%s directory is %s', direc, directories[direc])
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Read Pour Points files
    try:
        pour_points = pd.read_csv(config_dict['POUR_POINTS']['FILE_NAME'],
                                  comment='#')
        log.info('Opened Pour Points File: '
                 '{0}'.format(config_dict['POUR_POINTS']['FILE_NAME']))
        if not (all(x in pour_points.keys() for x in ['lons', 'lats']) or
                all(x in pour_points.keys() for x in ['x', 'y'])):
            raise ValueError('Pour Points File must include '
                             'variables (lons, lats) or (x, y)')
        if 'names' in pour_points:
            pour_points.fillna(inplace=True, value='unknown')
            for i, name in enumerate(pour_points.names):
                pour_points.names[i] = strip_invalid_char(name)

        pour_points.drop_duplicates(inplace=True)
        # drop incomplete rows in place (the returned copy was previously
        # discarded)
        pour_points.dropna(inplace=True)
    except Exception as e:
        log.error('Error opening pour points file: '
                  '{0}'.format(config_dict['POUR_POINTS']['FILE_NAME']))
        log.exception(e)
        raise
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Read uh box file
    uh_file = config_dict['UH_BOX']['FILE_NAME']
    uh_header = int(config_dict['UH_BOX']['HEADER_LINES'])
    uh_box = {}
    try:
        uh_box['time'], uh_box['func'] = np.genfromtxt(uh_file,
                                                       skip_header=uh_header,
                                                       delimiter=',',
                                                       unpack=True)
        log.info('Opened UHbox File: '
                 '{0}'.format(config_dict['UH_BOX']['FILE_NAME']))
    except Exception:
        log.exception('Error opening uh_box file: '
                      '{0}'.format(config_dict['UH_BOX']['FILE_NAME']))
        raise
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Read FDR file
    fdr_file = config_dict['ROUTING']['FILE_NAME']
    fdr_var = config_dict['ROUTING']['FLOW_DIRECTION_VAR']
    fdr_lat = config_dict['ROUTING']['LATITUDE_VAR']
    fdr_lon = config_dict['ROUTING']['LONGITUDE_VAR']
    fdr_vel = config_dict['ROUTING']['VELOCITY']
    fdr_dif = config_dict['ROUTING']['DIFFUSION']
    try:
        fdr_data, fdr_vatts, _ = read_netcdf(fdr_file)
        fdr_shape = fdr_data[fdr_var].shape

        # ------------------------------------------------------------ #
        # Check latitude order, flip if necessary.
        if fdr_data[fdr_lat][-1] > fdr_data[fdr_lat][0]:
            log.debug('Flow Direction inputs came in upside down, flipping '
                      'everything now.')
            remove_vars = []
            for var, data in fdr_data.iteritems():
                log.debug('flipping %s', var)
                if data.ndim >= 1 and var != fdr_lon:
                    fdr_data[var] = np.flipud(data)
                elif data.ndim == 0:
                    remove_vars.append(var)
            if remove_vars:
                for var in remove_vars:
                    del fdr_data[var]
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Add velocity and/or diffusion grids if not present yet
        if not isinstance(fdr_vel, str):
            fdr_data['velocity'] = np.zeros(fdr_shape) + fdr_vel
            config_dict['ROUTING']['VELOCITY'] = 'velocity'
            log.info('Added velocity grid to fdr_data')
        if not isinstance(fdr_dif, str):
            fdr_data['diffusion'] = np.zeros(fdr_shape) + fdr_dif
            config_dict['ROUTING']['DIFFUSION'] = 'diffusion'
            log.info('Added diffusion grid to fdr_data')

        if ('SOURCE_AREA_VAR' not in config_dict['ROUTING'] or
                config_dict['ROUTING']['SOURCE_AREA_VAR'] not in fdr_data):
            log.warning('Upstream `SOURCE_AREA` was not provided, output '
                        'source area will be zero.')
            config_dict['ROUTING']['SOURCE_AREA_VAR'] = 'src_area'
            fdr_data[config_dict['ROUTING']['SOURCE_AREA_VAR']] = \
                np.zeros(fdr_shape)
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        fdr_data['resolution'] = np.abs(fdr_data[fdr_lon][1] -
                                        fdr_data[fdr_lon][0])
        check_ncvars(config_dict['ROUTING'], fdr_data.keys())
        # ------------------------------------------------------------ #

        log.info('Opened FDR File: {0}'.format(fdr_file))
    except Exception:
        log.exception('Error opening FDR file')
        raise
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Read domain file
    domain = config_dict['DOMAIN']
    dom_data, _, _ = read_domain(domain)
    log.info('Opened Domain File: '
             '{0}'.format(domain['FILE_NAME']))
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # If remap is False, domain coordinates need to be in the fdr coordinates
    # We can move the unit hydrographs to the domain grid later
    if options['AGGREGATE'] and not options['REMAP']:
        log.error('RVIC parameter generation requires REMAP option to be True'
                  ' if AGGREGATE is True')
        raise ValueError('Invalid option')
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Group pour points (if aggregate)
    if options['AGGREGATE']:
        outlets = make_agg_pairs(pour_points, dom_data, fdr_data, config_dict)
        log.info('Finished making agg pairs of '
                 'pour points and outlet grid cells')
    else:
        outlets = {}

        if all(x in pour_points.keys() for x in ['x', 'y', 'lons', 'lats']):
            lats = pour_points['lats'].values
            lons = pour_points['lons'].values
            routys = pour_points['y'].values
            routxs = pour_points['x'].values
        elif all(x in pour_points.keys() for x in ['x', 'y']):
            # use x and y (assume from routing inputs grid)
            # find lons and lats from xs and ys
            routys = pour_points['y'].values
            routxs = pour_points['x'].values
            lats = fdr_data[fdr_lat][routys]
            lons = fdr_data[fdr_lon][routxs]
        else:
            # use lons and lats to find xs and ys
            lats = pour_points['lats'].values
            lons = pour_points['lons'].values

            # find x and y on routing grid
            routys, routxs = latlon2yx(plats=lats,
                                       plons=lons,
                                       glats=fdr_data[fdr_lat],
                                       glons=fdr_data[fdr_lon])

            if options['SEARCH_FOR_CHANNEL']:
                routys, routxs = search_for_channel(
                    fdr_data[config_dict['ROUTING']['SOURCE_AREA_VAR']],
                    routys, routxs, tol=10, search=2)

                # update lats and lons
                lats = fdr_data[fdr_lat][routys]
                lons = fdr_data[fdr_lon][routxs]

        # Find location on domain grid
        domys, domxs = latlon2yx(plats=lats,
                                 plons=lons,
                                 glats=dom_data[domain['LATITUDE_VAR']],
                                 glons=dom_data[domain['LONGITUDE_VAR']])

        for i in xrange(len(lats)):
            if 'names' in pour_points.keys():
                name = pour_points['names'].values[i]
                name = name.replace("'", '').replace(" ", "_")
            else:
                # fill name field with p-outlet_num
                name = 'p-{0}'.format(i)
            outlets[i] = Point(lat=lats[i],
                               lon=lons[i],
                               domx=domxs[i],
                               domy=domys[i],
                               routx=routxs[i],
                               routy=routys[i],
                               name=name,
                               cell_id=dom_data['cell_ids'][domys[i],
                                                            domxs[i]])

            outlets[i].pour_points = [outlets[i]]
    # ---------------------------------------------------------------- #

    log.debug(outlets)
    log.info('Finished with gen_uh_init')
    log.info('-------------------------------------------------------------\n')

    return (uh_box, fdr_data, fdr_vatts, dom_data, outlets,
            config_dict, directories)
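
# A hedged sketch of a pour points CSV accepted by gen_uh_init (values are
# invented). Lines starting with '#' are skipped via comment='#'; either
# (lons, lats) or (x, y) columns are required and 'names' is optional:
#
#     # RVIC pour points
#     lons,lats,names
#     -122.45,47.65,outlet_a
#     -121.90,46.80,outlet_b
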
def main():
    args = parse_args()
    user_config = importlib.import_module('user_config.' +
                                          args.config.replace('/', '.'))
    gen_config = user_config.GenConfig()
    disc_config = user_config.DiscConfig()

    L = init_logger(gen_config.tag, gen_config.ckpt_path)
    L.info("set config G: %s" % gen_config)
    L.info("set config D: %s" % disc_config)

    gen_saver = Saver(gen_config)
    generator, gen_optim, global_step, last_epoch = gen_saver.load()
    disc_saver = Saver(disc_config)
    discriminator, disc_optim, _, _ = disc_saver.load()

    models = {'gen': generator, 'disc': discriminator}
    optims = {'gen': gen_optim, 'disc': disc_optim}

    gen_to_save = generator
    disc_to_save = discriminator
    if gen_config.multi_gpu:
        models['gen'] = torch.nn.DataParallel(generator)
        models['disc'] = torch.nn.DataParallel(discriminator)

    gen_scheduler, disc_scheduler = None, None
    if hasattr(gen_config, 'scheduler'):
        gen_config.scheduler_param['last_epoch'] = -1
        gen_scheduler = gen_config.scheduler(gen_optim,
                                             **gen_config.scheduler_param)
        disc_scheduler = gen_config.scheduler(disc_optim,
                                              **gen_config.scheduler_param)

    log_dir = os.path.join(gen_config.ckpt_path, 'tb')
    writer = SummaryWriter(log_dir=log_dir)

    trainer = gen_config.trainer(gen_config, models, optims, writer)
    # validator = gen_config.validator(gen_config, generator, writer)

    for epoch in range(last_epoch + 1, gen_config.epoch):
        _, global_step, avg_loss = trainer.step(epoch, global_step)
        L.info('Training epoch %d was done. (avg_loss: %f)'
               % (epoch, avg_loss))

        # result, avg_acc = validator.step(epoch, global_step)
        # L.info('Validation epoch %d was done. (avg_acc: %f)'
        #        % (epoch, avg_acc))

        L.info('Saving the trained generator model... (%d epoch, %d step)'
               % (epoch, global_step))
        gen_saver.save(gen_to_save, gen_optim, global_step, epoch,
                       performance=0, perf_op='lt')
        L.info('Saving G is finished.')

        L.info('Saving the trained discriminator model... (%d epoch, %d step)'
               % (epoch, global_step))
        disc_saver.save(disc_to_save, disc_optim, global_step, epoch,
                        performance=0, perf_op='lt')
        L.info('Saving D is finished.')

        if gen_scheduler:
            gen_scheduler.step(epoch)
            disc_scheduler.step(epoch)
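
# A hedged sketch of the config object main() expects from user_config.
# Every attribute below is inferred from what main() actually reads; the
# class name and the values are assumptions, not the project's API.
class ExampleGenConfig(object):
    tag = 'gan-gen'                  # logger tag
    ckpt_path = './checkpoints/gen'  # Saver checkpoints; tb/ logs go inside
    multi_gpu = False                # wrap models in torch.nn.DataParallel
    epoch = 100                      # train up to (but not including) this
    trainer = None  # a callable(config, models, optims, writer) -> trainer
    # Optional scheduler support, detected via hasattr() in main():
    # scheduler = torch.optim.lr_scheduler.StepLR
    # scheduler_param = {'step_size': 30, 'gamma': 0.1}
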
from core.log import init_logger

STATE_API_VERSION = "0.12.1"

init_logger()