def parseReport(self):
    """Parse the options related to reporting and write the snowav config."""
    # Gather the snowav master config so its sections can be identified later
    master = MasterConfig(modules='snowav')
    self.sec_snowav = master.cfg.keys()

    # Build the reporting directory tree: <path_wy>/reports/report_<stamp>
    self.path_report_o = os.path.join(self.path_wy, 'reports')
    self.path_report_i = os.path.join(
        self.path_report_o,
        'report_{}'.format(self.folder_date_stamp))
    if not os.path.exists(self.path_report_i):
        os.makedirs(self.path_report_i)

    # Propagate report paths and the water year into the config
    self.config['report']['rep_path'] = self.path_report_i
    self.config['snowav system']['save_path'] = self.path_report_i
    self.config['snowav system']['wy'] = self.wy
    self.config['runs']['run_dirs'] = [self.pathro]

    # Location of the updated config written for the report
    self.report_config = os.path.join(self.path_report_o, 'snowav_cfg.ini')
    #generate_config(self.ucfg, self.report_config)

    # Copy the user config and strip every awsm/smrf section so that only
    # snowav-relevant sections remain in the written file
    trimmed = copy.deepcopy(self.ucfg)
    for section in self.ucfg.cfg.keys():
        if section in self.sec_awsm or section in self.sec_smrf:
            del trimmed.cfg[section]

    self.tmp_log.append('Writing the config file for snowav')
    generate_config(trimmed, self.report_config)
def make_vertical_plot(config_file):
    """
    Main function in snowplot to interpret config files and piece together
    the plot users describe in the config file.

    Args:
        config_file: config file in .ini format and can be checked with
                     inicheck
    """
    # Load and validate the user's config
    ucfg = get_user_config(config_file, modules=['snowplot'])
    warnings, errors = check_config(ucfg)
    print_config_report(warnings, errors)

    if len(errors) > 0:
        print("Errors in config file. Check report above.")
        sys.exit()

    # Write a fully populated copy of the config to the output directory
    out = ucfg.cfg['output']['output_dir']
    if not isdir(out):
        mkdir(out)
    generate_config(ucfg, join(out, 'config_full.ini'))

    # Grab a copy of the config dictionary
    cfg = ucfg.cfg

    # All available profile templates, keyed by normalized name
    profile_classes = get_checkers(module='snowplot.profiles',
                                   keywords='profile')

    # Map normalized section names back to the sections the user wrote
    requested_profiles = OrderedDict(
        (section.replace('_', '').lower(), section)
        for section in cfg.keys()
        if section not in __non_data_sections__)

    # Instantiate each requested profile from its config section
    data = {}
    for profile_name, cls in profile_classes.items():
        if profile_name in requested_profiles:
            name = requested_profiles[profile_name]
            log.info("Building {} profile".format(name))
            data[profile_name] = cls(**cfg[name])

    # Build the final figure
    build_figure(data, cfg)
def config(self, cfg_dict, section):
    """Write a temporary snowplot config file, yield its path, clean up.

    Args:
        cfg_dict: key/value options for the requested section
        section: name of the section to put cfg_dict under
    """
    # Assemble the ini text: the requested section plus a fixed [output]
    lines = [f'[{section}]\n']
    lines.extend(f'{k}: {v}\n' for k, v in cfg_dict.items())
    lines.append('[output]\n')
    lines.append('show_plot: False\n')
    lines.append('dpi: 50\n')
    lines.append('filename: figure.png\n')

    path = join(dirname(__file__), 'config.ini')
    with open(path, mode='w+') as fp:
        fp.write(''.join(lines))

    # Populate the config, then write the full version back out
    ucfg = get_user_config(path, modules=['snowplot'])
    generate_config(ucfg, path)

    yield path

    # Teardown: remove the temporary file
    if isfile(path):
        os.remove(path)
def __init__(self, config):
    """
    Initialize the model, read config file, start and end date, and logging.

    Args:
        config: string path to the config file or inicheck UserConfig instance

    Raises:
        Exception: if the config file does not exist, is not UTF-8 encoded,
            or ``config`` is neither a path nor a UserConfig.
    """
    # read the config file and store
    awsm_mcfg = MasterConfig(modules='awsm')
    smrf_mcfg = MasterConfig(modules='smrf')

    if isinstance(config, str):
        if not os.path.isfile(config):
            raise Exception('Configuration file does not exist --> {}'
                            .format(config))
        configFile = config
        try:
            # combined master config covers both smrf and awsm sections
            combined_mcfg = MasterConfig(modules=['smrf', 'awsm'])
            # Read in the original users config
            self.ucfg = get_user_config(configFile, mcfg=combined_mcfg)
            self.configFile = configFile
        except UnicodeDecodeError as e:
            print(e)
            raise Exception(('The configuration file is not encoded in '
                             'UTF-8, please change and retry'))
    elif isinstance(config, UserConfig):
        self.ucfg = config
        configFile = ''
    else:
        raise Exception(
            'Config passed to AWSM is neither file name nor UserConfig instance')

    # get the git version
    self.gitVersion = awsm_utils.getgitinfo()

    # create blank log and error log because logger is not initialized yet
    self.tmp_log = []
    self.tmp_err = []
    self.tmp_warn = []

    # Check the user config file for errors and report issues if any
    self.tmp_log.append("Checking config file for issues...")
    warnings, errors = check_config(self.ucfg)
    print_config_report(warnings, errors)
    self.config = self.ucfg.cfg

    # Exit AWSM if config file has errors
    # NOTE(review): sys.exit() is commented out, so errors are reported but
    # execution continues -- confirm this is intentional.
    if len(errors) > 0:
        print("Errors in the config file. "
              "See configuration status report above.")
        # sys.exit()

    # ################## Decide which modules to run #####################
    self.do_smrf = self.config['awsm master']['run_smrf']
    #self.do_isnobal = self.config['awsm master']['run_isnobal']
    self.model_type = self.config['awsm master']['model_type']
    # self.do_smrf_ipysnobal = \
    #     self.config['awsm master']['run_smrf_ipysnobal']
    # self.do_ipysnobal = self.config['awsm master']['run_ipysnobal']
    self.do_forecast = False
    if 'gridded' in self.config and self.do_smrf:
        self.do_forecast = self.config['gridded']['hrrr_forecast_flag']

        # WARNING: The value here is inferred in SMRF.data.loadGrid. A
        # change here requires a change there
        self.n_forecast_hours = 18

    # Options for converting files
    self.do_make_in = self.config['awsm master']['make_in']
    self.do_make_nc = self.config['awsm master']['make_nc']
    # do report?
    # self.do_report = self.config['awsm master']['do_report']
    self.snowav_config = self.config['awsm master']['snowav_config']

    # options for masking isnobal
    self.mask_isnobal = self.config['awsm master']['mask_isnobal']

    # prompt for making directories
    self.prompt_dirs = self.config['awsm master']['prompt_dirs']

    # store smrf version if running smrf
    self.smrf_version = smrf.__version__

    # ################ Time information ##################
    self.start_date = pd.to_datetime(self.config['time']['start_date'])
    self.end_date = pd.to_datetime(self.config['time']['end_date'])
    self.time_step = self.config['time']['time_step']
    self.tmz = self.config['time']['time_zone']
    self.tzinfo = pytz.timezone(self.config['time']['time_zone'])
    # date to use for finding wy
    tmp_date = self.start_date.replace(tzinfo=self.tzinfo)
    tmp_end_date = self.end_date.replace(tzinfo=self.tzinfo)

    # find water year hour of start and end date
    self.start_wyhr = int(utils.water_day(tmp_date)[0]*24)
    self.end_wyhr = int(utils.water_day(tmp_end_date)[0]*24)

    # find start of water year (Oct 1 of the previous calendar year)
    tmpwy = utils.water_day(tmp_date)[1] - 1
    self.wy_start = pd.to_datetime('{:d}-10-01'.format(tmpwy))

    # ################ Store some paths from config file ##################
    # path to the base drive (i.e. /data/blizzard)
    if self.config['paths']['path_dr'] is not None:
        self.path_dr = os.path.abspath(self.config['paths']['path_dr'])
    else:
        print('No base path to drive given. Exiting now!')
        sys.exit()

    # name of your basin (i.e. Tuolumne)
    self.basin = self.config['paths']['basin']
    # water year of run
    self.wy = utils.water_day(tmp_date)[1]
    # if the run is operational or not
    self.isops = self.config['paths']['isops']
    # name of project if not an operational run
    self.proj = self.config['paths']['proj']
    # check for project description
    self.desc = self.config['paths']['desc']
    # find style for folder date stamp
    self.folder_date_style = self.config['paths']['folder_date_style']

    # setting to output in separate daily folders
    self.daily_folders = self.config['awsm system']['daily_folders']
    # NOTE(review): self.run_smrf_ipysnobal is never assigned in this
    # __init__ (the related option above is commented out), so a truthy
    # daily_folders would raise AttributeError here -- verify.
    if self.daily_folders and not self.run_smrf_ipysnobal:
        raise ValueError('Cannot run daily_folders with anything other'
                         ' than run_smrf_ipysnobal')

    if self.do_forecast:
        self.tmp_log.append('Forecasting set to True')

        # self.fp_forecastdata = self.config['gridded']['wrf_file']
        # if self.fp_forecastdata is None:
        #     self.tmp_err.append('Forecast set to true, '
        #                         'but no grid file given')
        #     print("Errors in the config file. See configuration "
        #           "status report above.")
        #     print(self.tmp_err)
        #     sys.exit()

        if self.config['system']['threading']:
            # Can't run threaded smrf if running forecast_data
            self.tmp_err.append('Cannot run SMRF threaded with'
                                ' gridded input data')
            print(self.tmp_err)
            sys.exit()

    # Time step mass thresholds for iSnobal
    self.mass_thresh = []
    self.mass_thresh.append(self.config['grid']['thresh_normal'])
    self.mass_thresh.append(self.config['grid']['thresh_medium'])
    self.mass_thresh.append(self.config['grid']['thresh_small'])

    # threads for running iSnobal
    self.ithreads = self.config['awsm system']['ithreads']
    # how often to output from iSnobal
    self.output_freq = self.config['awsm system']['output_frequency']
    # number of timesteps to run if you don't want to run the whole thing
    self.run_for_nsteps = self.config['awsm system']['run_for_nsteps']
    # pysnobal output variables, normalized to lowercase
    self.pysnobal_output_vars = self.config['awsm system']['variables']
    self.pysnobal_output_vars = [wrd.lower()
                                 for wrd in self.pysnobal_output_vars]
    # snow and em file names
    self.snow_name = self.config['awsm system']['snow_name']
    self.em_name = self.config['awsm system']['em_name']

    # options for restarting iSnobal
    self.restart_crash = False
    if self.config['isnobal restart']['restart_crash']:
        self.restart_crash = True
        # self.new_init = self.config['isnobal restart']['new_init']
        self.depth_thresh = self.config['isnobal restart']['depth_thresh']
        self.restart_hr = \
            int(self.config['isnobal restart']['wyh_restart_output'])
        self.restart_folder = self.config['isnobal restart']['output_folders']

    # iSnobal active layer
    self.active_layer = self.config['grid']['active_layer']

    # if we are going to run ipysnobal with smrf
    if self.model_type in ['ipysnobal', 'smrf_ipysnobal']:
        self.ipy_threads = self.ithreads
        self.ipy_init_type = \
            self.config['files']['init_type']
        self.forcing_data_type = \
            self.config['ipysnobal']['forcing_data_type']

    # parameters needed for restart procedure
    self.restart_run = False
    if self.config['isnobal restart']['restart_crash']:
        self.restart_run = True
        # find restart hour datetime
        reset_offset = pd.to_timedelta(self.restart_hr, unit='h')
        # set a new start date for this run
        self.restart_date = self.wy_start + reset_offset
        # NOTE(review): message logs self.start_date, not self.restart_date
        # -- possibly intended to report the restart date; confirm.
        self.tmp_log.append('Restart date is {}'.format(self.start_date))

    # read in update depth parameters
    self.update_depth = False
    if 'update depth' in self.config:
        self.update_depth = self.config['update depth']['update']
    if self.update_depth:
        self.update_file = self.config['update depth']['update_file']
        self.update_buffer = self.config['update depth']['buffer']
        self.flight_numbers = self.config['update depth']['flight_numbers']
        # if flights to use is not list, make it a list
        if self.flight_numbers is not None:
            if not isinstance(self.flight_numbers, list):
                self.flight_numbers = [self.flight_numbers]

    # list of sections related to AWSM
    # These will be removed for smrf config
    self.sec_awsm = awsm_mcfg.cfg.keys()
    self.sec_smrf = smrf_mcfg.cfg.keys()

    # Make rigid directory structure
    self.mk_directories()

    # ################ Topo data for iSnobal ##################
    # get topo stats
    self.csys = self.config['grid']['csys'].upper()
    self.nbits = int(self.config['grid']['nbits'])
    self.soil_temp = self.config['soil_temp']['temp']
    # get topo class
    self.topo = mytopo(self.config['topo'], self.mask_isnobal,
                       self.model_type, self.csys, self.pathdd)

    # ################ Generate config backup ##################
    # if self.config['output']['input_backup']:
    # set location for backup and output backup of awsm sections
    config_backup_location = \
        os.path.join(self.pathdd, 'awsm_config_backup.ini')
    generate_config(self.ucfg, config_backup_location)

    # create log now that directory structure is done
    self.createLog()

    # if we have a model, initialize it
    if self.model_type is not None:
        self.myinit = modelInit(self._logger, self.config, self.topo,
                                self.start_wyhr, self.pathro, self.pathrr,
                                self.pathinit, self.wy_start)
def main():
    """
    Runs a command that only takes a config. This was originally written to
    perform multiyear analysis on swiflows calibration commands.

    For each requested water year, the config's start/end dates are shifted
    to that year, the config is written to a per-year output directory, and
    the command is executed against it.
    """
    p = argparse.ArgumentParser(
        description="Runs a command that takes a config"
                    " file for multiple years.")
    p.add_argument("cmd", help="Command to execute like cmd config.ini")
    p.add_argument("config", help="Config file containing all settings for"
                   " running the cmd")
    p.add_argument("-wy", "--years", dest='years', required=True, nargs="+",
                   help="Water years to run by changing the start and end"
                   " times in the config, if not provided it will just run"
                   " the cmd, assumes the month and day assigned in the "
                   " config are constant")
    p.add_argument("-m", "--modules", dest='modules', required=False,
                   nargs='+',
                   help="Python packages the config is associated to")
    # Fixed: help text was a copy-paste of --modules' help
    p.add_argument("-o", "--output", dest='output', required=False,
                   default='./output',
                   help="Directory to write the modified per-year configs"
                   " and results to")
    args = p.parse_args()

    # Manage the config paths and modules and grab the config
    orig_path = abspath(args.config)
    modules = args.modules if args.modules else None
    ucfg = get_user_config(orig_path, modules=modules)

    # Setup output path
    output = abspath(args.output)
    if not isdir(output):
        os.makedirs(output)

    print("Running {} {} over {}...".format(args.cmd, args.config,
                                            ", ".join(args.years)))

    # Determine the dates to modify and report it
    results = find_section_item_start_stop(ucfg)
    sec_name = results['section']
    start_name = results['start']
    end_name = results['end']

    start = ucfg.cfg[sec_name][start_name]
    end = ucfg.cfg[sec_name][end_name]

    print("Found the modifiable start and stop dates in the config file...")
    print("Start in configs Section: {} Item: {}".format(sec_name, start_name))
    print("End in configs Section: {} Item: {}".format(sec_name, end_name))

    fmt = "%m-%d"
    print("Running {} year over {} - {}".format(len(args.years),
                                                start.strftime(fmt),
                                                end.strftime(fmt)))

    for wyr in args.years:
        print("Adjusting config for WY{} and running...".format(wyr))

        # Update the paths for modifying
        current_output = join(output, wyr)
        current_path = join(current_output, "config.ini")
        ucfg = set_dates(ucfg, wyr, sec_name, start_name, end_name)

        if not isdir(current_output):
            os.makedirs(current_output)
        else:
            print("WARN: WY{} has data in it, you could be overwriting"
                  " data...".format(wyr))

        # Write the config so we can run
        generate_config(ucfg, current_path)

        # Build the command.
        # NOTE: shell=True runs the user-supplied command through the shell;
        # acceptable for a CLI tool where the user controls the command line,
        # but never pass untrusted input here.
        cmd = "{} {}".format(args.cmd, current_path)
        s = check_output(cmd, shell=True)
        print(s.decode("utf-8"))
def __init__(self, config, testing=False):
    """
    Initialize the model, read config file, start and end date, and logging.

    Args:
        config: string path to the config file or inicheck UserConfig instance
        testing: flag stored on the instance; presumably toggles test-only
            behavior elsewhere -- TODO confirm against callers
    """
    self.read_config(config)
    self.testing = testing

    # create blank log and error log because logger is not initialized yet
    self.tmp_log = []
    self.tmp_err = []
    self.tmp_warn = []

    self.parse_time()
    self.parse_folder_structure()
    self.mk_directories()
    self.create_log()

    # ################## Decide which modules to run #####################
    self.do_smrf = self.config['awsm master']['run_smrf']
    self.model_type = self.config['awsm master']['model_type']
    # self.do_smrf_ipysnobal = \
    #     self.config['awsm master']['run_smrf_ipysnobal']
    # self.do_ipysnobal = self.config['awsm master']['run_ipysnobal']
    # NOTE(review): the hrrr_forecast_flag assignment below is commented
    # out, so do_forecast stays False and the forecast branch further down
    # is effectively dead code -- verify this is intentional.
    self.do_forecast = False
    if 'gridded' in self.config and self.do_smrf:
        # self.do_forecast = self.config['gridded']['hrrr_forecast_flag']

        # WARNING: The value here is inferred in SMRF.data.loadGrid. A
        # change here requires a change there
        self.n_forecast_hours = 18

    # options for masking isnobal
    self.mask_isnobal = self.config['awsm master']['mask_isnobal']

    # store smrf version if running smrf
    self.smrf_version = smrf.__version__

    if self.do_forecast:
        self.tmp_log.append('Forecasting set to True')

        # self.fp_forecastdata = self.config['gridded']['wrf_file']
        # if self.fp_forecastdata is None:
        #     self.tmp_err.append('Forecast set to true, '
        #                         'but no grid file given')
        #     print("Errors in the config file. See configuration "
        #           "status report above.")
        #     print(self.tmp_err)
        #     sys.exit()

        if self.config['system']['threading']:
            # Can't run threaded smrf if running forecast_data
            self.tmp_err.append('Cannot run SMRF threaded with'
                                ' gridded input data')
            print(self.tmp_err)
            sys.exit()

    # Time step mass thresholds for iSnobal
    self.mass_thresh = []
    self.mass_thresh.append(self.config['grid']['thresh_normal'])
    self.mass_thresh.append(self.config['grid']['thresh_medium'])
    self.mass_thresh.append(self.config['grid']['thresh_small'])

    # threads for running iSnobal
    self.ithreads = self.config['awsm system']['ithreads']
    # how often to output from iSnobal
    self.output_freq = self.config['awsm system']['output_frequency']
    # number of timesteps to run if you don't want to run the whole thing
    self.run_for_nsteps = self.config['awsm system']['run_for_nsteps']
    # pysnobal output variables, normalized to lowercase
    self.pysnobal_output_vars = self.config['awsm system']['variables']
    self.pysnobal_output_vars = [wrd.lower()
                                 for wrd in self.pysnobal_output_vars]
    # snow and em file names
    self.snow_name = self.config['awsm system']['snow_name']
    self.em_name = self.config['awsm system']['em_name']

    # options for restarting iSnobal
    self.restart_crash = False
    if self.config['isnobal restart']['restart_crash']:
        self.restart_crash = True
        # self.new_init = self.config['isnobal restart']['new_init']
        self.depth_thresh = self.config['isnobal restart']['depth_thresh']
        self.restart_hr = \
            int(self.config['isnobal restart']['wyh_restart_output'])
        self.restart_folder = self.config['isnobal restart']['output_folders']

    # iSnobal active layer
    self.active_layer = self.config['grid']['active_layer']

    # if we are going to run ipysnobal with smrf
    if self.model_type in ['ipysnobal', 'smrf_ipysnobal']:
        self.ipy_threads = self.ithreads
        self.ipy_init_type = \
            self.config['files']['init_type']
        self.forcing_data_type = \
            self.config['ipysnobal']['forcing_data_type']

    # parameters needed for restart procedure
    self.restart_run = False
    if self.config['isnobal restart']['restart_crash']:
        self.restart_run = True
        # find restart hour datetime
        # NOTE(review): reset_offset is computed but never used here
        # (unlike the older __init__, which added it to wy_start to set a
        # restart_date) -- confirm whether a restart date assignment was
        # dropped intentionally.
        reset_offset = pd.to_timedelta(self.restart_hr, unit='h')
        # set a new start date for this run
        self.tmp_log.append('Restart date is {}'.format(self.start_date))

    # read in update depth parameters
    self.update_depth = False
    if 'update depth' in self.config:
        self.update_depth = self.config['update depth']['update']
    if self.update_depth:
        self.update_file = self.config['update depth']['update_file']
        self.update_buffer = self.config['update depth']['buffer']
        self.flight_numbers = self.config['update depth']['flight_numbers']
        # if flights to use is not list, make it a list
        if self.flight_numbers is not None:
            if not isinstance(self.flight_numbers, list):
                self.flight_numbers = [self.flight_numbers]

    # ################ Topo data for iSnobal ##################
    self.soil_temp = self.config['soil_temp']['temp']
    self.load_topo()

    # ################ Generate config backup ##################
    # if self.config['output']['input_backup']:
    # set location for backup and output backup of awsm sections
    config_backup_location = \
        os.path.join(self.path_output, 'awsm_config_backup.ini')
    generate_config(self.ucfg, config_backup_location)

    # create log now that directory structure is done
    # self.create_log()

    self.smrf_connector = SMRFConnector(self)

    # if we have a model, initialize it
    if self.model_type is not None:
        self.model_init = ModelInit(
            self.config,
            self.topo,
            self.path_output,
            self.start_date)
def parse(self, external_logger=None):
    """Parse config options.

    Loads basin masks, connects to the results database, determines the
    water year and elevation bins from the first snow.nc file, reads model
    outputs for the requested date range (and optionally for flight
    updates), and writes a copy of the config next to the figures.

    Args:
        external_logger: optional logger to use instead of creating one

    Raises:
        Exception: if the first run dir has no valid snow.nc, if no snow
            files fall in the requested range, or if start/end dates lie
            outside the available output dates.
    """

    self.snowav_version = snowav.__version__
    self.cclimit = -5 * 1000 * 1000

    # fixed color cycle for bar plots
    self.barcolors = [
        'xkcd:cobalt',
        'xkcd:mustard green',
        'xkcd:lichen',
        'xkcd:pale green',
        'xkcd:blue green',
        'xkcd:bluish purple',
        'xkcd:lightish purple',
        'xkcd:deep magenta',
        'xkcd:burgundy',
        'red'
    ]

    out = masks(self.dempath, self.db_convert, plotorder=self.plotorder,
                plotlabels=self.plotlabels)

    self.dem = out['dem']
    self.veg_type = out['veg_type']
    self.masks = out['masks']
    self.nrows = out['nrows']
    self.ncols = out['ncols']
    self.plotorder = out['plotorder']
    self.labels = out['labels']

    for log in out['logger']:
        self.tmp_log.append(log)

    # Establish database connection
    self.basins, cnx, out = connect(sqlite=self.sqlite, sql=self.mysql,
                                    plotorder=self.plotorder,
                                    user=self.db_user,
                                    password=self.db_password,
                                    host=self.db_host, port=self.db_port,
                                    convert=self.db_convert,
                                    add=self.add_basins)
    self.connector = cnx

    for log in out:
        self.tmp_log.append(log)

    if self.loglevel == 'DEBUG':
        for basin in self.basins:
            self.tmp_log.append(' {}: {}'.format(basin, self.basins[basin]))

    # Check snow.nc file location, get topo stats and water year
    sfile = os.path.join(self.run_dirs[0], 'snow.nc')

    if os.path.isfile(sfile):
        topo = get_topo_stats(sfile)
        self.snow_x = topo['x']
        self.snow_y = topo['y']
        self.pixel = int(topo['dv'])

        # water year comes from the first timestamp in the file
        ncf = nc.Dataset(sfile)
        t = nc.num2date(ncf.variables['time'][0],
                        ncf.variables['time'].units)
        ncf.close()
        self.wy = handle_year_stradling(t) + 1

    else:
        print('\nGiven config options, expecting to find:\n {}\nto load topo '
              'stats but is not a valid file\nCheck config [run] options, see '
              'CoreConfig.ini for details\n'.format(sfile))
        raise Exception('{} not a valid file'.format(sfile))

    # make the bins
    edges = np.arange(self.elev_bins[0],
                      self.elev_bins[1] + self.elev_bins[2],
                      self.elev_bins[2])

    # use for definition
    self.edges = np.arange(self.elev_bins[0] - self.elev_bins[2],
                           self.elev_bins[1], self.elev_bins[2])

    v = self.properties
    if self.inputs_flag:
        v += self.inputs_variables

    # get variables that will be used in processing
    self.variables = AwsmInputsOutputs()
    self.variables.make_variables(v, self.edges, self.masks.keys())

    # unit-dependent conversion factors and axis labels
    if self.units == 'TAF':
        self.conversion_factor = ((self.pixel**2) * 0.000000810713194 * 0.001)
        self.depth_factor = 0.03937
        self.dem = self.dem * 3.28
        self.depthlbl = 'in'
        self.vollbl = self.units
        self.elevlbl = 'ft'
        self.snowdepthlbl = 'in'

    if self.units == "SI":
        self.conversion_factor = ((self.pixel**2) *
                                  0.000000810713194) * 1233.48 / 1e9
        self.depth_factor = 0.1
        self.depthlbl = 'cm'
        self.vollbl = 'M$M^3$'
        self.elevlbl = 'm'
        self.snowdepthlbl = 'cm'

    if self.units == "AWSM":
        self.conversion_factor = ((self.pixel**2) *
                                  0.000000810713194) * 1233.48
        self.depth_factor = 1
        self.depthlbl = 'mm'
        self.vollbl = '$M^3$'
        self.elevlbl = 'm'
        self.snowdepthlbl = 'm'

    self.ixd = np.digitize(self.dem, edges)
    self.xlims = (0, len(edges))

    if self.loglevel == 'DEBUG' and self.log_to_file is not True:
        print('Reading files in {}...'.format(
            self.run_dirs[0].split('runs')[0]))

    results = outputs(self.run_dirs, self.wy, self.properties,
                      self.start_date, self.end_date, None, self.loglevel)

    out = results['outputs']
    all_dirs = results['dirs']
    dirs = results['run_dirs']
    rdict = results['rdict']
    log = results['log']

    # If there was an error parsing files catch and log it
    if out == [] and all_dirs == [] and 'not a valid file' in log[-1]:
        self.tmp_log.append(log[-1])

        if self.start_date is not None and self.end_date is not None:
            ext_shr = (self.directory + '_' +
                       self.start_date.date().strftime("%Y%m%d") + '_' +
                       self.end_date.date().strftime("%Y%m%d"))
            self.figs_path = os.path.join(self.save_path,
                                          '{}/'.format(ext_shr))

        if external_logger == None:
            createLog(self)
        else:
            self._logger = external_logger

        raise Exception(log[-1])

    for l in log:
        self.tmp_log.append(l)

    if out['dates'] == []:
        raise Exception('Supplied [run] directory, start_date, and end_date '
                        'give no valid snow files')

    self.outputs = out
    self.run_dirs = dirs
    self.all_dirs = all_dirs
    self.rundirs_dict = rdict
    self.all_dirs_flt = deepcopy(all_dirs)

    if self.start_date is not None and self.end_date is None:
        self.end_date = self.outputs['dates'][-1]
        self.tmp_log.append(' Config options [run] end_date '
                            'not specified, assigning '
                            '{} and {}'.format(self.start_date,
                                               self.end_date))

        self.ixs = 0
        self.ixe = len(self.outputs['dates']) - 1

    # Otherwise, get closest dates and make indices
    else:
        self.start_date = self.outputs['dates'][0]
        self.end_date = self.outputs['dates'][-1]
        self.ixs = 0
        self.ixe = len(self.outputs['dates']) - 1

    if ((self.start_date.date() < self.outputs['dates'][0].date()) or
            (self.end_date.date() > self.outputs['dates'][-1].date())):
        raise Exception('ERROR! Config option [run] start_date or end_date '
                        'outside of date range found in [run] directory')

    # Since model outputs at 23:00, step the figure and report dates to
    # show 00:00 the next day (unless start of water year)
    if self.start_date == datetime(self.wy - 1, 10, 1, 23, 0, 0):
        self.report_start = self.start_date

    else:
        self.report_start = self.start_date + timedelta(hours=1)

    # Copy the config file where figs will be saved
    # use directory if only plotting figures from database and don't
    # have start_date, end_date
    extf = os.path.splitext(os.path.split(self.config_file)[1])
    ext_shr = (self.directory + '_' +
               self.start_date.date().strftime("%Y%m%d") + '_' +
               self.end_date.date().strftime("%Y%m%d"))
    self.figs_path = os.path.join(self.save_path, '{}/'.format(ext_shr))

    # Get outputs for flights
    if self.flt_flag:
        file = self.update_file
        p = nc.Dataset(file, 'r')

        if self.update_numbers is None:
            times = p.variables['time'][:]
        else:
            if sum([x > len(p.variables['time'])
                    for x in self.update_numbers]) > 0:
                self.tmp_log.append(' Value in [plots] update_numbers out of '
                                    'range, max is {}, flight update figs '
                                    'being set to False'.format(
                                        len(p.variables['time'])))
                times = []
                self.flt_flag = False
            else:
                times = p.variables['time'][self.update_numbers]

        p.close()

        flight_dates = []
        pre_flight_dates = []

        # each flight time also gets a "pre-flight" date 24 hours earlier
        for time in times:
            wydate = calculate_date_from_wyhr(int(time), self.wy)
            pre_wydate = calculate_date_from_wyhr(int(time - 24), self.wy)
            flight_dates = np.append(flight_dates, wydate)
            pre_flight_dates = np.append(pre_flight_dates, pre_wydate)

        if self.loglevel == 'DEBUG' and self.log_to_file is not True:
            print('Reading files in {} for flight updates'
                  '...'.format(self.run_dirs[0].split('runs')[0]))

        results = outputs(self.all_dirs_flt, self.wy, self.properties,
                          None, None, flight_dates, self.loglevel)

        self.flight_outputs = results['outputs']
        self.run_dirs_flt = results['run_dirs']
        self.flt_rundirs_dict = results['rdict']
        self.flight_diff_dates = results['outputs']['dates']
        # NOTE(review): this assignment is overwritten by the pre-flight
        # outputs call just below -- the first assignment looks redundant.
        self.pre_flight_outputs = results['outputs']

        results = outputs(self.all_dirs_flt, self.wy, self.properties,
                          None, None, pre_flight_dates, self.loglevel)

        self.pre_flight_outputs = results['outputs']

        # If there are no flights in the period, set to false for the flight
        # difference figure and report
        if not self.run_dirs_flt:
            self.flt_flag = False
            self.tmp_log.append(' Config option [plots] update_file was '
                                'supplied, but no snow.nc files were found in '
                                '[run] directory that fit the date range, no '
                                'flight difference figure will be made')

    else:
        self.flight_diff_dates = None

    self.report_date = self.end_date + timedelta(hours=1)
    parts = self.report_name.split('.')
    self.report_name = (parts[0] +
                        self.report_date.date().strftime("%Y%m%d") +
                        '.' + parts[1])

    if not os.path.exists(self.figs_path):
        os.makedirs(self.figs_path)

    config_copy = '{}{}{}'.format(self.figs_path, ext_shr, extf[1])
    generate_config(self.ucfg, config_copy)

    if external_logger == None:
        createLog(self)
    else:
        self._logger = external_logger

    if self.inputs_basins is None:
        self.inputs_basins = [self.plotorder[0]]
def __init__(self, config, external_logger=None):
    """
    Initialize the model, read config file, start and end date, and logging.

    Args:
        config: string path to the config file or inicheck UserConfig instance
        external_logger: optional logger to use instead of SMRF's own

    Raises:
        Exception: if the config file does not exist or ``config`` is
            neither a path nor a UserConfig.
        ValueError: if a future date is requested without gridded data.
    """
    # read the config file and store
    if isinstance(config, str):
        if not os.path.isfile(config):
            raise Exception(
                'Configuration file does not exist --> {}'.format(config))
        self.configFile = config
        # Read in the original users config
        ucfg = get_user_config(config, modules='smrf')
    elif isinstance(config, UserConfig):
        ucfg = config
        self.configFile = config.filename
    else:
        raise Exception('Config passed to SMRF is neither file name nor '
                        ' UserConfig instance')

    # start logging
    if external_logger is None:
        self.smrf_logger = logger.SMRFLogger(ucfg.cfg['system'])
        self._logger = logging.getLogger(__name__)
    else:
        self._logger = external_logger

    # add the title
    self.title(2)

    # Make the output directory if it does not exist
    out = ucfg.cfg['output']['out_location']
    os.makedirs(out, exist_ok=True)

    # Check the user config file for errors and report issues if any
    self._logger.info("Checking config file for issues...")
    warnings, errors = check_config(ucfg)
    print_config_report(warnings, errors, logger=self._logger)
    self.ucfg = ucfg
    self.config = self.ucfg.cfg

    # Exit SMRF if config file has errors
    if len(errors) > 0:
        self._logger.error("Errors in the config file. See configuration"
                           " status report above.")
        sys.exit()

    # Write the config file to the output dir
    full_config_out = abspath(join(out, 'config.ini'))
    self._logger.info("Writing config file with full options.")
    generate_config(self.ucfg, full_config_out)

    # Process the system variables: every [system] option becomes an
    # attribute of this instance
    for k, v in self.config['system'].items():
        setattr(self, k, v)

    self._setup_date_and_time()

    # need to align date time: make the albedo decay dates timezone-aware
    if 'date_method_start_decay' in self.config['albedo'].keys():
        self.config['albedo']['date_method_start_decay'] = \
            self.config['albedo']['date_method_start_decay'].replace(
                tzinfo=self.time_zone)
        self.config['albedo']['date_method_end_decay'] = \
            self.config['albedo']['date_method_end_decay'].replace(
                tzinfo=self.time_zone)

    # if a gridded dataset will be used
    self.gridded = False
    self.forecast_flag = False
    self.hrrr_data_timestep = False
    if 'gridded' in self.config:
        self.gridded = True
        if self.config['gridded']['data_type'] in ['hrrr_grib']:
            self.hrrr_data_timestep = \
                self.config['gridded']['hrrr_load_method'] == 'timestep'

    # future dates are only meaningful with gridded (forecast) data
    now = datetime.now().astimezone(self.time_zone)
    if ((self.start_date > now and not self.gridded) or
            (self.end_date > now and not self.gridded)):
        raise ValueError("A date set in the future can only be used with"
                         " WRF generated data!")

    # Initialize the distribute dict
    self.distribute = {}

    if self.config['system']['qotw']:
        self._logger.info(getqotw())

    self._logger.info('Started SMRF --> %s' % now)
    self._logger.info('Model start --> %s' % self.start_date)
    self._logger.info('Model end --> %s' % self.end_date)
    self._logger.info('Number of time steps --> %i' % self.time_steps)
def __init__(self, config, external_logger=None):
    """
    Initialize the model, read config file, start and end date, and logging.

    Args:
        config: string path to the config file or inicheck UserConfig instance
        external_logger: optional logger to use instead of configuring
            logging from the [system] section

    Raises:
        Exception: if the config file does not exist or ``config`` is
            neither a path nor a UserConfig.
        ValueError: for an invalid log level or a future date without
            gridded data.
    """
    # read the config file and store
    if isinstance(config, str):
        if not os.path.isfile(config):
            raise Exception(
                'Configuration file does not exist --> {}'.format(config))
        configFile = config
        # Read in the original users config
        ucfg = get_user_config(config, modules='smrf')
    elif isinstance(config, UserConfig):
        ucfg = config
        configFile = config.filename
    else:
        raise Exception('Config passed to SMRF is neither file name nor '
                        ' UserConfig instance')

    # start logging
    if external_logger == None:

        if 'log_level' in ucfg.cfg['system']:
            loglevel = ucfg.cfg['system']['log_level'].upper()
        else:
            loglevel = 'INFO'

        numeric_level = getattr(logging, loglevel, None)
        if not isinstance(numeric_level, int):
            raise ValueError('Invalid log level: %s' % loglevel)

        # setup the logging; a relative log_file path is resolved against
        # the config file's directory and created if missing
        logfile = None
        if ucfg.cfg['system']['log_file'] != None:
            logfile = ucfg.cfg['system']['log_file']
            if not os.path.isabs(logfile):
                logfile = abspath(join(dirname(configFile),
                                       ucfg.cfg['system']['log_file']))

            if not os.path.isdir(dirname(logfile)):
                os.makedirs(dirname(logfile))
            if not os.path.isfile(logfile):
                with open(logfile, 'w+') as f:
                    f.close()

        fmt = '%(levelname)s:%(name)s:%(message)s'
        if logfile is not None:
            logging.basicConfig(filename=logfile,
                                level=numeric_level,
                                filemode='w+',
                                format=fmt)
        else:
            logging.basicConfig(level=numeric_level)
            coloredlogs.install(level=numeric_level, fmt=fmt)

        self._loglevel = numeric_level
        self._logger = logging.getLogger(__name__)
    else:
        self._logger = external_logger

    # add the title
    title = self.title(2)
    for line in title:
        self._logger.info(line)

    out = ucfg.cfg['output']['out_location']

    # Make the tmp and output directories if they do not exist
    makeable_dirs = [out, join(out, 'tmp')]
    for path in makeable_dirs:
        if not os.path.isdir(path):
            try:
                self._logger.info("Directory does not exist, Creating:\n{}"
                                  "".format(path))
                os.makedirs(path)

            # NOTE(review): `raise e` here is a no-op re-raise; the try/except
            # adds nothing over letting OSError propagate.
            except OSError as e:
                raise e

    # NOTE(review): relies on the loop variable after the loop, so temp_dir
    # is always the last entry (the tmp dir) -- fragile; confirm intent.
    self.temp_dir = path

    # Check the user config file for errors and report issues if any
    self._logger.info("Checking config file for issues...")
    warnings, errors = check_config(ucfg)
    print_config_report(warnings, errors, logger=self._logger)
    self.ucfg = ucfg
    self.config = self.ucfg.cfg

    # Exit SMRF if config file has errors
    if len(errors) > 0:
        self._logger.error("Errors in the config file. See configuration"
                           " status report above.")
        sys.exit()

    # Write the config file to the output dir no matter where the project is
    full_config_out = abspath(join(out, 'config.ini'))
    self._logger.info("Writing config file with full options.")
    generate_config(self.ucfg, full_config_out)

    # Process the system variables: every [system] option becomes an
    # attribute of this instance
    for k, v in self.config['system'].items():
        setattr(self, k, v)

    os.environ['WORKDIR'] = self.temp_dir

    # Get the time section utils
    self.start_date = pd.to_datetime(self.config['time']['start_date'])
    self.end_date = pd.to_datetime(self.config['time']['end_date'])

    # Get the timesteps correctly in the time zone
    d = data.mysql_data.date_range(
        self.start_date, self.end_date,
        timedelta(minutes=int(self.config['time']['time_step'])))

    tzinfo = pytz.timezone(self.config['time']['time_zone'])
    self.date_time = [di.replace(tzinfo=tzinfo) for di in d]
    self.time_steps = len(self.date_time)

    # need to align date time: make the albedo decay dates timezone-aware
    if 'date_method_start_decay' in self.config['albedo'].keys():
        self.config['albedo']['date_method_start_decay'] = \
            self.config['albedo']['date_method_start_decay'].replace(
                tzinfo=tzinfo)
        self.config['albedo']['date_method_end_decay'] = \
            self.config['albedo']['date_method_end_decay'].replace(
                tzinfo=tzinfo)

    # if a gridded dataset will be used
    self.gridded = False
    self.forecast_flag = False
    if 'gridded' in self.config:
        self.gridded = True
        if self.config['gridded']['data_type'] in [
                'hrrr_netcdf', 'hrrr_grib'
        ]:
            self.forecast_flag = self.config['gridded'][
                'hrrr_forecast_flag']

        # hours from start of day
        self.day_hour = self.start_date - pd.to_datetime(
            d[0].strftime("%Y%m%d"))
        self.day_hour = int(self.day_hour / np.timedelta64(1, 'h'))

    # future dates are only meaningful with gridded (forecast) data
    if ((self.start_date > datetime.now() and not self.gridded) or
            (self.end_date > datetime.now() and not self.gridded)):
        raise ValueError("A date set in the future can only be used with"
                         " WRF generated data!")

    # Initialize the distribute dict
    self.distribute = {}

    if self.config['system']['qotw']:
        self._logger.info(getqotw())

    self._logger.info('Started SMRF --> %s' % datetime.now())
    self._logger.info('Model start --> %s' % self.start_date)
    self._logger.info('Model end --> %s' % self.end_date)
    self._logger.info('Number of time steps --> %i' % self.time_steps)
def backup_input(data, config_obj):
    """
    Backs up input data files so a user can rerun a run with the exact data
    used for a run.

    Args:
        data: Pandas dataframe containing the station data
        config_obj: The config object produced by inicheck
    """
    # Work on a deep copy so the caller's config is left untouched
    bcfg = copy.deepcopy(config_obj)

    # Make the output dir
    backup_dir = os.path.join(bcfg.cfg['output']['out_location'],
                              'input_backup')
    if not os.path.isdir(backup_dir):
        os.mkdir(backup_dir)

    # Ensure a csv section exists so the backup can be re-read from disk
    if 'csv' not in bcfg.cfg.keys():
        bcfg.cfg['csv'] = {}

    # With a new section added, we need to remove the other data sections
    # bcfg.apply_recipes()

    if 'stations' in bcfg.cfg.keys():
        if 'client' in bcfg.cfg['stations']:
            del bcfg.cfg['stations']['client']

    # Output station data to CSV and point the config at each new file
    csv_var = [
        'metadata', 'air_temp', 'vapor_pressure', 'precip', 'wind_speed',
        'wind_direction', 'cloud_factor'
    ]
    for variable in csv_var:
        fname = os.path.join(backup_dir, variable + '.csv')
        getattr(data, variable).to_csv(fname)
        bcfg.cfg['csv'][variable] = fname

    # Copy topo files over to backup; these items are settings, not files
    ignore = [
        'northern_hemisphere', 'gradient_method', 'sky_view_factor_angles'
    ]
    for item in bcfg.cfg['topo'].keys():
        src = bcfg.cfg['topo'][item]

        # make not a list if length is 1
        if isinstance(src, list):
            src = mk_lst(src, unlst=True)

        # Avoid attempting to copy files that don't exist
        if item not in ignore and src is not None:
            dst = os.path.join(backup_dir, os.path.basename(src))
            bcfg.cfg["topo"][item] = dst
            copyfile(src, dst)

    # We dont want to backup the backup
    bcfg.cfg['output']['input_backup'] = False

    # Output inifile
    generate_config(bcfg, os.path.join(backup_dir, 'backup_config.ini'))