def make_vertical_plot(config_file):
    """
    Entry point for snowplot: read and validate the .ini config file, then
    assemble every requested profile into a single figure.

    Args:
        config_file: config file in .ini format and can be checked with inicheck
    """
    # Load and validate the user's configuration
    ucfg = get_user_config(config_file, modules=['snowplot'])
    warnings, errors = check_config(ucfg)
    print_config_report(warnings, errors)

    if errors:
        print("Errors in config file. Check report above.")
        sys.exit()

    # Write a fully-populated copy of the config into the output directory
    out_dir = ucfg.cfg['output']['output_dir']
    if not isdir(out_dir):
        mkdir(out_dir)
    generate_config(ucfg, join(out_dir, 'config_full.ini'))

    cfg = ucfg.cfg

    # All available profile templates, keyed by normalized class name
    profile_classes = get_checkers(module='snowplot.profiles',
                                   keywords='profile')

    # Map normalized section names back to the config sections that
    # requested them (underscores stripped, lower-cased)
    requested_profiles = OrderedDict(
        (section.replace('_', '').lower(), section)
        for section in cfg.keys()
        if section not in __non_data_sections__
    )

    # Instantiate each requested profile from its config section
    data = {}
    for profile_name, cls in profile_classes.items():
        if profile_name in requested_profiles:
            section = requested_profiles[profile_name]
            log.info("Building {} profile".format(section))
            data[profile_name] = cls(**cfg[section])

    # Render everything into the final figure
    build_figure(data, cfg)
def check(config, logger):
    """Validate a user configuration and abort the run on errors.

    Args:
        config (UserConfig): UserConfig object to check
        logger (logger): logger instance to send messages to
    """
    logger.info("Checking config file for issues...")

    cfg_warnings, cfg_errors = check_config(config)
    print_config_report(cfg_warnings, cfg_errors, logger=logger)

    # A config with errors is unusable; stop SMRF here
    if cfg_errors:
        logger.error("Errors in the config file. See configuration"
                     " status report above.")
        sys.exit()
def read_config_file(self, config_file):
    """
    Load, validate, and store the user's configuration.

    Args:
        config_file: either a path or a UserConfig instance
    """
    if isinstance(config_file, UserConfig):
        # Already parsed; unwrap it and remember the backing filename
        ucfg = config_file
        config_file = config_file.filename
    elif isinstance(config_file, str):
        if not os.path.isfile(config_file):
            raise Exception(
                'Configuration file does not exist --> {}'.format(
                    config_file))

        # The master config ships alongside this module
        core_dir = os.path.abspath(os.path.dirname(__file__))
        mcfg = MasterConfig(path=os.path.join(core_dir, self.CORE_CONFIG))

        # Parse the user's file against the master config
        ucfg = get_user_config(config_file, mcfg=mcfg)
    else:
        raise Exception('Config passed to PySnobal is neither file '
                        'name or UserConfig instance')

    # Report any problems found during validation
    warnings, errors = check_config(ucfg)
    print_config_report(warnings, errors)

    self.ucfg = ucfg
    self.config = self.ucfg.cfg

    # Bail out entirely if the config is unusable
    if errors:
        print("Errors in the config file. See configuration"
              " status report above.")
        sys.exit()
def read_config(self, config):
    """Read, validate, and store the combined SMRF + AWSM configuration.

    Args:
        config: path to a config file, or an inicheck UserConfig instance
    """
    if isinstance(config, str):
        # Path given: verify it exists, then parse it against the
        # combined SMRF + AWSM master config
        if not os.path.isfile(config):
            raise Exception('Configuration file does not exist --> {}'
                            .format(config))
        config_path = config
        try:
            combined_mcfg = MasterConfig(modules=['smrf', 'awsm'])
            # Read in the original users config
            self.ucfg = get_user_config(config_path, mcfg=combined_mcfg)
            self.configFile = config_path
        except UnicodeDecodeError as e:
            print(e)
            raise Exception(('The configuration file is not encoded in '
                             'UTF-8, please change and retry'))
    elif isinstance(config, UserConfig):
        self.ucfg = config
    else:
        raise Exception("""Config passed to AWSM is neither file """
                        """name nor UserConfig instance""")

    # Errors are fatal; warnings alone still print the report
    warnings, errors = check_config(self.ucfg)
    if errors:
        print_config_report(warnings, errors)
        print("Errors in the config file. "
              "See configuration status report above.")
        sys.exit()
    elif warnings:
        print_config_report(warnings, errors)

    self.config = self.ucfg.cfg
def __init__(self, config):
    """
    Initialize the model, read config file, start and end date, and logging

    Args:
        config: string path to the config file or inicheck UserConfig
            instance
    """
    # Master configs for both modules; their section keys are kept at the
    # end of __init__ (sec_awsm / sec_smrf) to split the combined config
    awsm_mcfg = MasterConfig(modules='awsm')
    smrf_mcfg = MasterConfig(modules='smrf')

    # read the config file and store
    if isinstance(config, str):
        if not os.path.isfile(config):
            raise Exception('Configuration file does not exist --> {}'
                            .format(config))
        configFile = config
        try:
            combined_mcfg = MasterConfig(modules=['smrf', 'awsm'])
            # Read in the original users config
            self.ucfg = get_user_config(configFile, mcfg=combined_mcfg)
            self.configFile = configFile
        except UnicodeDecodeError as e:
            print(e)
            raise Exception(('The configuration file is not encoded in '
                             'UTF-8, please change and retry'))
    elif isinstance(config, UserConfig):
        self.ucfg = config
        configFile = ''
    else:
        raise Exception(
            'Config passed to AWSM is neither file name nor UserConfig instance')

    # get the git version
    self.gitVersion = awsm_utils.getgitinfo()

    # create blank log and error log because logger is not initialized yet
    self.tmp_log = []
    self.tmp_err = []
    self.tmp_warn = []

    # Check the user config file for errors and report issues if any
    self.tmp_log.append("Checking config file for issues...")
    warnings, errors = check_config(self.ucfg)
    print_config_report(warnings, errors)
    self.config = self.ucfg.cfg

    # Exit AWSM if config file has errors
    # NOTE(review): sys.exit() is commented out, so a config with errors
    # only prints a message and execution continues -- confirm intended
    if len(errors) > 0:
        print("Errors in the config file. "
              "See configuration status report above.")
        # sys.exit()

    # ################## Decide which modules to run #####################
    self.do_smrf = self.config['awsm master']['run_smrf']
    #self.do_isnobal = self.config['awsm master']['run_isnobal']
    self.model_type = self.config['awsm master']['model_type']
    # self.do_smrf_ipysnobal = \
    #     self.config['awsm master']['run_smrf_ipysnobal']
    # self.do_ipysnobal = self.config['awsm master']['run_ipysnobal']

    # Forecasting only applies when gridded data is configured and SMRF runs
    self.do_forecast = False
    if 'gridded' in self.config and self.do_smrf:
        self.do_forecast = self.config['gridded']['hrrr_forecast_flag']

    # WARNING: The value here is inferred in SMRF.data.loadGrid. A
    # change here requires a change there
    self.n_forecast_hours = 18

    # Options for converting files
    self.do_make_in = self.config['awsm master']['make_in']
    self.do_make_nc = self.config['awsm master']['make_nc']
    # do report?
    # self.do_report = self.config['awsm master']['do_report']
    self.snowav_config = self.config['awsm master']['snowav_config']

    # options for masking isnobal
    self.mask_isnobal = self.config['awsm master']['mask_isnobal']

    # prompt for making directories
    self.prompt_dirs = self.config['awsm master']['prompt_dirs']

    # store smrf version if running smrf
    self.smrf_version = smrf.__version__

    # ################ Time information ##################
    self.start_date = pd.to_datetime(self.config['time']['start_date'])
    self.end_date = pd.to_datetime(self.config['time']['end_date'])
    self.time_step = self.config['time']['time_step']
    self.tmz = self.config['time']['time_zone']
    self.tzinfo = pytz.timezone(self.config['time']['time_zone'])
    # date to use for finding wy
    tmp_date = self.start_date.replace(tzinfo=self.tzinfo)
    tmp_end_date = self.end_date.replace(tzinfo=self.tzinfo)
    # find water year hour of start and end date
    self.start_wyhr = int(utils.water_day(tmp_date)[0]*24)
    self.end_wyhr = int(utils.water_day(tmp_end_date)[0]*24)
    # find start of water year (Oct 1 of the previous calendar year)
    tmpwy = utils.water_day(tmp_date)[1] - 1
    self.wy_start = pd.to_datetime('{:d}-10-01'.format(tmpwy))

    # ################ Store some paths from config file ##################
    # path to the base drive (i.e. /data/blizzard)
    if self.config['paths']['path_dr'] is not None:
        self.path_dr = os.path.abspath(self.config['paths']['path_dr'])
    else:
        print('No base path to drive given. Exiting now!')
        sys.exit()

    # name of your basin (i.e. Tuolumne)
    self.basin = self.config['paths']['basin']
    # water year of run
    self.wy = utils.water_day(tmp_date)[1]
    # if the run is operational or not
    self.isops = self.config['paths']['isops']
    # name of project if not an operational run
    self.proj = self.config['paths']['proj']
    # check for project description
    self.desc = self.config['paths']['desc']
    # find style for folder date stamp
    self.folder_date_style = self.config['paths']['folder_date_style']

    # setting to output in seperate daily folders
    self.daily_folders = self.config['awsm system']['daily_folders']
    # NOTE(review): self.run_smrf_ipysnobal is never assigned in this
    # method (the assignment above is commented out) -- this branch would
    # raise AttributeError if daily_folders is set; confirm it is set
    # elsewhere before this runs
    if self.daily_folders and not self.run_smrf_ipysnobal:
        raise ValueError('Cannot run daily_folders with anything other'
                         ' than run_smrf_ipysnobal')

    if self.do_forecast:
        self.tmp_log.append('Forecasting set to True')

        # self.fp_forecastdata = self.config['gridded']['wrf_file']
        # if self.fp_forecastdata is None:
        #     self.tmp_err.append('Forecast set to true, '
        #                         'but no grid file given')
        #     print("Errors in the config file. See configuration "
        #           "status report above.")
        #     print(self.tmp_err)
        #     sys.exit()

        if self.config['system']['threading']:
            # Can't run threaded smrf if running forecast_data
            self.tmp_err.append('Cannot run SMRF threaded with'
                                ' gridded input data')
            print(self.tmp_err)
            sys.exit()

    # Time step mass thresholds for iSnobal (normal, medium, small)
    self.mass_thresh = []
    self.mass_thresh.append(self.config['grid']['thresh_normal'])
    self.mass_thresh.append(self.config['grid']['thresh_medium'])
    self.mass_thresh.append(self.config['grid']['thresh_small'])

    # threads for running iSnobal
    self.ithreads = self.config['awsm system']['ithreads']
    # how often to output form iSnobal
    self.output_freq = self.config['awsm system']['output_frequency']
    # number of timesteps to run if ou don't want to run the whole thing
    self.run_for_nsteps = self.config['awsm system']['run_for_nsteps']
    # pysnobal output variables, normalized to lower case
    self.pysnobal_output_vars = self.config['awsm system']['variables']
    self.pysnobal_output_vars = [wrd.lower()
                                 for wrd in self.pysnobal_output_vars]
    # snow and emname
    self.snow_name = self.config['awsm system']['snow_name']
    self.em_name = self.config['awsm system']['em_name']

    # options for restarting iSnobal
    self.restart_crash = False
    if self.config['isnobal restart']['restart_crash']:
        self.restart_crash = True
        # self.new_init = self.config['isnobal restart']['new_init']
        self.depth_thresh = self.config['isnobal restart']['depth_thresh']
        self.restart_hr = \
            int(self.config['isnobal restart']['wyh_restart_output'])
        self.restart_folder = self.config['isnobal restart']['output_folders']

    # iSnobal active layer
    self.active_layer = self.config['grid']['active_layer']

    # if we are going to run ipysnobal with smrf
    if self.model_type in ['ipysnobal', 'smrf_ipysnobal']:
        self.ipy_threads = self.ithreads
        self.ipy_init_type = \
            self.config['files']['init_type']
        self.forcing_data_type = \
            self.config['ipysnobal']['forcing_data_type']

    # parameters needed for restart procedure
    self.restart_run = False
    if self.config['isnobal restart']['restart_crash']:
        self.restart_run = True
        # find restart hour datetime
        reset_offset = pd.to_timedelta(self.restart_hr, unit='h')
        # set a new start date for this run
        self.restart_date = self.wy_start + reset_offset
        # NOTE(review): this logs start_date, not the restart_date just
        # computed -- possibly intended to log self.restart_date; confirm
        self.tmp_log.append('Restart date is {}'.format(self.start_date))

    # read in update depth parameters
    self.update_depth = False
    if 'update depth' in self.config:
        self.update_depth = self.config['update depth']['update']
    if self.update_depth:
        self.update_file = self.config['update depth']['update_file']
        self.update_buffer = self.config['update depth']['buffer']
        self.flight_numbers = self.config['update depth']['flight_numbers']
        # if flights to use is not list, make it a list
        if self.flight_numbers is not None:
            if not isinstance(self.flight_numbers, list):
                self.flight_numbers = [self.flight_numbers]

    # list of sections releated to AWSM
    # These will be removed for smrf config
    self.sec_awsm = awsm_mcfg.cfg.keys()
    self.sec_smrf = smrf_mcfg.cfg.keys()

    # Make rigid directory structure
    self.mk_directories()

    # ################ Topo data for iSnobal ##################
    # get topo stats
    self.csys = self.config['grid']['csys'].upper()
    self.nbits = int(self.config['grid']['nbits'])
    self.soil_temp = self.config['soil_temp']['temp']
    # get topo class
    # NOTE(review): self.pathdd is read here and below -- presumably set
    # by mk_directories(); verify
    self.topo = mytopo(self.config['topo'], self.mask_isnobal,
                       self.model_type, self.csys, self.pathdd)

    # ################ Generate config backup ##################
    # if self.config['output']['input_backup']:
    # set location for backup and output backup of awsm sections
    config_backup_location = \
        os.path.join(self.pathdd, 'awsm_config_backup.ini')
    generate_config(self.ucfg, config_backup_location)

    # create log now that directory structure is done
    self.createLog()

    # if we have a model, initialize it
    if self.model_type is not None:
        self.myinit = modelInit(self._logger, self.config, self.topo,
                                self.start_wyhr, self.pathro, self.pathrr,
                                self.pathinit, self.wy_start)
def __init__(self, config):
    """Katana class created to wrap all functionality needed to run
    WindNinja in the context of the USDA ARS snow-water supply modeling
    workflow

    Arguments:
        config {string} -- path to the config file or an inicheck
            UserConfig object
    """
    # Accept either a path to a config file or a pre-parsed UserConfig
    if isinstance(config, str):
        if not os.path.isfile(config):
            raise Exception(
                'Configuration file does not exist --> {}'.format(config))
        try:
            # Read in the original users config
            self.ucfg = get_user_config(config, modules='katana')
        except UnicodeDecodeError as e:
            print(e)
            raise Exception(('The configuration file is not encoded in '
                             'UTF-8, please change and retry'))
    elif isinstance(config, UserConfig):
        self.ucfg = config
    else:
        # NOTE(review): the backslash line-continuation inside this string
        # literal embeds the next line's leading whitespace in the message
        raise Exception('Config passed to Katana is neither file name nor \
            UserConfig instance')

    self.config_file = self.ucfg.filename

    # Validate the config and stop here if it has errors
    warnings, errors = check_config(self.ucfg)
    print_config_report(warnings, errors)

    self.config = self.ucfg.cfg

    if len(errors) > 0:
        raise Exception("Error in config file. Check report above.")

    # Wall-clock start used for run timing
    self.start_timing = datetime.now()

    ################################################
    # Start parsing the arguments
    ################################################
    self.parse_config()

    ################################################
    # Create logger
    ################################################
    self.create_log()

    ################################################
    # Initialize the topo
    ################################################
    self.topo = Topo(self.config)

    ################################################
    # Initialize the input data
    ################################################
    self.initialize_input_data()

    self._logger.debug('Katana initialized')
def __init__(self, core_config, config_file, basin=None):
    """Read in and validate the docker-airflow config file.

    Args:
        core_config: path to the inicheck master (core) config file
        config_file: path to the user's config file
        basin: optional name of a basin section in the config; when given,
            that section is parsed into ``self.basin_settings``
    """
    # Parse the user config against the master config, then apply
    # inicheck recipes and casting so values have their final types
    mcfg = MasterConfig(path=core_config)
    ucfg = get_user_config(config_file, mcfg=mcfg)
    ucfg.apply_recipes()
    ucfg = cast_all_variables(ucfg, ucfg.mcfg)

    # Report issues but keep going
    # NOTE(review): errors do not abort here -- confirm that is intentional
    warnings, errors = check_config(ucfg)
    if errors != [] or warnings != []:
        print_config_report(warnings, errors)

    def dag_default_args(section):
        """Build an Airflow default-args dict from one config section.

        The same shape was previously duplicated for basin_arguments and
        snowav_arguments; this helper keeps the two in sync.
        """
        sec = ucfg.cfg[section]
        return {
            "owner": sec['owner'],
            "depends_on_past": sec['depends_on_past'],
            "start_date": sec['start_date'],
            "email": sec['email'],
            "email_on_failure": sec['email_on_failure'],
            "email_on_retry": sec['email_on_retry'],
            "retries": sec['retries'],
            "retry_delay": timedelta(seconds=sec['retry_delay'])
        }

    # from basin_arguments section
    self.args = dag_default_args('basin_arguments')
    # from snowav_arguments section
    self.snowav_args = dag_default_args('snowav_arguments')

    # settings: docker images are stored as separate image/tag options and
    # combined here into "image:tag" strings
    settings = ucfg.cfg['settings']
    self.settings = {
        "awsm_image": settings['awsm_image'] + ':' + settings['awsm_tag'],
        "katana_image": settings['katana_image'] + ':' + settings['katana_tag'],
        "forecast_path": settings['forecast_path'],
        "geojson": settings['geojson'],
        "docker_call_backup": settings['docker_call_backup'],
        "windninja_nthreads": settings['windninja_nthreads'],
        "katana_zone_letter": settings['katana_zone_letter'],
        "katana_zone_number": settings['katana_zone_number'],
        "wy": settings['wy'],
        "backup_path": settings['backup_path'],
        "snowav_image": settings['snowav_image'] + ':' + settings['snowav_tag']
    }

    # basin sections: copied verbatim from the named section
    if basin is not None:
        basin_section = ucfg.cfg[basin]
        self.basin_settings = {
            key: basin_section[key]
            for key in ("basin", "base_path", "awsm_config",
                        "retry_awsm_config", "snowav_config",
                        "katana_pixel", "awsm_path", "topo_file",
                        "results_path")
        }
import matplotlib.pyplot as plt
import plotables as pltz
import time
# from pylab import *
# from scipy.optimize import curve_fit

# Script driver: load a hard-coded pair of config files, validate them,
# and run the gdal utilities diff workflow.
# NOTE(review): paths are hard-coded to one user's home directory --
# consider taking them from the command line
# Make command line to enter config file
filepath_cfg = '/home/zachuhlmann/code/code/gdal_CL_utilities_config.ini'
filepath_mcfg = '/home/zachuhlmann/code/code/gdal_CL_utilities_master_config.ini'
ucfg = get_user_config(filepath_cfg, master_files = filepath_mcfg, checking_later = False)
warnings, errors = check_config(ucfg)
print_config_report(warnings, errors)
#checking_later allows not to crash with errors.
cfg = ucfg.cfg

# Build the utility object from the three configured file paths
#check that files exist in inicheck
utils_obj = gdalUtils.Flags(cfg['files']['file_path_in_date1'],
                            cfg['files']['file_path_in_date2'],
                            cfg['files']['file_path_out'])
# Clip both rasters to their overlapping extent, then difference them
utils_obj.clip_extent_overlap()
utils_obj.make_diff_mat()

# Parameters controlling which difference arrays to extract
name = cfg['obtain_difference_arrays']['name']
action = cfg['obtain_difference_arrays']['action']
operator = cfg['obtain_difference_arrays']['operator']
val = cfg['obtain_difference_arrays']['val']
def __init__(self, config_file, awsm=None, end_date=None):
    """Read and validate the snowav config file and store every option.

    Args:
        config_file: path to the snowav .ini config file
        awsm: optional AWSM instance; when given and [run] directory is
            not set, run directories are taken from awsm.pathr
        end_date: optional override for the [run] end_date option
    """
    print('Reading {} and loading files...'.format(config_file))

    self.config_file = config_file

    # Parse against the snowav master config, apply recipes and casting
    snowav_mcfg = MasterConfig(modules='snowav')
    ucfg = get_user_config(self.config_file, mcfg=snowav_mcfg)
    ucfg.apply_recipes()
    ucfg = cast_all_variables(ucfg, ucfg.mcfg)
    self.snowav_path = get_snowav_path()

    # NOTE(review): errors do not abort here, only print the report --
    # confirm that is intentional
    warnings, errors = check_config(ucfg)
    if errors != [] or warnings != []:
        print_config_report(warnings, errors)

    # Messages accumulated before the logger exists
    self.tmp_log = []
    self.tmp_err = []
    self.tmp_warn = []
    self.proc_time_start = datetime.now()

    ####################################################
    #            snowav                                #
    ####################################################
    self.loglevel = ucfg.cfg['snowav']['log_level'].upper()
    self.log_to_file = ucfg.cfg['snowav']['log_to_file']
    self.save_path = ucfg.cfg['snowav']['save_path']
    self.units = ucfg.cfg['snowav']['units']
    self.elev_bins = ucfg.cfg['snowav']['elev_bins']
    self.directory = ucfg.cfg['snowav']['directory']
    self.dempath = ucfg.cfg['snowav']['dempath']
    self.run_name = ucfg.cfg['snowav']['run_name']
    self.plotorder = ucfg.cfg['snowav']['masks']
    self.plotlabels = ucfg.cfg['snowav']['plotlabels']
    self.report_only = ucfg.cfg['snowav']['report_only']

    ####################################################
    #            run                                   #
    ####################################################
    self.dplcs = ucfg.cfg['run']['decimals']
    self.start_date = ucfg.cfg['run']['start_date']
    self.end_date = ucfg.cfg['run']['end_date']

    # A caller-supplied end_date wins over the config value
    if end_date is not None:
        self.end_date = end_date
        self.tmp_log.append(' Overriding config end_date with '
                            '{} given with snowav call'.format(end_date))

        if self.end_date <= self.start_date:
            raise Exception(
                'end_date {} earlier than start_date {}'.format(
                    self.end_date, self.start_date))

    if self.start_date is not None and self.end_date is not None:
        self.start_date = self.start_date
        self.end_date = self.end_date

        if self.start_date >= self.end_date:
            self.tmp_log.append(' Error: [run] start_date >= end_date')
            raise Exception('[run] start_date >= [run] end_date')
    else:
        self.tmp_log.append(' [run] start_date and/or end_date was not '
                            'defined in config file, will be assigned '
                            'by available dates in directory')

    self.all_subdirs = ucfg.cfg['run']['all_subdirs']

    # Determine the list of run directories, either from the awsm
    # instance or from [run] directory
    if (ucfg.cfg['run']['directory'] is None) and (awsm is not None):
        if self.all_subdirs is True:
            self.run_dirs = ([
                awsm.pathr + s for s in os.listdir(awsm.pathr)
                if (os.path.isdir(awsm.pathr + s))
            ])
        else:
            self.run_dirs = awsm.pathr
            if type(self.run_dirs) != list:
                self.run_dirs = [self.run_dirs]
    else:
        directory = ucfg.cfg['run']['directory']

        if len(directory) == 1:
            directory = directory[0]

        if self.all_subdirs is True:
            self.run_dirs = ([
                directory + s for s in os.listdir(directory)
                if (os.path.isdir(directory + s))
            ])
        else:
            self.run_dirs = ucfg.cfg['run']['directory']
            if type(self.run_dirs) != list:
                self.run_dirs = [self.run_dirs]

    self.run_dirs.sort()

    ####################################################
    #            database                              #
    ####################################################
    self.mysql = ucfg.cfg['database']['mysql']
    self.db_user = ucfg.cfg['database']['user']
    self.db_password = ucfg.cfg['database']['password']
    self.db_host = ucfg.cfg['database']['host']
    self.db_port = ucfg.cfg['database']['port']
    self.db_convert = ucfg.cfg['database']['convert_ws']
    self.add_basins = ucfg.cfg['database']['add_basins']
    self.db_overwrite = ucfg.cfg['database']['overwrite']
    self.properties = ucfg.cfg['database']['properties']
    self.sqlite = ucfg.cfg['database']['sqlite']

    # Warn if any of the standard bands is missing from properties
    base_bands = [
        'swi_z', 'evap_z', 'swe_z', 'depth', 'density', 'coldcont',
        'precip_z'
    ]
    for band in base_bands:
        if band not in self.properties:
            self.tmp_log.append(' WARNING! Config option [database] '
                                'properties does not contain '
                                '{}'.format(band))

    # mysql requires the full set of credentials
    if ((self.mysql is not None)
            and ((self.db_user is None) or (self.db_password is None)
                 or (self.db_host is None) or (self.db_port is None))):
        raise Exception('If using config option [database] mysql, must '
                        'also supply user, password, host, and port')

    if self.sqlite is not None:
        if not os.path.isdir(os.path.dirname(self.sqlite)):
            raise Exception('{} does not contain a valid base '
                            'path'.format(self.sqlite))

        self.sqlite = 'sqlite:///' + self.sqlite
        self.db_type = 'sqlite'

        # mysql and sqlite are mutually exclusive
        if self.mysql is not None:
            raise Exception('Config option [database] section contains '
                            'both "mysql" and "sqlite" entries, pick one.')
    else:
        self.db_type = 'sql'

    ####################################################
    #            validate                              #
    ####################################################
    self.val_stns = ucfg.cfg['validate']['stations']
    self.val_lbls = ucfg.cfg['validate']['labels']
    self.val_client = ucfg.cfg['validate']['client']
    self.wxdb_user = ucfg.cfg['validate']['user']
    self.wxdb_password = ucfg.cfg['validate']['password']
    self.wxdb_host = ucfg.cfg['validate']['host']
    self.wxdb_port = ucfg.cfg['validate']['port']
    self.point_values = ucfg.cfg['validate']['point_values']
    self.point_values_csv = ucfg.cfg['validate']['point_values_csv']
    self.point_values_date = ucfg.cfg['validate']['point_values_date']
    self.point_values_properties = ucfg.cfg['validate'][
        'point_values_properties']
    self.point_values_heading = ucfg.cfg['validate'][
        'point_values_heading']
    self.point_values_settings = ucfg.cfg['validate'][
        'point_values_settings']

    # First ten settings are numeric; cast them to int in place
    for n in range(0, 10):
        self.point_values_settings[n] = int(self.point_values_settings[n])

    # point_values needs both a csv and a date to be usable
    if self.point_values and self.point_values_csv is None:
        self.point_values = False
        self.tmp_log.append(' Config option [validate] point_values_csv '
                            'was not supplied, point_values being set '
                            'to False')

    if self.point_values and self.point_values_date is None:
        self.point_values = False
        self.tmp_log.append(' Config option [validate] point_values_date '
                            'was not supplied, point_values being set '
                            'to False')

    ####################################################
    #            diagnostics                           #
    ####################################################
    self.diagnostics_flag = ucfg.cfg['diagnostics']['diagnostics']
    self.diag_basins = ucfg.cfg['diagnostics']['basins']
    self.diag_limit = ucfg.cfg['diagnostics']['limit']

    # Every diagnostics basin must also be a [snowav] mask
    if self.diagnostics_flag:
        if self.diag_basins is not None:
            for basin in self.diag_basins:
                if basin not in self.plotorder:
                    self.tmp_log.append(' Config [diagnostics] basin: "{}"'
                                        ' does not match [snowav] masks: '
                                        '"{}", diagnostics set to '
                                        'False'.format(
                                            basin, self.plotorder))
                    self.diagnostics_flag = False

    if 'snow_line' not in self.properties and self.diagnostics_flag:
        self.diagnostics_flag = False
        self.tmp_log.append(' Required properties in [database] properties'
                            ' for [diagnostics] does not exist, setting '
                            'diagnostics: False')

    self.inputs_flag = ucfg.cfg['diagnostics']['inputs_table']
    self.inputs_variables = ucfg.cfg['diagnostics']['inputs_variables']
    self.inputs_percentiles = ucfg.cfg['diagnostics']['inputs_percentiles']
    self.inputs_methods = ucfg.cfg['diagnostics']['inputs_methods']
    self.inputs_basins = ucfg.cfg['diagnostics']['inputs_basins']

    if self.inputs_basins is not None and self.plotorder is not None:
        for basin in self.inputs_basins:
            if basin not in self.plotorder:
                self.tmp_log.append(
                    ' Config option [diagnostics] '
                    'inputs_basins: {} does not match what '
                    'was supplied in [snowav] masks: {}, '
                    'inputs set to '
                    'False'.format(basin, self.plotorder))
                self.inputs_flag = False

    if self.inputs_flag:
        s = [x + ', ' for x in self.inputs_variables]
        self.tmp_log.append(' Using variables {} for inputs '
                            'summary'.format(''.join(s)))

        s = [x + ', ' for x in self.inputs_methods]
        self.tmp_log.append(' Using methods {} for inputs '
                            'summary'.format(''.join(s)))

    ####################################################
    #            plots                                 #
    ####################################################
    self.dpi = ucfg.cfg['plots']['dpi']
    self.depth_clip = ucfg.cfg['plots']['depth_clip']
    self.clims_percent = ucfg.cfg['plots']['clims_percent']
    self.subs_fig = ucfg.cfg['plots']['subs_fig']
    self.density_flag = ucfg.cfg['plots']['density']
    self.swi_flag = ucfg.cfg['plots']['swi']
    self.current_image_flag = ucfg.cfg['plots']['current_image']
    self.image_change_flag = ucfg.cfg['plots']['image_change']
    self.flt_image_change_clims = ucfg.cfg['plots'][
        'flt_image_change_clims']
    self.cold_content_flag = ucfg.cfg['plots']['cold_content']
    self.swe_volume_flag = ucfg.cfg['plots']['swe_volume']
    self.basin_total_flag = ucfg.cfg['plots']['basin_total']
    self.stn_validate_flag = ucfg.cfg['plots']['stn_validate']
    self.nash_sut_flag = ucfg.cfg['plots']['disp_nash_sut']
    self.stns_file = ucfg.cfg['plots']['stns_file']
    self.inputs_fig_flag = ucfg.cfg['plots']['inputs']
    self.plots_inputs_variables = ucfg.cfg['plots']['inputs_variables']
    self.compare_runs_flag = ucfg.cfg['plots']['compare_runs']
    self.compare_run_names = ucfg.cfg['plots']['compare_run_names']
    self.compare_run_labels = ucfg.cfg['plots']['compare_run_labels']
    self.compare_run_wys = ucfg.cfg['plots']['compare_run_wys']
    self.precip_depth_flag = ucfg.cfg['plots']['precip_depth']
    self.basin_detail_flag = ucfg.cfg['plots']['basin_detail']
    self.update_file = ucfg.cfg['plots']['update_file']
    self.figsize = ucfg.cfg['plots']['fig_size']
    self.write_properties = ucfg.cfg['plots']['write_properties']
    self.point_values_flag = ucfg.cfg['plots']['point_values']

    # Clamp the flight image change color limits to [0, 100]
    if self.flt_image_change_clims[0] < 0:
        self.flt_image_change_clims[0] = 0

    if self.flt_image_change_clims[1] > 100:
        self.flt_image_change_clims[1] = 100

    if (self.write_properties is not None
            and type(self.write_properties) != list):
        self.write_properties = [self.write_properties]

    # update_numbers are 1-based in the config, 0-based internally
    numbers = ucfg.cfg['plots']['update_numbers']
    if numbers is not None:
        if type(numbers) != list:
            numbers = [numbers]
        self.update_numbers = [x - 1 for x in numbers]
    else:
        self.update_numbers = None

    # compare_runs needs names, labels, and water years, all equal length
    if (self.compare_runs_flag
            and ((self.compare_run_names is None)
                 or (self.compare_run_labels is None)
                 or self.compare_run_wys is None)):
        self.tmp_log.append(' Config option [plots] compare_runs set to '
                            'True, but one of compare_run_names, '
                            'compare_run_labels, or compare_run_wys is '
                            'empty, setting compare_runs to False')
        self.compare_runs_flag = False

    if (self.compare_runs_flag
            and (len(self.compare_run_names) !=
                 len(self.compare_run_labels))):
        self.tmp_log.append(' Config option [plots] compare_runs set to '
                            'True, must supply equal length '
                            'compare_run_names and compare_run_labels, '
                            'resetting compare_runs to False')
        self.compare_runs_flag = False

    # a depth-update file implies flight figures
    if self.update_file is not None:
        self.flt_flag = True
    else:
        self.flt_flag = False

    # NOTE(review): because `and` binds tighter than `or`, the
    # stn_validate_flag condition can fire even when stn_validate is
    # False -- confirm the intended grouping
    if (self.stn_validate_flag and (self.val_client is None)
            or (self.val_stns is None) or (self.val_lbls is None)
            or (self.wxdb_user is None) or (self.wxdb_password is None)):
        self.tmp_log.append(' Config option [plots] stn_validate is being '
                            'set to False')
        self.stn_validate_flag = False

    if len(self.point_values_settings) != 14:
        self.tmp_log.append(' Expected [validate] point_values_settings '
                            'to have 14 values, point_values set to False')
        self.point_values_flag = False

    # NOTE(review): removing items from the list being iterated can skip
    # elements when two bad values are adjacent -- consider iterating a copy
    for var in self.plots_inputs_variables:
        if var not in self.inputs_variables:
            self.plots_inputs_variables.remove(var)
            self.tmp_log.append(' Config option [plots] inputs_variables '
                                'value {} not present in [diagnostics] '
                                'inputs_variables, being '
                                'removed'.format(var))

    ####################################################
    #            report                                #
    ####################################################
    self.report_flag = ucfg.cfg['report']['report']
    self.print_latex = ucfg.cfg['report']['print_latex']
    self.report_name = ucfg.cfg['report']['file']
    self.rep_title = ucfg.cfg['report']['title']
    self.rep_path = ucfg.cfg['report']['save_path']
    self.env_path = ucfg.cfg['report']['env_path']
    self.templ_path = ucfg.cfg['report']['templ_path']
    self.tex_file = ucfg.cfg['report']['tex_file']
    self.summary_file = ucfg.cfg['report']['summary']
    self.figs_tpl_path = ucfg.cfg['report']['figs_tpl_path']
    self.flight_figs = ucfg.cfg['report']['flight_figs']
    self.tables = ucfg.cfg['report']['tables']
    self.report_diagnostics = ucfg.cfg['report']['diagnostics']
    self.report_diagnostics_day = ucfg.cfg['report']['diagnostics_day']
    self.rep_dplcs = ucfg.cfg['report']['decimals']

    # report diagnostics depend on both the inputs figure and diagnostics
    if (self.report_diagnostics
            and (not self.inputs_fig_flag or not self.diagnostics_flag)):
        self.tmp_log.append(" [report] diagnostics: True, but must also "
                            "have [plots] inputs: True and [diagnostics] "
                            "diagnostics: True, setting to False")
        self.report_diagnostics = False

    # diagnostics_day limits which weekday the diagnostics section runs on
    if self.report_diagnostics and self.report_diagnostics_day[0] != 'any':
        if (calendar.day_name[datetime.now().weekday()]
                not in self.report_diagnostics_day):
            self.report_diagnostics = False
            self.tmp_log.append(" Per [report] diagnostics_day: {}, "
                                "setting diagnostics: "
                                "False".format(
                                    self.report_diagnostics_day))

    # Each report section is forced off if its plot is off
    self.rep_swi_flag = ucfg.cfg['report']['swi']
    if not self.swi_flag:
        self.rep_swi_flag = False

    self.rep_image_change_flag = ucfg.cfg['report']['image_change']
    if not self.image_change_flag:
        self.rep_image_change_flag = False

    self.rep_cold_content_flag = ucfg.cfg['report']['cold_content']
    if not self.cold_content_flag:
        self.rep_cold_content_flag = False

    self.rep_swe_volume_flag = ucfg.cfg['report']['swe_volume']
    if not self.swe_volume_flag:
        self.rep_swe_volume_flag = False

    self.rep_basin_total_flag = ucfg.cfg['report']['basin_total']
    if not self.basin_total_flag:
        self.rep_basin_total_flag = False

    self.rep_stn_validate_flag = ucfg.cfg['report']['stn_validate']
    if not self.stn_validate_flag:
        self.rep_stn_validate_flag = False

    self.rep_compare_runs_flag = ucfg.cfg['report']['compare_runs']
    if not self.compare_runs_flag:
        self.rep_compare_runs_flag = False

    self.rep_precip_depth_flag = ucfg.cfg['report']['precip_depth']
    if not self.precip_depth_flag:
        self.rep_precip_depth_flag = False

    # check paths to see if they need default snowav path
    if self.env_path is None:
        self.env_path = os.path.abspath(
            os.path.join(snowav.__path__[0],
                         "report/template/section_text"))

    if self.templ_path is None:
        self.templ_path = os.path.abspath(
            os.path.join(snowav.__path__[0], "report/template"))

    if self.summary_file is None:
        self.summary_file = os.path.abspath(
            os.path.join(
                snowav.__path__[0],
                "report/template/section_text/report_summary.txt"))

    if self.tex_file is None:
        self.tex_file = os.path.abspath(
            os.path.join(snowav.__path__[0],
                         "report/template/snowav_report.text"))

    if self.figs_tpl_path is None:
        self.figs_tpl_path = os.path.abspath(
            os.path.join(snowav.__path__[0], "report/figs"))

    ####################################################
    #            query                                 #
    ####################################################
    self.query_flag = ucfg.cfg['query']['query']
    self.q_basins = ucfg.cfg['query']['basins']
    self.q_value = ucfg.cfg['query']['value']
    self.q_run_name = ucfg.cfg['query']['run_name']
    self.q_print_all_runs = ucfg.cfg['query']['print_all_runs']
    self.q_start_date = ucfg.cfg['query']['start_date']
    self.q_end_date = ucfg.cfg['query']['end_date']
    self.q_total = ucfg.cfg['query']['total']
    self.q_output = ucfg.cfg['query']['output']
    self.q_csv_base_path = ucfg.cfg['query']['csv_base_path']
    self.q_database = ucfg.cfg['query']['database']

    ####################################################
    #            inflow                                #
    ####################################################
    self.inflow_flag = ucfg.cfg['inflow']['inflow']
    self.inflow_data = ucfg.cfg['inflow']['inflow_data']
    self.summary_csv = ucfg.cfg['inflow']['summary_csv']
    self.inflow_headings = ucfg.cfg['inflow']['inflow_headings']
    self.basin_headings = ucfg.cfg['inflow']['basin_headings']
    self.sheet_name = ucfg.cfg['inflow']['sheet_name']
    self.skiprows = ucfg.cfg['inflow']['skiprows']
    self.overwrite = ucfg.cfg['inflow']['overwrite']
    self.file_base = ucfg.cfg['inflow']['file_base']
    self.date_idx = ucfg.cfg['inflow']['date_idx']
    self.convert = ucfg.cfg['inflow']['convert']

    # Keep the parsed config for later use (e.g. config backup)
    self.ucfg = ucfg
def __init__(self, config, external_logger=None):
    """
    Initialize the model: read and validate the config file, set up
    logging and output directories, and resolve the model time span.

    Args:
        config: path to a config file (str) or an already-parsed
            UserConfig instance
        external_logger: optional logger instance to use instead of
            creating a new SMRF logger

    Raises:
        Exception: if the config file does not exist or is neither a
            path nor a UserConfig
        ValueError: if a start/end date lies in the future without a
            gridded dataset
    """

    # read the config file and store
    if isinstance(config, str):
        if not os.path.isfile(config):
            raise Exception(
                'Configuration file does not exist --> {}'.format(config))
        self.configFile = config

        # Read in the original users config
        ucfg = get_user_config(config, modules='smrf')

    elif isinstance(config, UserConfig):
        # config already parsed by the caller; keep its file name
        ucfg = config
        self.configFile = config.filename

    else:
        raise Exception('Config passed to SMRF is neither file name nor '
                        ' UserConfig instance')

    # start logging; only build an SMRF logger when the caller did not
    # supply one
    if external_logger is None:
        self.smrf_logger = logger.SMRFLogger(ucfg.cfg['system'])
        self._logger = logging.getLogger(__name__)
    else:
        self._logger = external_logger

    # add the title (presumably logs the SMRF banner — defined elsewhere
    # on the class)
    self.title(2)

    # Make the output directory if it does not exist
    out = ucfg.cfg['output']['out_location']
    os.makedirs(out, exist_ok=True)

    # Check the user config file for errors and report issues if any
    self._logger.info("Checking config file for issues...")
    warnings, errors = check_config(ucfg)
    print_config_report(warnings, errors, logger=self._logger)

    self.ucfg = ucfg
    self.config = self.ucfg.cfg

    # Exit SMRF if config file has errors
    if len(errors) > 0:
        self._logger.error("Errors in the config file. See configuration"
                           " status report above.")
        sys.exit()

    # Write a fully-populated copy of the config to the output dir
    full_config_out = abspath(join(out, 'config.ini'))
    self._logger.info("Writing config file with full options.")
    generate_config(self.ucfg, full_config_out)

    # Process the system variables: every key in [system] becomes an
    # attribute on this instance (e.g. time_zone, qotw)
    for k, v in self.config['system'].items():
        setattr(self, k, v)

    # presumably sets self.start_date, self.end_date, self.time_steps —
    # defined elsewhere on the class; those attributes are read below
    self._setup_date_and_time()

    # need to align date time: make the albedo decay dates timezone-aware
    # so they compare cleanly with the model dates
    if 'date_method_start_decay' in self.config['albedo'].keys():
        self.config['albedo']['date_method_start_decay'] = \
            self.config['albedo']['date_method_start_decay'].replace(
                tzinfo=self.time_zone)
        self.config['albedo']['date_method_end_decay'] = \
            self.config['albedo']['date_method_end_decay'].replace(
                tzinfo=self.time_zone)

    # if a gridded dataset will be used
    self.gridded = False
    self.forecast_flag = False
    self.hrrr_data_timestep = False
    if 'gridded' in self.config:
        self.gridded = True
        if self.config['gridded']['data_type'] in ['hrrr_grib']:
            # True when HRRR data is to be loaded one timestep at a time
            self.hrrr_data_timestep = \
                self.config['gridded']['hrrr_load_method'] == 'timestep'

    # Future dates are only valid when driven by gridded (e.g. WRF) data
    now = datetime.now().astimezone(self.time_zone)
    if ((self.start_date > now and not self.gridded) or
            (self.end_date > now and not self.gridded)):
        raise ValueError("A date set in the future can only be used with"
                         " WRF generated data!")

    # Initialize the distribute dict
    self.distribute = {}

    if self.config['system']['qotw']:
        self._logger.info(getqotw())

    self._logger.info('Started SMRF --> %s' % now)
    self._logger.info('Model start --> %s' % self.start_date)
    self._logger.info('Model end --> %s' % self.end_date)
    self._logger.info('Number of time steps --> %i' % self.time_steps)
def __init__(self, config, external_logger=None):
    """
    Initialize the model: read and validate the config file, configure
    logging, create output directories, and resolve the model time span.

    Args:
        config: path to a config file (str) or an already-parsed
            UserConfig instance
        external_logger: optional logger instance; when None, logging is
            configured here from the [system] config section

    Raises:
        Exception: if the config file does not exist or is neither a
            path nor a UserConfig
        ValueError: if the configured log level is invalid, or if a
            start/end date lies in the future without a gridded dataset
    """
    # read the config file and store
    if isinstance(config, str):
        if not os.path.isfile(config):
            raise Exception(
                'Configuration file does not exist --> {}'.format(config))
        configFile = config

        # Read in the original users config
        ucfg = get_user_config(config, modules='smrf')

    elif isinstance(config, UserConfig):
        ucfg = config
        configFile = config.filename

    else:
        raise Exception('Config passed to SMRF is neither file name nor '
                        ' UserConfig instance')

    # start logging; only configure logging here when the caller did not
    # hand a logger in (use `is None`, not `== None`)
    if external_logger is None:
        if 'log_level' in ucfg.cfg['system']:
            loglevel = ucfg.cfg['system']['log_level'].upper()
        else:
            loglevel = 'INFO'

        numeric_level = getattr(logging, loglevel, None)
        if not isinstance(numeric_level, int):
            raise ValueError('Invalid log level: %s' % loglevel)

        # setup the logging; a relative log file path is resolved
        # relative to the config file's directory
        logfile = None
        if ucfg.cfg['system']['log_file'] is not None:
            logfile = ucfg.cfg['system']['log_file']
            if not os.path.isabs(logfile):
                logfile = abspath(join(
                    dirname(configFile),
                    ucfg.cfg['system']['log_file']))

            if not os.path.isdir(dirname(logfile)):
                os.makedirs(dirname(logfile))

            if not os.path.isfile(logfile):
                # create an empty log file so basicConfig can open it
                # (`with` closes it; no explicit close needed)
                with open(logfile, 'w+'):
                    pass

        fmt = '%(levelname)s:%(name)s:%(message)s'
        if logfile is not None:
            logging.basicConfig(filename=logfile,
                                level=numeric_level,
                                filemode='w+',
                                format=fmt)
        else:
            logging.basicConfig(level=numeric_level)
            coloredlogs.install(level=numeric_level, fmt=fmt)

        self._loglevel = numeric_level
        self._logger = logging.getLogger(__name__)
    else:
        self._logger = external_logger

    # add the title
    title = self.title(2)
    for line in title:
        self._logger.info(line)

    out = ucfg.cfg['output']['out_location']

    # Make the tmp and output directories if they do not exist
    # (any OSError propagates unchanged; the old `raise e` wrapper was a
    # no-op)
    makeable_dirs = [out, join(out, 'tmp')]
    for path in makeable_dirs:
        if not os.path.isdir(path):
            self._logger.info("Directory does not exist, Creating:\n{}"
                              "".format(path))
            os.makedirs(path)

    # after the loop, `path` is the last entry, i.e. the tmp directory
    self.temp_dir = path

    # Check the user config file for errors and report issues if any
    self._logger.info("Checking config file for issues...")
    warnings, errors = check_config(ucfg)
    print_config_report(warnings, errors, logger=self._logger)

    self.ucfg = ucfg
    self.config = self.ucfg.cfg

    # Exit SMRF if config file has errors
    if len(errors) > 0:
        self._logger.error("Errors in the config file. See configuration"
                           " status report above.")
        sys.exit()

    # Write the config file to the output dir no matter where the project is
    full_config_out = abspath(join(out, 'config.ini'))
    self._logger.info("Writing config file with full options.")
    generate_config(self.ucfg, full_config_out)

    # Process the system variables: every key in [system] becomes an
    # attribute on this instance
    for k, v in self.config['system'].items():
        setattr(self, k, v)

    os.environ['WORKDIR'] = self.temp_dir

    # Get the time section utils
    self.start_date = pd.to_datetime(self.config['time']['start_date'])
    self.end_date = pd.to_datetime(self.config['time']['end_date'])

    # Get the timesteps correctly in the time zone
    d = data.mysql_data.date_range(
        self.start_date, self.end_date,
        timedelta(minutes=int(self.config['time']['time_step'])))

    tzinfo = pytz.timezone(self.config['time']['time_zone'])
    self.date_time = [di.replace(tzinfo=tzinfo) for di in d]
    self.time_steps = len(self.date_time)

    # need to align date time: make the albedo decay dates timezone-aware
    if 'date_method_start_decay' in self.config['albedo'].keys():
        self.config['albedo']['date_method_start_decay'] = \
            self.config['albedo']['date_method_start_decay'].replace(
                tzinfo=tzinfo)
        self.config['albedo']['date_method_end_decay'] = \
            self.config['albedo']['date_method_end_decay'].replace(
                tzinfo=tzinfo)

    # if a gridded dataset will be used
    self.gridded = False
    self.forecast_flag = False
    if 'gridded' in self.config:
        self.gridded = True
        if self.config['gridded']['data_type'] in ['hrrr_netcdf',
                                                   'hrrr_grib']:
            self.forecast_flag = \
                self.config['gridded']['hrrr_forecast_flag']

        # hours from start of day
        self.day_hour = self.start_date - pd.to_datetime(
            d[0].strftime("%Y%m%d"))
        self.day_hour = int(self.day_hour / np.timedelta64(1, 'h'))

    # Future dates are only valid when driven by gridded (e.g. WRF) data
    if ((self.start_date > datetime.now() and not self.gridded) or
            (self.end_date > datetime.now() and not self.gridded)):
        raise ValueError("A date set in the future can only be used with"
                         " WRF generated data!")

    # Initialize the distribute dict
    self.distribute = {}

    if self.config['system']['qotw']:
        self._logger.info(getqotw())

    self._logger.info('Started SMRF --> %s' % datetime.now())
    self._logger.info('Model start --> %s' % self.start_date)
    self._logger.info('Model end --> %s' % self.end_date)
    self._logger.info('Number of time steps --> %i' % self.time_steps)