Example No. 1
def base_config():
    """Load the base config object"""

    master_config = os.path.join(
        Path(pysnobal.__file__).parent, 'pysnobal_core_config.ini')
    mcfg = MasterConfig(path=master_config)
    return get_user_config(config_file, mcfg=mcfg)
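For reference, a minimal standalone sketch of the inicheck pattern this example relies on; inicheck must be installed, and 'master.ini' / 'config.ini' are placeholder paths rather than files from any of the projects above.

from inicheck.config import MasterConfig
from inicheck.tools import get_user_config

# Load the master (core) config that defines every section, item and default
mcfg = MasterConfig(path='master.ini')

# Merge the user's config file against that master config
ucfg = get_user_config('config.ini', mcfg=mcfg)

# The merged, checked values are available as a nested dictionary
print(list(ucfg.cfg.keys()))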
Example No. 2
def read(config):
    """Read an inicheck config file and return the user config

    Args:
        config (str or UserConfig): file name or UserConfig object

    Raises:
        Exception: If file does not exist
        Exception: config is not a file or UserConfig

    Returns:
        tuple: (UserConfig, config file name)
    """

    # read the config file and store
    if isinstance(config, str):
        if not os.path.isfile(config):
            raise Exception(
                'Configuration file does not exist --> {}'.format(config))
        configFile = config

        # Read in the original users config
        ucfg = get_user_config(config, modules='basin_setup')

    elif isinstance(config, UserConfig):
        ucfg = config
        configFile = config.filename

    else:
        raise Exception('Config passed to basin_setup is neither file name'
                        ' nor UserConfig instance')

    return ucfg, configFile
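A hedged usage sketch for the read() helper above; 'basin_setup_config.ini' is a placeholder path, and basin_setup must be installed so the modules='basin_setup' lookup can resolve.

# Read from a file name...
ucfg, config_file = read('basin_setup_config.ini')

# ...or pass an existing UserConfig straight through
ucfg2, config_file2 = read(ucfg)
assert config_file2 == ucfg.filename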
Example No. 3
def main():
    repo = abspath(expanduser('~/projects/smrf'))

    branch = sys.argv[1]
    # Compare the full RME run test with station data gold files
    print(
        '\nAnalyzing gold file differences in the {} branch...'.format(branch))
    print('=======================================================')
    # Check out the requested branch
    change_branch(repo, branch)

    # Setup the location of the model run data to check
    gold_dir = join(repo, 'tests', 'RME', 'gold')
    hrrr_gold_dir = join(repo, 'tests', 'RME', 'gold_hrrr')

    # Config file modifications
    cfg = {
        'output': {
            'out_location': 'run_output'
        },
        'system': {
            'log_file': 'log.txt'
        },
    }

    if branch == 'master':
        print("changing topo config section")
        cfg['topo'] = {
            'basin_lat': 43.06475,
            'basin_lon': -116.75395
        }  # From the new calculation

    # Run the model, analyze the results
    import smrf

    ucfg = get_user_config(join(gold_dir, 'gold_config.ini'), modules='smrf')
    produce_comparison(ucfg,
                       gold_dir,
                       cfg_mods=cfg,
                       results='{}_station_results'.format(branch))

    ucfg = get_user_config(join(hrrr_gold_dir, 'gold_config.ini'),
                           modules='smrf')
    produce_comparison(ucfg,
                       hrrr_gold_dir,
                       cfg_mods=cfg,
                       results='{}_hrrr_results'.format(branch))
Example No. 4
    def setUpClass(cls):
        super().setUpClass()
        cls.config_file = os.path.join(cls.test_dir, 'test_base_config.ini')

        # Base configuration
        cls.base_config = get_user_config(cls.config_file,
                                          modules=['smrf', 'awsm'])

        cls.run_dir = os.path.join(cls.test_dir, 'RME')
Example No. 5
    def setUpClass(cls):
        """
        Sets up the test directories and base configuration for the Lakes test case
        """

        cls.test_dir = os.path.abspath('tests/Lakes')
        cls.test_config = os.path.abspath('tests/config.ini')
        cls.out_dir = os.path.join(cls.test_dir, 'output')

        # read in the base configuration
        cls.base_config = get_user_config(cls.test_config, modules=['katana'])
Example No. 6
 def config(self, cfg_dict):
     s = f'[{self.section}]\n'
     for k, v in cfg_dict.items():
         s += f'{k}: {v}\n'
     f = join(dirname(__file__), 'config.ini')
     s += '[output]\n'
     with open(f, mode='w+') as fp:
         fp.write(s)
     ucfg = get_user_config(f, modules=['snowplot'])
     yield ucfg.cfg[self.section]
     if isfile(f):
         os.remove(f)
Example No. 7
def make_vertical_plot(config_file):
    """
    Main function in snowplot that interprets the config file and pieces
    together the plot the user describes in it.

    Args:
        config_file: config file in .ini format; checked with inicheck
    """

    # Get the cfg
    ucfg = get_user_config(config_file, modules=['snowplot'])
    warnings, errors = check_config(ucfg)

    print_config_report(warnings, errors)
    if len(errors) > 0:
        print("Errors in config file. Check report above.")
        sys.exit()

    # output the full config file
    out = ucfg.cfg['output']['output_dir']

    if not isdir(out):
        mkdir(out)
    generate_config(ucfg, join(out, 'config_full.ini'))

    # Grab a copy of the config dictionary
    cfg = ucfg.cfg
    data = {}

    # gather all the templates for creating profiles
    profile_classes = get_checkers(module='snowplot.profiles',
                                   keywords='profile')

    # Create a map of the class names to the config names
    requested_profiles = OrderedDict()
    for v in cfg.keys():
        if v not in __non_data_sections__:
            k = v.replace('_', '').lower()
            requested_profiles[k] = v

    # Create the profile objects and prepare to add them to the figure
    for profile_name, cls in profile_classes.items():

        if profile_name in requested_profiles.keys():
            name = requested_profiles[profile_name]
            log.info("Building {} profile".format(name))
            # Add it to the dictionary of data
            data[profile_name] = cls(**cfg[name])

    # Build the final figure
    build_figure(data, cfg)
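The check/report/archive flow used in this example also works on its own; a minimal sketch, assuming snowplot is installed and using placeholder file names.

from inicheck.tools import get_user_config, check_config
from inicheck.output import print_config_report, generate_config

ucfg = get_user_config('snowplot_config.ini', modules=['snowplot'])
warnings, errors = check_config(ucfg)
print_config_report(warnings, errors)

if not errors:
    # archive the fully populated config next to the output
    generate_config(ucfg, 'config_full.ini')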
Example No. 8
    def setUp(self):
        """
        Runs the short simulation over reynolds mountain east
        """

        # check whether this is being run as a single test or part of the suite
        base = os.path.dirname(smrf.__file__)
        self.test_dir = os.path.join(base, '../', 'tests')

        config_file = 'test_base_config.ini'
        config_file = os.path.join(self.test_dir, config_file)

        if not os.path.isfile(config_file):
            raise Exception('Configuration file not found for testing')

        self.config_file = config_file

        # read in the base configuration
        self.base_config = get_user_config(config_file, modules='smrf')
Example No. 9
    def read_config_file(self, config_file):
        """
        Reads in the user's config file and checks it for errors

        Args:
            config_file: either a path or a UserConfig instance
        """

        # read the config file and store
        if isinstance(config_file, str):
            if not os.path.isfile(config_file):
                raise Exception(
                    'Configuration file does not exist --> {}'.format(
                        config_file))

            # Get the master config file
            mcfg_dir = os.path.abspath(os.path.dirname(__file__))
            master_config = os.path.join(mcfg_dir, self.CORE_CONFIG)
            mcfg = MasterConfig(path=master_config)

            # user config file
            ucfg = get_user_config(config_file, mcfg=mcfg)

        elif isinstance(config_file, UserConfig):
            ucfg = config_file
            config_file = config_file.filename

        else:
            raise Exception('Config passed to PySnobal is neither a file '
                            'name nor a UserConfig instance')

        # Check the config file
        warnings, errors = check_config(ucfg)
        print_config_report(warnings, errors)
        self.ucfg = ucfg
        self.config = self.ucfg.cfg

        # Exit Pysnobal if config file has errors
        if len(errors) > 0:
            print("Errors in the config file. See configuration"
                  " status report above.")
            sys.exit()
Example No. 10
    def config(self, cfg_dict, section):
        s = f'[{section}]\n'
        for k, v in cfg_dict.items():
            s += f'{k}: {v}\n'
        f = join(dirname(__file__), 'config.ini')
        s += '[output]\n'
        s += 'show_plot: False\n'
        s += 'dpi: 50\n'
        s += 'filename: figure.png\n'

        with open(f, mode='w+') as fp:
            fp.write(s)
        # Populate the config
        ucfg = get_user_config(f, modules=['snowplot'])
        # Write out the new config again
        generate_config(ucfg, f)
        yield f

        if isfile(f):
            os.remove(f)
Example No. 11
    def setUp(self):
        """
        Runs the short simulation over reynolds mountain east
        """

        # check whether this is being run as a single test or part of the suite
        config_file = 'test_base_config.ini'
        if os.path.isfile(config_file):
            self.test_dir = ''
        elif os.path.isfile(os.path.join('tests', config_file)):
            config_file = os.path.join('tests', config_file)
            self.test_dir = 'tests'
        else:
            raise Exception('Configuration file not found for testing')

        self.config_file = config_file

        # read in the base configuration
        self.base_config = get_user_config(config_file,
                                           modules=['smrf', 'awsm'])
Example No. 12
    def read_config(self, config):
        if isinstance(config, str):
            if not os.path.isfile(config):
                raise Exception('Configuration file does not exist --> {}'
                                .format(config))
            configFile = config

            try:
                combined_mcfg = MasterConfig(modules=['smrf', 'awsm'])

                # Read in the original users config
                self.ucfg = get_user_config(configFile, mcfg=combined_mcfg)
                self.configFile = configFile

            except UnicodeDecodeError as e:
                print(e)
                raise Exception(('The configuration file is not encoded in '
                                 'UTF-8, please change and retry'))

        elif isinstance(config, UserConfig):
            self.ucfg = config

        else:
            raise Exception("""Config passed to AWSM is neither file """
                            """name nor UserConfig instance""")

        warnings, errors = check_config(self.ucfg)

        if len(errors) > 0:
            print_config_report(warnings, errors)
            print("Errors in the config file. "
                  "See configuration status report above.")
            sys.exit()
        elif len(warnings) > 0:
            print_config_report(warnings, errors)

        self.config = self.ucfg.cfg
Example No. 13
 def load_base_config(cls):
     cls._base_config = get_user_config(
         cls.config_file, modules='basin_setup')
Example No. 14
def run_awsm_daily_ops(config_file):
    """
    Run each day separately. Calls run_awsm.
    """
    # define some formats
    fmt_day = '%Y%m%d'
    fmt_cfg = '%Y-%m-%d %H:%M'
    add_day = pd.to_timedelta(24, unit='h')

    # get config instance
    config = get_user_config(config_file,
                             modules=['smrf', 'awsm'])

    # copy the config and get total start and end
    # config = deepcopy(base_config)
    # set naming style
    config.raw_cfg['paths']['folder_date_style'] = 'day'
    config.apply_recipes()
    config = cast_all_variables(config, config.mcfg)

    # get the water year
    cfg_start_date = pd.to_datetime(config.cfg['time']['start_date'])
    tzinfo = pytz.timezone(config.cfg['time']['time_zone'])
    wy = utils.water_day(cfg_start_date.replace(tzinfo=tzinfo))[1]

    # find the model start depending on restart
    if config.cfg['isnobal restart']['restart_crash']:
        offset_wyhr = int(config.cfg['isnobal restart']['wyh_restart_output'])
        wy_start = pd.to_datetime('{:d}-10-01'.format(wy - 1))
        model_start = wy_start + pd.to_timedelta(offset_wyhr, unit='h')
    else:
        model_start = config.cfg['time']['start_date']

    model_end = config.cfg['time']['end_date']
    isops = config.cfg['paths']['isops']
    if isops:
        devops = 'ops'
    else:
        devops = 'devel'

    # find output location for previous output
    paths = config.cfg['paths']

    prev_out_base = os.path.join(paths['path_dr'],
                                 paths['basin'],
                                 devops,
                                 'wy{}'.format(wy),
                                 paths['proj'],
                                 'runs')

    prev_data_base = os.path.join(paths['path_dr'],
                                  paths['basin'],
                                  devops,
                                  'wy{}'.format(wy),
                                  paths['proj'],
                                  'data')

    # find day of start and end
    start_day = pd.to_datetime(model_start.strftime(fmt_day))
    end_day = pd.to_datetime(model_end.strftime(fmt_day))

    # find total range of run
    ndays = int((end_day-start_day).days) + 1
    date_list = [start_day +
                 pd.to_timedelta(x, unit='D') for x in range(0, ndays)]

    # loop through daily runs and run awsm
    for idd, sd in enumerate(date_list):
        new_config = copy.deepcopy(config)
        if idd > 0:
            new_config.raw_cfg['isnobal restart']['restart_crash'] = False
            new_config.raw_cfg['grid']['thresh_normal'] = 60
            new_config.raw_cfg['grid']['thresh_medium'] = 10
            new_config.raw_cfg['grid']['thresh_small'] = 1
        # get the end of the day
        ed = sd + add_day

        # make sure we're in the model date range
        if sd < model_start:
            sd = model_start
        if ed > model_end:
            ed = model_end

        # set the start and end dates
        new_config.raw_cfg['time']['start_date'] = sd.strftime(fmt_cfg)
        new_config.raw_cfg['time']['end_date'] = ed.strftime(fmt_cfg)

        # reset the initialization
        if idd > 0:
            # find previous output file
            prev_day = sd - pd.to_timedelta(1, unit='D')
            prev_out = os.path.join(prev_out_base,
                                    'run{}'.format(prev_day.strftime(fmt_day)),
                                    'snow.nc')
            # reset if running the model
            if new_config.cfg['awsm master']['model_type'] is not None:
                new_config.raw_cfg['files']['init_type'] = 'netcdf_out'
                new_config.raw_cfg['files']['init_file'] = prev_out

            # if we have a previous storm day file, use it
            prev_storm = os.path.join(prev_data_base,
                                      'data{}'.format(
                                          prev_day.strftime(fmt_day)),
                                      'smrfOutputs', 'storm_days.nc')
            if os.path.isfile(prev_storm):
                new_config.raw_cfg['precip']['storm_days_restart'] = prev_storm

        # apply recipes with new settings
        new_config.apply_recipes()
        new_config = cast_all_variables(new_config, new_config.mcfg)

        # run awsm for the day
        run_awsm(new_config)
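The modify-then-recast pattern used throughout run_awsm_daily_ops, shown in isolation; a sketch assuming smrf and awsm are installed and 'awsm_config.ini' is a placeholder path.

from inicheck.tools import get_user_config, cast_all_variables

config = get_user_config('awsm_config.ini', modules=['smrf', 'awsm'])

# Edit the raw (uncast) values, then re-apply recipes and casting so the
# change propagates exactly as in the daily loop above
config.raw_cfg['paths']['folder_date_style'] = 'day'
config.apply_recipes()
config = cast_all_variables(config, config.mcfg)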
Example No. 15
    def __init__(self, config):
        """
        Initialize the model: read the config file, set the start and end dates, and set up logging
        Args:
            config: string path to the config file or inicheck UserConfig instance
        """
        # read the config file and store
        awsm_mcfg = MasterConfig(modules='awsm')
        smrf_mcfg = MasterConfig(modules='smrf')

        if isinstance(config, str):
            if not os.path.isfile(config):
                raise Exception('Configuration file does not exist --> {}'
                                .format(config))
            configFile = config

            try:
                combined_mcfg = MasterConfig(modules=['smrf', 'awsm'])

                # Read in the original users config
                self.ucfg = get_user_config(configFile, mcfg=combined_mcfg)
                self.configFile = configFile

            except UnicodeDecodeError as e:
                print(e)
                raise Exception(('The configuration file is not encoded in '
                                 'UTF-8, please change and retry'))

        elif isinstance(config, UserConfig):
            self.ucfg = config
            configFile = ''

        else:
            raise Exception(
                'Config passed to AWSM is neither file name nor UserConfig instance')

        # get the git version
        self.gitVersion = awsm_utils.getgitinfo()

        # create blank log and error log because logger is not initialized yet
        self.tmp_log = []
        self.tmp_err = []
        self.tmp_warn = []

        # Check the user config file for errors and report issues if any
        self.tmp_log.append("Checking config file for issues...")
        warnings, errors = check_config(self.ucfg)
        print_config_report(warnings, errors)

        self.config = self.ucfg.cfg

        # Exit AWSM if config file has errors
        if len(errors) > 0:
            print("Errors in the config file. "
                  "See configuration status report above.")
            # sys.exit()

        # ################## Decide which modules to run #####################
        self.do_smrf = self.config['awsm master']['run_smrf']
        #self.do_isnobal = self.config['awsm master']['run_isnobal']
        self.model_type = self.config['awsm master']['model_type']
        # self.do_smrf_ipysnobal = \
        #     self.config['awsm master']['run_smrf_ipysnobal']
        # self.do_ipysnobal = self.config['awsm master']['run_ipysnobal']
        self.do_forecast = False
        if 'gridded' in self.config and self.do_smrf:
            self.do_forecast = self.config['gridded']['hrrr_forecast_flag']

            # WARNING: The value here is inferred in SMRF.data.loadGrid. A
            # change here requires a change there
            self.n_forecast_hours = 18

        # Options for converting files
        self.do_make_in = self.config['awsm master']['make_in']
        self.do_make_nc = self.config['awsm master']['make_nc']
        # do report?
        # self.do_report = self.config['awsm master']['do_report']
        self.snowav_config = self.config['awsm master']['snowav_config']

        # options for masking isnobal
        self.mask_isnobal = self.config['awsm master']['mask_isnobal']

        # prompt for making directories
        self.prompt_dirs = self.config['awsm master']['prompt_dirs']

        # store smrf version if running smrf
        self.smrf_version = smrf.__version__

        # ################ Time information ##################
        self.start_date = pd.to_datetime(self.config['time']['start_date'])
        self.end_date = pd.to_datetime(self.config['time']['end_date'])
        self.time_step = self.config['time']['time_step']
        self.tmz = self.config['time']['time_zone']
        self.tzinfo = pytz.timezone(self.config['time']['time_zone'])
        # date to use for finding wy
        tmp_date = self.start_date.replace(tzinfo=self.tzinfo)
        tmp_end_date = self.end_date.replace(tzinfo=self.tzinfo)

        # find water year hour of start and end date
        self.start_wyhr = int(utils.water_day(tmp_date)[0]*24)
        self.end_wyhr = int(utils.water_day(tmp_end_date)[0]*24)

        # find start of water year
        tmpwy = utils.water_day(tmp_date)[1] - 1
        self.wy_start = pd.to_datetime('{:d}-10-01'.format(tmpwy))

        # ################ Store some paths from config file ##################
        # path to the base drive (i.e. /data/blizzard)
        if self.config['paths']['path_dr'] is not None:
            self.path_dr = os.path.abspath(self.config['paths']['path_dr'])
        else:
            print('No base path to drive given. Exiting now!')
            sys.exit()

        # name of your basin (i.e. Tuolumne)
        self.basin = self.config['paths']['basin']
        # water year of run
        self.wy = utils.water_day(tmp_date)[1]
        # if the run is operational or not
        self.isops = self.config['paths']['isops']
        # name of project if not an operational run
        self.proj = self.config['paths']['proj']
        # check for project description
        self.desc = self.config['paths']['desc']
        # find style for folder date stamp
        self.folder_date_style = self.config['paths']['folder_date_style']

        # setting to output in separate daily folders
        self.daily_folders = self.config['awsm system']['daily_folders']
        if self.daily_folders and not self.run_smrf_ipysnobal:
            raise ValueError('Cannot run daily_folders with anything other'
                             ' than run_smrf_ipysnobal')

        if self.do_forecast:
            self.tmp_log.append('Forecasting set to True')

            # self.fp_forecastdata = self.config['gridded']['wrf_file']
            # if self.fp_forecastdata is None:
            #     self.tmp_err.append('Forecast set to true, '
            #                         'but no grid file given')
            #     print("Errors in the config file. See configuration "
            #           "status report above.")
            #     print(self.tmp_err)
            #     sys.exit()

            if self.config['system']['threading']:
                # Can't run threaded smrf if running forecast_data
                self.tmp_err.append('Cannot run SMRF threaded with'
                                    ' gridded input data')
                print(self.tmp_err)
                sys.exit()

        # Time step mass thresholds for iSnobal
        self.mass_thresh = []
        self.mass_thresh.append(self.config['grid']['thresh_normal'])
        self.mass_thresh.append(self.config['grid']['thresh_medium'])
        self.mass_thresh.append(self.config['grid']['thresh_small'])

        # threads for running iSnobal
        self.ithreads = self.config['awsm system']['ithreads']
        # how often to output from iSnobal
        self.output_freq = self.config['awsm system']['output_frequency']
        # number of timesteps to run if you don't want to run the whole thing
        self.run_for_nsteps = self.config['awsm system']['run_for_nsteps']
        # pysnobal output variables
        self.pysnobal_output_vars = self.config['awsm system']['variables']
        self.pysnobal_output_vars = [wrd.lower()
                                     for wrd in self.pysnobal_output_vars]
        # snow and em file names
        self.snow_name = self.config['awsm system']['snow_name']
        self.em_name = self.config['awsm system']['em_name']

        # options for restarting iSnobal
        self.restart_crash = False
        if self.config['isnobal restart']['restart_crash']:
            self.restart_crash = True
            # self.new_init = self.config['isnobal restart']['new_init']
            self.depth_thresh = self.config['isnobal restart']['depth_thresh']
            self.restart_hr = \
                int(self.config['isnobal restart']['wyh_restart_output'])
            self.restart_folder = self.config['isnobal restart']['output_folders']

        # iSnobal active layer
        self.active_layer = self.config['grid']['active_layer']

        # if we are going to run ipysnobal with smrf
        if self.model_type in ['ipysnobal', 'smrf_ipysnobal']:
            self.ipy_threads = self.ithreads
            self.ipy_init_type = \
                self.config['files']['init_type']
            self.forcing_data_type = \
                self.config['ipysnobal']['forcing_data_type']

        # parameters needed for restart procedure
        self.restart_run = False
        if self.config['isnobal restart']['restart_crash']:
            self.restart_run = True
            # find restart hour datetime
            reset_offset = pd.to_timedelta(self.restart_hr, unit='h')
            # set a new start date for this run
            self.restart_date = self.wy_start + reset_offset
            self.tmp_log.append('Restart date is {}'.format(self.restart_date))

        # read in update depth parameters
        self.update_depth = False
        if 'update depth' in self.config:
            self.update_depth = self.config['update depth']['update']
        if self.update_depth:
            self.update_file = self.config['update depth']['update_file']
            self.update_buffer = self.config['update depth']['buffer']
            self.flight_numbers = self.config['update depth']['flight_numbers']
            # if flights to use is not list, make it a list
            if self.flight_numbers is not None:
                if not isinstance(self.flight_numbers, list):
                    self.flight_numbers = [self.flight_numbers]

        # list of sections related to AWSM
        # These will be removed for smrf config
        self.sec_awsm = awsm_mcfg.cfg.keys()
        self.sec_smrf = smrf_mcfg.cfg.keys()

        # Make rigid directory structure
        self.mk_directories()

        # ################ Topo data for iSnobal ##################
        # get topo stats
        self.csys = self.config['grid']['csys'].upper()
        self.nbits = int(self.config['grid']['nbits'])
        self.soil_temp = self.config['soil_temp']['temp']
        # get topo class
        self.topo = mytopo(self.config['topo'], self.mask_isnobal,
                           self.model_type, self.csys, self.pathdd)

        # ################ Generate config backup ##################
        # if self.config['output']['input_backup']:
        # set location for backup and output backup of awsm sections
        config_backup_location = \
            os.path.join(self.pathdd, 'awsm_config_backup.ini')
        generate_config(self.ucfg, config_backup_location)

        # create log now that directory structure is done
        self.createLog()

        # if we have a model, initialize it
        if self.model_type is not None:
            self.myinit = modelInit(self._logger, self.config, self.topo,
                                    self.start_wyhr, self.pathro, self.pathrr,
                                    self.pathinit, self.wy_start)
Example No. 16
 def ucfg(self, full_config_ini):
     """
     Function scoped version of the ucfg fixture so changes can be made to it
     w/o disrupting other tests.
     """
     return get_user_config(full_config_ini, modules="inicheck")
Example No. 17
    def __init__(self, config):
        """Katana class created to wrap all functionality needed to run
        WindNinja in the context of the USDA ARS snow-water supply
        modeling workflow

        Arguments:
            config {string} -- path to the config file or an
                                inicheck UserConfig object
        """

        if isinstance(config, str):
            if not os.path.isfile(config):
                raise Exception(
                    'Configuration file does not exist --> {}'.format(config))

            try:
                # Read in the original users config
                self.ucfg = get_user_config(config, modules='katana')

            except UnicodeDecodeError as e:
                print(e)
                raise Exception(('The configuration file is not encoded in '
                                 'UTF-8, please change and retry'))

        elif isinstance(config, UserConfig):
            self.ucfg = config

        else:
            raise Exception('Config passed to Katana is neither file name '
                            'nor UserConfig instance')

        self.config_file = self.ucfg.filename

        warnings, errors = check_config(self.ucfg)
        print_config_report(warnings, errors)
        self.config = self.ucfg.cfg

        if len(errors) > 0:
            raise Exception("Error in config file. Check report above.")

        self.start_timing = datetime.now()

        ################################################
        # Start parsing the arguments
        ################################################
        self.parse_config()

        ################################################
        # Create logger
        ################################################
        self.create_log()

        ################################################
        # Initialize the topo
        ################################################
        self.topo = Topo(self.config)

        ################################################
        # Initialize the input data
        ################################################
        self.initialize_input_data()

        self._logger.debug('Katana initialized')
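A hedged sketch of constructing the class above from either accepted input type; 'katana_config.ini' is a placeholder path to a config that is valid against the katana master config.

from inicheck.tools import get_user_config

# Let Katana load and check the file itself...
k = Katana('katana_config.ini')

# ...or hand over an already-loaded UserConfig, e.g. from a test fixture
ucfg = get_user_config('katana_config.ini', modules='katana')
k = Katana(ucfg)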
Example No. 18
 def load_base_config(cls):
     cls._base_config = get_user_config(cls.config_file, modules='smrf')
Example No. 19
    def __init__(self, config_file, awsm=None, end_date=None):

        print('Reading {} and loading files...'.format(config_file))

        self.config_file = config_file
        snowav_mcfg = MasterConfig(modules='snowav')
        ucfg = get_user_config(self.config_file, mcfg=snowav_mcfg)
        ucfg.apply_recipes()
        ucfg = cast_all_variables(ucfg, ucfg.mcfg)
        self.snowav_path = get_snowav_path()

        warnings, errors = check_config(ucfg)
        if errors != [] or warnings != []:
            print_config_report(warnings, errors)

        self.tmp_log = []
        self.tmp_err = []
        self.tmp_warn = []
        self.proc_time_start = datetime.now()

        ####################################################
        #            snowav                                #
        ####################################################
        self.loglevel = ucfg.cfg['snowav']['log_level'].upper()
        self.log_to_file = ucfg.cfg['snowav']['log_to_file']
        self.save_path = ucfg.cfg['snowav']['save_path']
        self.units = ucfg.cfg['snowav']['units']
        self.elev_bins = ucfg.cfg['snowav']['elev_bins']
        self.directory = ucfg.cfg['snowav']['directory']
        self.dempath = ucfg.cfg['snowav']['dempath']
        self.run_name = ucfg.cfg['snowav']['run_name']
        self.plotorder = ucfg.cfg['snowav']['masks']
        self.plotlabels = ucfg.cfg['snowav']['plotlabels']
        self.report_only = ucfg.cfg['snowav']['report_only']

        ####################################################
        #           run                                    #
        ####################################################
        self.dplcs = ucfg.cfg['run']['decimals']
        self.start_date = ucfg.cfg['run']['start_date']
        self.end_date = ucfg.cfg['run']['end_date']

        if end_date is not None:
            self.end_date = end_date
            self.tmp_log.append(' Overriding config end_date with '
                                '{} given with snowav call'.format(end_date))

            if self.end_date <= self.start_date:
                raise Exception(
                    'end_date {} earlier than start_date {}'.format(
                        self.end_date, self.start_date))

        if self.start_date is not None and self.end_date is not None:
            self.start_date = self.start_date
            self.end_date = self.end_date

            if self.start_date >= self.end_date:
                self.tmp_log.append(' Error: [run] start_date >= end_date')
                raise Exception('[run] start_date >= [run] end_date')

        else:
            self.tmp_log.append(' [run] start_date and/or end_date was not '
                                'defined in config file, will be assigned '
                                'by available dates in directory')

        self.all_subdirs = ucfg.cfg['run']['all_subdirs']

        if (ucfg.cfg['run']['directory'] is None) and (awsm is not None):
            if self.all_subdirs is True:
                self.run_dirs = ([
                    awsm.pathr + s for s in os.listdir(awsm.pathr)
                    if (os.path.isdir(awsm.pathr + s))
                ])
            else:
                self.run_dirs = awsm.pathr
                if type(self.run_dirs) != list:
                    self.run_dirs = [self.run_dirs]

        else:
            directory = ucfg.cfg['run']['directory']

            if len(directory) == 1:
                directory = directory[0]

            if self.all_subdirs is True:
                self.run_dirs = ([
                    directory + s for s in os.listdir(directory)
                    if (os.path.isdir(directory + s))
                ])
            else:
                self.run_dirs = ucfg.cfg['run']['directory']
                if type(self.run_dirs) != list:
                    self.run_dirs = [self.run_dirs]

        self.run_dirs.sort()

        ####################################################
        #         database
        ####################################################
        self.mysql = ucfg.cfg['database']['mysql']
        self.db_user = ucfg.cfg['database']['user']
        self.db_password = ucfg.cfg['database']['password']
        self.db_host = ucfg.cfg['database']['host']
        self.db_port = ucfg.cfg['database']['port']
        self.db_convert = ucfg.cfg['database']['convert_ws']
        self.add_basins = ucfg.cfg['database']['add_basins']
        self.db_overwrite = ucfg.cfg['database']['overwrite']
        self.properties = ucfg.cfg['database']['properties']
        self.sqlite = ucfg.cfg['database']['sqlite']

        base_bands = [
            'swi_z', 'evap_z', 'swe_z', 'depth', 'density', 'coldcont',
            'precip_z'
        ]

        for band in base_bands:
            if band not in self.properties:
                self.tmp_log.append(' WARNING! Config option [database] '
                                    'properties does not contain '
                                    '{}'.format(band))

        if ((self.mysql is not None)
                and ((self.db_user is None) or (self.db_password is None) or
                     (self.db_host is None) or (self.db_port is None))):
            raise Exception('If using config option [database] mysql, must '
                            'also supply user, password, host, and port')

        if self.sqlite is not None:
            if not os.path.isdir(os.path.dirname(self.sqlite)):
                raise Exception('{} does not contain a valid base '
                                'path'.format(self.sqlite))
            self.sqlite = 'sqlite:///' + self.sqlite
            self.db_type = 'sqlite'

            if self.mysql is not None:
                raise Exception('Config option [database] section contains '
                                'both "mysql" and "sqlite" entries, pick one.')
        else:
            self.db_type = 'sql'

        ####################################################
        #           validate                               #
        ####################################################
        self.val_stns = ucfg.cfg['validate']['stations']
        self.val_lbls = ucfg.cfg['validate']['labels']
        self.val_client = ucfg.cfg['validate']['client']
        self.wxdb_user = ucfg.cfg['validate']['user']
        self.wxdb_password = ucfg.cfg['validate']['password']
        self.wxdb_host = ucfg.cfg['validate']['host']
        self.wxdb_port = ucfg.cfg['validate']['port']
        self.point_values = ucfg.cfg['validate']['point_values']
        self.point_values_csv = ucfg.cfg['validate']['point_values_csv']
        self.point_values_date = ucfg.cfg['validate']['point_values_date']
        self.point_values_properties = ucfg.cfg['validate'][
            'point_values_properties']
        self.point_values_heading = ucfg.cfg['validate'][
            'point_values_heading']
        self.point_values_settings = ucfg.cfg['validate'][
            'point_values_settings']

        for n in range(0, 10):
            self.point_values_settings[n] = int(self.point_values_settings[n])

        if self.point_values and self.point_values_csv is None:
            self.point_values = False
            self.tmp_log.append(' Config option [validate] point_values_csv '
                                'was not supplied, point_values being set '
                                'to False')

        if self.point_values and self.point_values_date is None:
            self.point_values = False
            self.tmp_log.append(' Config option [validate] point_values_date '
                                'was not supplied, point_values being set '
                                'to False')

        ####################################################
        #           diagnostics                            #
        ####################################################
        self.diagnostics_flag = ucfg.cfg['diagnostics']['diagnostics']
        self.diag_basins = ucfg.cfg['diagnostics']['basins']
        self.diag_limit = ucfg.cfg['diagnostics']['limit']

        if self.diagnostics_flag:
            if self.diag_basins is not None:
                for basin in self.diag_basins:
                    if basin not in self.plotorder:
                        self.tmp_log.append(' Config [diagnostics] basin: "{}"'
                                            ' does not match [snowav] masks: '
                                            '"{}", diagnostics set to '
                                            'False'.format(
                                                basin, self.plotorder))
                        self.diagnostics_flag = False

        if 'snow_line' not in self.properties and self.diagnostics_flag:
            self.diagnostics_flag = False
            self.tmp_log.append(' Required properties in [database] properties'
                                ' for [diagnostics] does not exist, setting '
                                'diagnostics: False')

        self.inputs_flag = ucfg.cfg['diagnostics']['inputs_table']
        self.inputs_variables = ucfg.cfg['diagnostics']['inputs_variables']
        self.inputs_percentiles = ucfg.cfg['diagnostics']['inputs_percentiles']
        self.inputs_methods = ucfg.cfg['diagnostics']['inputs_methods']
        self.inputs_basins = ucfg.cfg['diagnostics']['inputs_basins']

        if self.inputs_basins is not None and self.plotorder is not None:
            for basin in self.inputs_basins:
                if basin not in self.plotorder:
                    self.tmp_log.append(
                        ' Config option [diagnostics] '
                        'inputs_basins: {} does not match what '
                        'was supplied in [snowav] masks: {}, '
                        'inputs set to '
                        'False'.format(basin, self.plotorder))
                    self.inputs_flag = False

        if self.inputs_flag:
            s = [x + ', ' for x in self.inputs_variables]
            self.tmp_log.append(' Using variables {} for inputs '
                                'summary'.format(''.join(s)))

            s = [x + ', ' for x in self.inputs_methods]
            self.tmp_log.append(' Using methods {} for inputs '
                                'summary'.format(''.join(s)))

        ####################################################
        #          plots                                   #
        ####################################################
        self.dpi = ucfg.cfg['plots']['dpi']
        self.depth_clip = ucfg.cfg['plots']['depth_clip']
        self.clims_percent = ucfg.cfg['plots']['clims_percent']
        self.subs_fig = ucfg.cfg['plots']['subs_fig']
        self.density_flag = ucfg.cfg['plots']['density']
        self.swi_flag = ucfg.cfg['plots']['swi']
        self.current_image_flag = ucfg.cfg['plots']['current_image']
        self.image_change_flag = ucfg.cfg['plots']['image_change']
        self.flt_image_change_clims = ucfg.cfg['plots'][
            'flt_image_change_clims']
        self.cold_content_flag = ucfg.cfg['plots']['cold_content']
        self.swe_volume_flag = ucfg.cfg['plots']['swe_volume']
        self.basin_total_flag = ucfg.cfg['plots']['basin_total']
        self.stn_validate_flag = ucfg.cfg['plots']['stn_validate']
        self.nash_sut_flag = ucfg.cfg['plots']['disp_nash_sut']
        self.stns_file = ucfg.cfg['plots']['stns_file']
        self.inputs_fig_flag = ucfg.cfg['plots']['inputs']
        self.plots_inputs_variables = ucfg.cfg['plots']['inputs_variables']
        self.compare_runs_flag = ucfg.cfg['plots']['compare_runs']
        self.compare_run_names = ucfg.cfg['plots']['compare_run_names']
        self.compare_run_labels = ucfg.cfg['plots']['compare_run_labels']
        self.compare_run_wys = ucfg.cfg['plots']['compare_run_wys']
        self.precip_depth_flag = ucfg.cfg['plots']['precip_depth']
        self.basin_detail_flag = ucfg.cfg['plots']['basin_detail']
        self.update_file = ucfg.cfg['plots']['update_file']
        self.figsize = ucfg.cfg['plots']['fig_size']
        self.write_properties = ucfg.cfg['plots']['write_properties']
        self.point_values_flag = ucfg.cfg['plots']['point_values']

        if self.flt_image_change_clims[0] < 0:
            self.flt_image_change_clims[0] = 0
        if self.flt_image_change_clims[1] > 100:
            self.flt_image_change_clims[1] = 100

        if (self.write_properties is not None
                and type(self.write_properties) != list):
            self.write_properties = [self.write_properties]

        numbers = ucfg.cfg['plots']['update_numbers']

        if numbers is not None:
            if type(numbers) != list:
                numbers = [numbers]
            self.update_numbers = [x - 1 for x in numbers]
        else:
            self.update_numbers = None

        if (self.compare_runs_flag and ((self.compare_run_names is None) or
                                        (self.compare_run_labels is None)
                                        or self.compare_run_wys is None)):
            self.tmp_log.append(' Config option [plots] compare_runs set to '
                                'True, but one of compare_run_names, '
                                'compare_run_labels, or compare_run_wys is '
                                'empty, setting compare_runs to False')
            self.compare_runs_flag = False

        if (self.compare_runs_flag and
            (len(self.compare_run_names) != len(self.compare_run_labels))):
            self.tmp_log.append(' Config option [plots] compare_runs set to '
                                'True, must supply equal length '
                                'compare_run_names and compare_run_labels, '
                                'resetting compare_runs to False')
            self.compare_runs_flag = False

        if self.update_file is not None:
            self.flt_flag = True
        else:
            self.flt_flag = False

        if (self.stn_validate_flag and (self.val_client is None)
                or (self.val_stns is None) or (self.val_lbls is None)
                or (self.wxdb_user is None) or (self.wxdb_password is None)):
            self.tmp_log.append(' Config option [plots] stn_validate is being '
                                'set to False')

            self.stn_validate_flag = False

        if len(self.point_values_settings) != 14:
            self.tmp_log.append(' Expected [validate] point_values_settings '
                                'to have 14 values, point_values set to False')
            self.point_values_flag = False

        for var in self.plots_inputs_variables:
            if var not in self.inputs_variables:
                self.plots_inputs_variables.remove(var)
                self.tmp_log.append(' Config option [plots] inputs_variables '
                                    'value {} not present in [diagnostics] '
                                    'inputs_variables, being '
                                    'removed'.format(var))

        ####################################################
        #          report                                  #
        ####################################################
        self.report_flag = ucfg.cfg['report']['report']
        self.print_latex = ucfg.cfg['report']['print_latex']
        self.report_name = ucfg.cfg['report']['file']
        self.rep_title = ucfg.cfg['report']['title']
        self.rep_path = ucfg.cfg['report']['save_path']
        self.env_path = ucfg.cfg['report']['env_path']
        self.templ_path = ucfg.cfg['report']['templ_path']
        self.tex_file = ucfg.cfg['report']['tex_file']
        self.summary_file = ucfg.cfg['report']['summary']
        self.figs_tpl_path = ucfg.cfg['report']['figs_tpl_path']
        self.flight_figs = ucfg.cfg['report']['flight_figs']
        self.tables = ucfg.cfg['report']['tables']
        self.report_diagnostics = ucfg.cfg['report']['diagnostics']
        self.report_diagnostics_day = ucfg.cfg['report']['diagnostics_day']
        self.rep_dplcs = ucfg.cfg['report']['decimals']

        if (self.report_diagnostics
                and (not self.inputs_fig_flag or not self.diagnostics_flag)):
            self.tmp_log.append(" [report] diagnostics: True, but must also "
                                "have [plots] inputs: True and [diagnostics] "
                                "diagnostics: True, setting to False")
            self.report_diagnostics = False

        if self.report_diagnostics and self.report_diagnostics_day[0] != 'any':

            if (calendar.day_name[datetime.now().weekday()]
                    not in self.report_diagnostics_day):
                self.report_diagnostics = False
                self.tmp_log.append(" Per [report] diagnostics_day: {}, "
                                    "setting diagnostics: "
                                    "False".format(
                                        self.report_diagnostics_day))

        self.rep_swi_flag = ucfg.cfg['report']['swi']
        if not self.swi_flag:
            self.rep_swi_flag = False

        self.rep_image_change_flag = ucfg.cfg['report']['image_change']
        if not self.image_change_flag:
            self.rep_image_change_flag = False

        self.rep_cold_content_flag = ucfg.cfg['report']['cold_content']
        if not self.cold_content_flag:
            self.rep_cold_content_flag = False

        self.rep_swe_volume_flag = ucfg.cfg['report']['swe_volume']
        if not self.swe_volume_flag:
            self.rep_swe_volume_flag = False

        self.rep_basin_total_flag = ucfg.cfg['report']['basin_total']
        if not self.basin_total_flag:
            self.rep_basin_total_flag = False

        self.rep_stn_validate_flag = ucfg.cfg['report']['stn_validate']
        if not self.stn_validate_flag:
            self.rep_stn_validate_flag = False

        self.rep_compare_runs_flag = ucfg.cfg['report']['compare_runs']
        if not self.compare_runs_flag:
            self.rep_compare_runs_flag = False

        self.rep_precip_depth_flag = ucfg.cfg['report']['precip_depth']
        if not self.precip_depth_flag:
            self.rep_precip_depth_flag = False

        # check paths to see if they need default snowav path
        if self.env_path is None:
            self.env_path = os.path.abspath(
                os.path.join(snowav.__path__[0],
                             "report/template/section_text"))
        if self.templ_path is None:
            self.templ_path = os.path.abspath(
                os.path.join(snowav.__path__[0], "report/template"))
        if self.summary_file is None:
            self.summary_file = os.path.abspath(
                os.path.join(
                    snowav.__path__[0],
                    "report/template/section_text/report_summary.txt"))
        if self.tex_file is None:
            self.tex_file = os.path.abspath(
                os.path.join(snowav.__path__[0],
                             "report/template/snowav_report.text"))
        if self.figs_tpl_path is None:
            self.figs_tpl_path = os.path.abspath(
                os.path.join(snowav.__path__[0], "report/figs"))

        ####################################################
        #           query                                  #
        ####################################################
        self.query_flag = ucfg.cfg['query']['query']
        self.q_basins = ucfg.cfg['query']['basins']
        self.q_value = ucfg.cfg['query']['value']
        self.q_run_name = ucfg.cfg['query']['run_name']
        self.q_print_all_runs = ucfg.cfg['query']['print_all_runs']
        self.q_start_date = ucfg.cfg['query']['start_date']
        self.q_end_date = ucfg.cfg['query']['end_date']
        self.q_total = ucfg.cfg['query']['total']
        self.q_output = ucfg.cfg['query']['output']
        self.q_csv_base_path = ucfg.cfg['query']['csv_base_path']
        self.q_database = ucfg.cfg['query']['database']

        ####################################################
        #           inflow                                 #
        ####################################################
        self.inflow_flag = ucfg.cfg['inflow']['inflow']
        self.inflow_data = ucfg.cfg['inflow']['inflow_data']
        self.summary_csv = ucfg.cfg['inflow']['summary_csv']
        self.inflow_headings = ucfg.cfg['inflow']['inflow_headings']
        self.basin_headings = ucfg.cfg['inflow']['basin_headings']
        self.sheet_name = ucfg.cfg['inflow']['sheet_name']
        self.skiprows = ucfg.cfg['inflow']['skiprows']
        self.overwrite = ucfg.cfg['inflow']['overwrite']
        self.file_base = ucfg.cfg['inflow']['file_base']
        self.date_idx = ucfg.cfg['inflow']['date_idx']
        self.convert = ucfg.cfg['inflow']['convert']

        self.ucfg = ucfg
Example No. 20
 def setUpClass(self):
     base = os.path.dirname(__file__)
     self.ucfg = get_user_config(os.path.join(
         base, "test_configs/full_config.ini"),
                                 modules="inicheck")
Example No. 21
def full_ucfg(full_config_ini, master_ini):
    return get_user_config(full_config_ini, master_files=master_ini)
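The examples above use three different ways of pointing get_user_config at a master config; a sketch collecting them in one place, with placeholder file names.

from inicheck.config import MasterConfig
from inicheck.tools import get_user_config

# 1) look the master config up from installed packages
ucfg = get_user_config('config.ini', modules=['smrf', 'awsm'])

# 2) pass a pre-built MasterConfig object
ucfg = get_user_config('config.ini', mcfg=MasterConfig(path='core_config.ini'))

# 3) point directly at the master .ini file(s), as in this example
ucfg = get_user_config('config.ini', master_files='master.ini')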
Example No. 22
def main():
    """
    Runs a command that only takes a config. This was originally written to
    perform multiyear analysis on swiflows calibration commands.
    """
    p = argparse.ArgumentParser(
        description="Runs a command that takes a config"
        " file for mulitple years.")

    p.add_argument("cmd", help="Command to execute like cmd config.ini")

    p.add_argument("config",
                   help="Config file containing all settings for"
                   " running the cmd")

    p.add_argument("-wy",
                   "--years",
                   dest='years',
                   required=True,
                   nargs="+",
                   help="Water years to run by changing the start and end"
                   " times in the config, if not provided it will just run"
                   " the cmd, assumes the month and day assigned in the "
                   " config are constant")

    p.add_argument("-m",
                   "--modules",
                   dest='modules',
                   required=False,
                   nargs='+',
                   help="Python packages the config is associated to")
    p.add_argument("-o",
                   "--output",
                   dest='output',
                   required=False,
                   default='./output',
                   help="Python packages the config is associated to")

    args = p.parse_args()

    # Manage the config paths and modules and grab the config
    orig_path = abspath(args.config)
    if args.modules:
        modules = args.modules
    else:
        modules = None

    ucfg = get_user_config(orig_path, modules=modules)

    # Setup output path
    output = abspath(args.output)
    if not isdir(output):
        os.makedirs(output)

    print("Running {} {} over {}...".format(args.cmd, args.config,
                                            ", ".join(args.years)))

    # Determine the dates to modify and report it
    results = find_section_item_start_stop(ucfg)
    sec_name = results['section']
    start_name = results['start']
    end_name = results['end']
    start = ucfg.cfg[sec_name][start_name]
    end = ucfg.cfg[sec_name][end_name]

    print("Found the modifiable start and stop dates in the config file...")
    print("Start in configs Section: {} Item: {}".format(sec_name, start_name))
    print("End in configs Section: {} Item: {}".format(sec_name, end_name))
    fmt = "%m-%d"
    print("Running {} year over {} - {}".format(len(args.years),
                                                start.strftime(fmt),
                                                end.strftime(fmt)))

    for wyr in args.years:
        print("Adjusting config for WY{} and running...".format(wyr))

        # Update the paths for modifying
        current_output = join(output, wyr)
        current_path = join(current_output, "config.ini")
        ucfg = set_dates(ucfg, wyr, sec_name, start_name, end_name)

        if not isdir(current_output):
            os.makedirs(current_output)
        else:
            print("WARN: WY{} has data in it, you could be overwriting"
                  " data...".format(wyr))

        # Write the config so we can run
        generate_config(ucfg, current_path)

        # Build the command
        cmd = "{} {}".format(args.cmd, current_path)

        s = check_output(cmd, shell=True)
        print(s.decode("utf-8"))
Example No. 23
    def __init__(self, config, external_logger=None):
        """
        Initialize the model: read the config file, set the start and end dates, and set up logging
        """
        # read the config file and store
        if isinstance(config, str):
            if not os.path.isfile(config):
                raise Exception(
                    'Configuration file does not exist --> {}'.format(config))
            configFile = config

            # Read in the original users config
            ucfg = get_user_config(config, modules='smrf')

        elif isinstance(config, UserConfig):
            ucfg = config
            configFile = config.filename

        else:
            raise Exception('Config passed to SMRF is neither a file name '
                            'nor a UserConfig instance')
        # start logging
        if external_logger == None:

            if 'log_level' in ucfg.cfg['system']:
                loglevel = ucfg.cfg['system']['log_level'].upper()
            else:
                loglevel = 'INFO'

            numeric_level = getattr(logging, loglevel, None)
            if not isinstance(numeric_level, int):
                raise ValueError('Invalid log level: %s' % loglevel)

            # setup the logging
            logfile = None
            if ucfg.cfg['system']['log_file'] != None:
                logfile = ucfg.cfg['system']['log_file']
                if not os.path.isabs(logfile):
                    logfile = abspath(
                        join(dirname(configFile),
                             ucfg.cfg['system']['log_file']))

                if not os.path.isdir(dirname(logfile)):
                    os.makedirs(dirname(logfile))

                if not os.path.isfile(logfile):
                    with open(logfile, 'w+') as f:
                        f.close()

            fmt = '%(levelname)s:%(name)s:%(message)s'
            if logfile is not None:
                logging.basicConfig(filename=logfile,
                                    level=numeric_level,
                                    filemode='w+',
                                    format=fmt)
            else:
                logging.basicConfig(level=numeric_level)
                coloredlogs.install(level=numeric_level, fmt=fmt)

            self._loglevel = numeric_level

            self._logger = logging.getLogger(__name__)
        else:
            self._logger = external_logger

        # add the title
        title = self.title(2)
        for line in title:
            self._logger.info(line)

        out = ucfg.cfg['output']['out_location']

        # Make the tmp and output directories if they do not exist
        makeable_dirs = [out, join(out, 'tmp')]
        for path in makeable_dirs:
            if not os.path.isdir(path):
                try:
                    self._logger.info("Directory does not exist, Creating:\n{}"
                                      "".format(path))
                    os.makedirs(path)

                except OSError as e:
                    raise e

        self.temp_dir = path

        # Check the user config file for errors and report issues if any
        self._logger.info("Checking config file for issues...")
        warnings, errors = check_config(ucfg)
        print_config_report(warnings, errors, logger=self._logger)
        self.ucfg = ucfg
        self.config = self.ucfg.cfg

        # Exit SMRF if config file has errors
        if len(errors) > 0:
            self._logger.error("Errors in the config file. See configuration"
                               " status report above.")
            sys.exit()

        # Write the config file to the output dir no matter where the project is
        full_config_out = abspath(join(out, 'config.ini'))

        self._logger.info("Writing config file with full options.")
        generate_config(self.ucfg, full_config_out)

        # Process the system variables
        for k, v in self.config['system'].items():
            setattr(self, k, v)

        os.environ['WORKDIR'] = self.temp_dir

        # Get the time section utils
        self.start_date = pd.to_datetime(self.config['time']['start_date'])
        self.end_date = pd.to_datetime(self.config['time']['end_date'])

        # Get the timesteps correctly in the time zone
        d = data.mysql_data.date_range(
            self.start_date, self.end_date,
            timedelta(minutes=int(self.config['time']['time_step'])))

        tzinfo = pytz.timezone(self.config['time']['time_zone'])
        self.date_time = [di.replace(tzinfo=tzinfo) for di in d]
        self.time_steps = len(self.date_time)

        # need to align date time
        if 'date_method_start_decay' in self.config['albedo'].keys():
            self.config['albedo']['date_method_start_decay'] = \
                self.config['albedo']['date_method_start_decay'].replace(
                    tzinfo=tzinfo)
            self.config['albedo']['date_method_end_decay'] = \
                self.config['albedo']['date_method_end_decay'].replace(
                    tzinfo=tzinfo)

        # if a gridded dataset will be used
        self.gridded = False
        self.forecast_flag = False
        if 'gridded' in self.config:
            self.gridded = True
            if self.config['gridded']['data_type'] in [
                    'hrrr_netcdf', 'hrrr_grib'
            ]:
                self.forecast_flag = self.config['gridded'][
                    'hrrr_forecast_flag']

            # hours from start of day
            self.day_hour = self.start_date - pd.to_datetime(
                d[0].strftime("%Y%m%d"))
            self.day_hour = int(self.day_hour / np.timedelta64(1, 'h'))

        if ((self.start_date > datetime.now() and not self.gridded)
                or (self.end_date > datetime.now() and not self.gridded)):
            raise ValueError("A date set in the future can only be used with"
                             " WRF generated data!")

        self.distribute = {}

        if self.config['system']['qotw']:
            self._logger.info(getqotw())

        # Initialize the distribute dict
        self._logger.info('Started SMRF --> %s' % datetime.now())
        self._logger.info('Model start --> %s' % self.start_date)
        self._logger.info('Model end --> %s' % self.end_date)
        self._logger.info('Number of time steps --> %i' % self.time_steps)
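For context, a minimal usage sketch of the initializer above. The import path
smrf.framework.model_framework.SMRF and the file name config.ini are
assumptions for illustration, not something shown in this example:

# minimal sketch, assuming the __init__ above belongs to the SMRF class and
# that config.ini is a valid SMRF configuration file
from smrf.framework.model_framework import SMRF

s = SMRF('config.ini')            # runs the config checks and logging setup
print(s.start_date, s.end_date)   # attributes populated from the [time] section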
Exemplo n.º 24
0
    def __init__(self, config, external_logger=None):
        """
        Initialize the model, read config file, start and end date, and logging
        """
        # read the config file and store
        if isinstance(config, str):
            if not os.path.isfile(config):
                raise Exception(
                    'Configuration file does not exist --> {}'.format(config))
            self.configFile = config

            # Read in the original users config
            ucfg = get_user_config(config, modules='smrf')

        elif isinstance(config, UserConfig):
            ucfg = config
            self.configFile = config.filename

        else:
            raise Exception('Config passed to SMRF is neither file name nor '
                            ' UserConfig instance')
        # start logging
        if external_logger is None:
            self.smrf_logger = logger.SMRFLogger(ucfg.cfg['system'])
            self._logger = logging.getLogger(__name__)
        else:
            self._logger = external_logger

        # add the title
        self.title(2)

        # Make the output directory if it does not exist
        out = ucfg.cfg['output']['out_location']
        os.makedirs(out, exist_ok=True)

        # Check the user config file for errors and report issues if any
        self._logger.info("Checking config file for issues...")
        warnings, errors = check_config(ucfg)
        print_config_report(warnings, errors, logger=self._logger)
        self.ucfg = ucfg
        self.config = self.ucfg.cfg

        # Exit SMRF if config file has errors
        if len(errors) > 0:
            self._logger.error("Errors in the config file. See configuration"
                               " status report above.")
            sys.exit()

        # Write the config file to the output dir
        full_config_out = abspath(join(out, 'config.ini'))

        self._logger.info("Writing config file with full options.")
        generate_config(self.ucfg, full_config_out)

        # Process the system variables
        for k, v in self.config['system'].items():
            setattr(self, k, v)

        self._setup_date_and_time()
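        # _setup_date_and_time (not shown here) is expected to set
        # self.time_zone, self.start_date/end_date, self.date_time and
        # self.time_steps from the [time] section; see the sketch after
        # this example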

        # need to align date time
        if 'date_method_start_decay' in self.config['albedo'].keys():
            self.config['albedo']['date_method_start_decay'] = \
                self.config['albedo']['date_method_start_decay'].replace(
                    tzinfo=self.time_zone)
            self.config['albedo']['date_method_end_decay'] = \
                self.config['albedo']['date_method_end_decay'].replace(
                    tzinfo=self.time_zone)

        # if a gridded dataset will be used
        self.gridded = False
        self.forecast_flag = False
        self.hrrr_data_timestep = False
        if 'gridded' in self.config:
            self.gridded = True
            if self.config['gridded']['data_type'] in ['hrrr_grib']:
                self.hrrr_data_timestep = \
                    self.config['gridded']['hrrr_load_method'] == 'timestep'

        now = datetime.now().astimezone(self.time_zone)
        if ((self.start_date > now and not self.gridded)
                or (self.end_date > now and not self.gridded)):
            raise ValueError("A date set in the future can only be used with"
                             " WRF generated data!")

        self.distribute = {}

        if self.config['system']['qotw']:
            self._logger.info(getqotw())

        # Initialize the distribute dict
        self._logger.info('Started SMRF --> %s' % now)
        self._logger.info('Model start --> %s' % self.start_date)
        self._logger.info('Model end --> %s' % self.end_date)
        self._logger.info('Number of time steps --> %i' % self.time_steps)
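The body of _setup_date_and_time is not shown above. A minimal sketch of the
kind of work such a helper has to do, modeled on the explicit time handling in
the previous example (the function name, signature and return value here are
assumptions):

# sketch only: derive the model time axis from a [time] config section
from datetime import timedelta

import pandas as pd
import pytz


def setup_date_and_time(time_cfg):
    """Return (time_zone, start, end, date_time list) for a [time] section."""
    tz = pytz.timezone(time_cfg['time_zone'])
    start = pd.to_datetime(time_cfg['start_date'])
    end = pd.to_datetime(time_cfg['end_date'])
    step = timedelta(minutes=int(time_cfg['time_step']))
    date_time = [d.replace(tzinfo=tz)
                 for d in pd.date_range(start, end, freq=step)]
    return tz, start, end, date_time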
Exemplo n.º 25
0
    def __init__(self, core_config, config_file, basin=None):
        """Read in docker-airflow config. """

        mcfg = MasterConfig(path=core_config)
        ucfg = get_user_config(config_file, mcfg=mcfg)
        ucfg.apply_recipes()
        ucfg = cast_all_variables(ucfg, ucfg.mcfg)

        warnings, errors = check_config(ucfg)
        if errors or warnings:
            print_config_report(warnings, errors)

        # from basin_arguments section
        basin_args = ucfg.cfg['basin_arguments']
        self.args = {
            "owner": basin_args['owner'],
            "depends_on_past": basin_args['depends_on_past'],
            "start_date": basin_args['start_date'],
            "email": basin_args['email'],
            "email_on_failure": basin_args['email_on_failure'],
            "email_on_retry": basin_args['email_on_retry'],
            "retries": basin_args['retries'],
            "retry_delay": timedelta(seconds=basin_args['retry_delay'])
        }

        # from snowav_arguments section
        snowav_args = ucfg.cfg['snowav_arguments']
        self.snowav_args = {
            "owner": snowav_args['owner'],
            "depends_on_past": snowav_args['depends_on_past'],
            "start_date": snowav_args['start_date'],
            "email": snowav_args['email'],
            "email_on_failure": snowav_args['email_on_failure'],
            "email_on_retry": snowav_args['email_on_retry'],
            "retries": snowav_args['retries'],
            "retry_delay": timedelta(seconds=snowav_args['retry_delay'])
        }

        # settings
        aimage = ucfg.cfg['settings']['awsm_image']
        atag = ucfg.cfg['settings']['awsm_tag']
        kimage = ucfg.cfg['settings']['katana_image']
        ktag = ucfg.cfg['settings']['katana_tag']
        simage = ucfg.cfg['settings']['snowav_image']
        stag = ucfg.cfg['settings']['snowav_tag']

        self.settings = {
            "awsm_image": aimage + ':' + atag,
            "katana_image": kimage + ':' + ktag,
            "forecast_path": ucfg.cfg['settings']['forecast_path'],
            "geojson": ucfg.cfg['settings']['geojson'],
            "docker_call_backup": ucfg.cfg['settings']['docker_call_backup'],
            "windninja_nthreads": ucfg.cfg['settings']['windninja_nthreads'],
            "katana_zone_letter": ucfg.cfg['settings']['katana_zone_letter'],
            "katana_zone_number": ucfg.cfg['settings']['katana_zone_number'],
            "wy": ucfg.cfg['settings']['wy'],
            "backup_path": ucfg.cfg['settings']['backup_path'],
            "snowav_image": simage + ':' + stag
        }

        # basin sections
        if basin is not None:
            self.basin_settings = {
                "basin": ucfg.cfg[basin]['basin'],
                "base_path": ucfg.cfg[basin]['base_path'],
                "awsm_config": ucfg.cfg[basin]['awsm_config'],
                "retry_awsm_config": ucfg.cfg[basin]['retry_awsm_config'],
                "snowav_config": ucfg.cfg[basin]['snowav_config'],
                "katana_pixel": ucfg.cfg[basin]['katana_pixel'],
                "awsm_path": ucfg.cfg[basin]['awsm_path'],
                "topo_file": ucfg.cfg[basin]['topo_file'],
                "results_path": ucfg.cfg[basin]['results_path']
            }
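A minimal usage sketch of this configuration reader. The class name DagConfig,
the file paths and the basin section name are all assumptions, since only the
__init__ is shown above:

# sketch only; DagConfig, the .ini paths and 'tuolumne' are hypothetical names
dag_cfg = DagConfig(core_config='/path/to/core_config.ini',
                    config_file='/path/to/docker_airflow.ini',
                    basin='tuolumne')
print(dag_cfg.settings['awsm_image'])
print(dag_cfg.basin_settings['topo_file'])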
Exemplo n.º 26
0
    def load_base_config(cls):
        cls._base_config = get_user_config(cls.config_file,
                                           modules=['smrf', 'awsm'])
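A sketch of how a loader like this might be wired into a unittest base class.
The surrounding class, the @classmethod decorator and the config path are
assumptions, since only the method body is shown above:

# sketch only: hypothetical test base class around load_base_config
import unittest

from inicheck.tools import get_user_config


class BaseTestCase(unittest.TestCase):

    config_file = 'config.ini'  # hypothetical path set by the test class

    @classmethod
    def load_base_config(cls):
        cls._base_config = get_user_config(cls.config_file,
                                           modules=['smrf', 'awsm'])

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.load_base_config()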
Exemplo n.º 27
0
from snowav.utils.MidpointNormalize import MidpointNormalize
import matplotlib.pyplot as plt
import plotables as pltz
import time

from inicheck.tools import get_user_config, check_config
from inicheck.output import print_config_report
import gdalUtils  # assumed local module providing the Flags class used below

# from pylab import *
# from scipy.optimize import curve_fit

# TODO: add a command-line option to pass in the config file path
filepath_cfg = '/home/zachuhlmann/code/code/gdal_CL_utilities_config.ini'
filepath_mcfg = '/home/zachuhlmann/code/code/gdal_CL_utilities_master_config.ini'


ucfg = get_user_config(filepath_cfg, master_files=filepath_mcfg,
                       checking_later=False)
warnings, errors = check_config(ucfg)
print_config_report(warnings, errors)
# checking_later allows the script to continue without crashing on config errors
cfg = ucfg.cfg

# check that files exist in inicheck
utils_obj = gdalUtils.Flags(cfg['files']['file_path_in_date1'],
                            cfg['files']['file_path_in_date2'],
                            cfg['files']['file_path_out'])

utils_obj.clip_extent_overlap()
utils_obj.make_diff_mat()

name = cfg['obtain_difference_arrays']['name']
action = cfg['obtain_difference_arrays']['action']
operator = cfg['obtain_difference_arrays']['operator']