Example #1
    def parse_time(self):
        """Parse the time configuration
        """

        self.start_date = pd.to_datetime(self.config['time']['start_date'])
        self.end_date = pd.to_datetime(self.config['time']['end_date'])
        self.time_step = self.config['time']['time_step']
        self.tzinfo = pytz.timezone(self.config['time']['time_zone'])

        # date to use for finding wy
        self.start_date = self.start_date.replace(tzinfo=self.tzinfo)
        self.end_date = self.end_date.replace(tzinfo=self.tzinfo)

        # find water year hour of start and end date
        self.start_wyhr = int(utils.water_day(self.start_date)[0]*24)
        self.end_wyhr = int(utils.water_day(self.end_date)[0]*24)
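
These examples all revolve around utils.water_day, which evidently returns a
tuple of (decimal day of the water year, water year), so the water year hour
used above is just the decimal day times 24. A minimal sketch of the assumed
behavior (illustrative only; the real implementation lives in the smrf/awsm
utils module, and water_day_sketch is a made-up name):

import datetime

def water_day_sketch(indate):
    """Illustrative stand-in for utils.water_day: decimal day of the
    water year and the water year, assuming an October 1 start."""
    d = indate.replace(tzinfo=None)  # callers localize beforehand
    wy = d.year + 1 if d.month >= 10 else d.year
    wy_start = datetime.datetime(wy - 1, 10, 1)
    dday = (d - wy_start).total_seconds() / 86400.0
    return dday, wy

# e.g. water_day_sketch(datetime.datetime(2016, 10, 2))[0] * 24 -> 24.0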
Example #2
    def initialize_aso_updates(self, myawsm, update_fp):
        """
        Read in the ASO update file and parse images by date

        Args:
            myawsm: instantiated awsm class
            update_fp: path to the netCDF file with all flights in it

        Returns:
            update_info: dictionary of updates
        """

        # Update the snow depths in the initialization file using ASO lidar:
        ds = Dataset(update_fp, 'r')
        ds.set_always_mask(False)

        # get all depths, x, y, time
        D_all = ds.variables['depth'][:]
        # np.ma.filled is a no-op on plain ndarrays, so this works whether
        # or not the variable comes back masked
        D_all = np.ma.filled(D_all, fill_value=np.nan)
        D_all[np.isinf(D_all)] = np.nan
        D_all[D_all > 200.0] = np.nan
        # flag and discard implausibly large depths
        if np.any(D_all > 100.0):
            print('Check D_all')
            D_all[D_all > 100.0] = np.nan

        x = ds.variables['x'][:]
        y = ds.variables['y'][:]
        times = ds.variables['time']
        ts = times[:]
        # convert time index to dates
        t = nc.num2date(ts,
                        times.units,
                        times.calendar,
                        only_use_cftime_datetimes=False)

        # find wyhr of dates
        t_wyhr = []
        for t1 in t:
            tmp_date = t1.replace(tzinfo=myawsm.tzinfo)
            # get wyhr
            tmpwyhr = int(utils.water_day(tmp_date)[0] * 24)
            t_wyhr.append(tmpwyhr)

        t_wyhr = np.array(t_wyhr)

        # make dictionary of updates
        update_info = OrderedDict()
        keys = range(1, len(t_wyhr) + 1)
        for idk, k in enumerate(keys):
            # make dictionary for each update
            update_info[k] = {}
            # set update number
            update_info[k]['number'] = k
            update_info[k]['date_time'] = t[idk].replace(tzinfo=myawsm.tzinfo)
            update_info[k]['wyhr'] = t_wyhr[idk]
            # set depth
            update_info[k]['depth'] = D_all[idk, :]

        return update_info, x, y
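
A hedged usage sketch for the method above; the updater object, the awsm
instance, and the file name are placeholders, not part of the original code:

# hypothetical call; 'updater' owns initialize_aso_updates and 'myawsm' is
# an instantiated awsm class as described in the docstring
update_info, x, y = updater.initialize_aso_updates(myawsm, 'aso_flights.nc')
for num, info in update_info.items():
    # each update carries its sequence number, datetime, wyhr and depth image
    print(num, info['date_time'], info['wyhr'], info['depth'].shape)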
Example #3
def get_timestep_ipw(tstep, input_list, ppt_list, myawsm):
    """
    Pull out a time step from the forcing files (IPW) and
    place that time step into a dict

    Args:
        tstep:      datetime of time step
        input_list: numpy array (1D) of integer time steps given
        ppt_list:   numpy array(1D) of integer time steps for ppt_list
        myawsm:     AWSM instance for current run

    Returns:
        inpt:       dictionary of forcing variable images

    """

    inpt = {}

    # map function from these values to the ones required by snobal
    map_val = {1: 'T_a', 5: 'S_n', 0: 'I_lw', 2: 'e_a', 3: 'u'}
    map_val_prec = {0: 'm_pp', 1: 'percent_snow', 2: 'rho_snow', 3: 'T_pp'}

    # get wy hour
    wyhr = int(utils.water_day(tstep)[0] * 24)
    # if we have inputs matching this water year hour
    if np.any(input_list == wyhr):
        i_in = ipw.IPW(os.path.join(myawsm.pathi, 'in.%04i' % (wyhr)))
        # assign soil temp
        inpt['T_g'] = myawsm.soil_temp * np.ones(
            (myawsm.topo.ny, myawsm.topo.nx))
        for f, v in map_val.items():
            # if no solar data, give it zero
            if f == 5 and len(i_in.bands) < 6:
                inpt[v] = np.zeros((myawsm.topo.ny, myawsm.topo.nx))
            else:
                inpt[v] = i_in.bands[f].data
    else:
        raise ValueError('No input time steps for {}'.format(tstep))

    # assign ppt data if present
    if np.any(ppt_list == wyhr):
        i_ppt = ipw.IPW(os.path.join(myawsm.path_ppt, 'ppt.4b_%04i' % (wyhr)))
        for f, v in map_val_prec.items():
            inpt[v] = i_ppt.bands[f].data
    else:
        for f, v in map_val_prec.items():
            inpt[v] = np.zeros((myawsm.topo.ny, myawsm.topo.nx))

    # convert from C to K
    inpt['T_a'] += FREEZE
    inpt['T_pp'] += FREEZE
    inpt['T_g'] += FREEZE

    return inpt
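
get_timestep_ipw locates its forcing purely by water year hour, zero-padded
to four digits in the file names. A quick illustration of the naming
convention the code assumes:

# wyhr 955 maps to these file names under myawsm.pathi and myawsm.path_ppt
wyhr = 955
print('in.%04i' % wyhr)        # in.0955
print('ppt.4b_%04i' % wyhr)    # ppt.4b_0955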
Example #4
    def get_netcdf_out(self):
        """
        Get init fields from output netcdf at correct time index
        """
        i = nc.Dataset(self.init_file)

        # find time step indices to grab
        time = i.variables['time'][:]
        t_units = i.variables['time'].units
        nc_calendar = i.variables['time'].calendar
        nc_dates = nc.num2date(
            time,
            t_units,
            nc_calendar,
            only_use_cftime_datetimes=False,
            only_use_python_datetimes=True,
        )

        if self.restart_crash:
            tmpwyhr = self.restart_hr
        else:
            # start date water year hour
            tmpwyhr = self.start_wyhr

        # make sure we account for time zones
        if hasattr(i.variables['time'], 'time_zone'):
            tzn = pytz.timezone(i.variables['time'].time_zone)
            nc_dates = [tzn.localize(ndt) for ndt in nc_dates]
            if self.tzinfo != tzn:
                # the dates are already tz-aware, so convert rather than
                # localize a second time
                nc_dates = [ndt.astimezone(self.tzinfo) for ndt in nc_dates]
        else:
            nc_dates = [ndt.replace(tzinfo=self.tzinfo) for ndt in nc_dates]

        # find water year hours
        nc_wyhr = np.array(
            [utils.water_day(ndt)[0] * 24.0 for ndt in nc_dates])

        # index of the time step whose water year hour is closest to the
        # restart hour
        idt = np.argmin(np.absolute(nc_wyhr - tmpwyhr))

        if np.min(np.absolute(nc_wyhr - tmpwyhr)) > 24.0:
            self.logger.error(
                'No time in restart file that is within a day of restart time')

        self.logger.warning(
            'Initializing PySnobal with state from water year hour {}'.format(
                nc_wyhr[idt]))

        self.init['z_s'] = i.variables['thickness'][idt, :]
        self.init['rho'] = i.variables['snow_density'][idt, :]
        self.init['T_s_0'] = i.variables['temp_surf'][idt, :]
        self.init['T_s'] = i.variables['temp_snowcover'][idt, :]
        self.init['T_s_l'] = i.variables['temp_lower'][idt, :]
        self.init['h2o_sat'] = i.variables['water_saturation'][idt, :]

        i.close()
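
The restart lookup above is a plain nearest-neighbor search over water year
hours. A toy check of the argmin step, with made-up values:

import numpy as np

nc_wyhr = np.array([696.0, 720.0, 744.0])    # hours available in the file
tmpwyhr = 730                                # requested restart hour
idt = np.argmin(np.absolute(nc_wyhr - tmpwyhr))
print(idt, nc_wyhr[idt])                     # 1 720.0, within the 24 h check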
Example #5
    def run_single_fist_step(self, s):
        """
        Mimics the main.c from the Snobal model. Receives forcing data from
        SMRF in a non-threaded application and initializes the very first step.

        Args:
            s:  smrf class instance

        """

        # loop through the input
        # do_data_tstep needs two input records so only go
        # to the last record-1

        self.data_tstep = self.tstep_info[0]['time_step']
        self.timeSinceOut = 0.0
        tmp_date = self.date_time[0].replace(tzinfo=self.tzinfo)
        wyhr = utils.water_day(tmp_date)[0] * 24.0
        start_step = wyhr  # if restart then it would be higher if this were iSnobal
        step_time = start_step * self.data_tstep

        self.output_rec['current_time'] = step_time * \
            np.ones(self.output_rec['elevation'].shape)
        self.output_rec['time_since_out'] = self.timeSinceOut * \
            np.ones(self.output_rec['elevation'].shape)

        # get first time step
        self.input1 = {}
        for var, v in self.variable_list.items():
            # get the data desired
            data = getattr(s.distribute[v['module']], v['variable'])

            if data is None:
                data = np.zeros((self.ny, self.nx))
                self._logger.info(
                    'No data from smrf to iSnobal for {} in {}'.format(
                        v, self.date_time[0]))

            self.input1[self.map_val[var]] = data

        # set ground temp
        self.input1['T_g'] = self.soil_temp * np.ones((self.ny, self.nx))

        self.input1['T_a'] += FREEZE
        self.input1['T_pp'] += FREEZE
        self.input1['T_g'] += FREEZE

        # for counting how many steps since the start of the run
        self.j = 1

        self._logger.info(
            'Finished initializing first time step for iPySnobal')
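
The three temperature fields are shifted from Celsius to Kelvin in place at
the end of initialization. FREEZE is presumably the Snobal freezing-point
constant (273.16 K is an assumption here); a minimal check of the
broadcast-and-shift pattern:

import numpy as np

FREEZE = 273.16                  # assumed value of the Snobal constant
ny, nx = 2, 3
T_g = -2.5 * np.ones((ny, nx))   # uniform soil temperature in Celsius
T_g += FREEZE                    # in-place shift to Kelvin, 270.66 everywhere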
Example #6
    def initialize_aso_updates(self, myawsm, update_fp):
        """
        Read in the ASO update file and parse images by date

        Args:
            myawsm: instantiated awsm class
            update_fp: path to the netCDF file with all flights in it

        Returns:
            update_info: dictionary of updates
        """

        # Update the snow depths in the initialization file using ASO lidar:
        ds = Dataset(update_fp, 'r')
        # get all depths, x, y, time
        D_all = ds.variables['depth'][:]
        if np.any(D_all > 100):
            print('Check D_all')
        x = ds.variables['x'][:]
        y = ds.variables['y'][:]
        times = ds.variables['time']
        ts = times[:]
        # convert time index to dates
        t = nc.num2date(ts, times.units, times.calendar)
        # find wyhr of dates

        t_wyhr = []
        for t1 in t:
            tmp_date = t1.replace(tzinfo=myawsm.tzinfo)
            # get wyhr
            tmpwyhr = int(utils.water_day(tmp_date)[0] * 24)
            t_wyhr.append(tmpwyhr)

        t_wyhr = np.array(t_wyhr)

        # make dictionary of updates
        update_info = OrderedDict()
        keys = range(1, len(t_wyhr) + 1)
        for idk, k in enumerate(keys):
            # make dictionary for each update
            update_info[k] = {}
            # set update number
            update_info[k]['number'] = k
            update_info[k]['date_time'] = t[idk].replace(tzinfo=myawsm.tzinfo)
            update_info[k]['wyhr'] = t_wyhr[idk]
            # set depth
            update_info[k]['depth'] = D_all[idk, :]

        return update_info, x, y
Example #7
    def distribute_for_susong1999(self, data, ppt_temp, time, mask=None):
        """
        Distribute precipitation for susong1999: distribute the data,
        determine the precip phase and density, and update the storm
        tracking state
        """

        if data.sum() > 0:

            # distribute data and set the min/max
            self._distribute(data)
            # see if the mass threshold has been passed
            self.precip = utils.set_min_max(self.precip, self.min, self.max)

            # determine the precip phase and den
            snow_den, perc_snow = snow.calc_phase_and_density(
                ppt_temp, self.precip, nasde_model=self.nasde_model)

            # determine the time since last storm
            stormDays, stormPrecip = storms.time_since_storm(
                self.precip,
                perc_snow,
                time_step=self.time_step / 60 / 24,
                mass=self.ppt_threshold,
                time=self.time_to_end_storm,
                stormDays=self.storm_days,
                stormPrecip=self.storm_total)

            # save the model state
            self.percent_snow = perc_snow
            self.snow_density = snow_den
            self.storm_days = stormDays
            self.storm_total = stormPrecip

        else:

            self.storm_days += self.time_step / 60 / 24

            # make everything else zeros
            self.precip = np.zeros(self.storm_days.shape)
            self.percent_snow = np.zeros(self.storm_days.shape)
            self.snow_density = np.zeros(self.storm_days.shape)

        # day of last storm, this will be used in albedo
        self.last_storm_day = utils.water_day(data.name)[0] - \
            self.storm_days - 0.001

        # get the time since most recent storm
        if mask is not None:
            self.last_storm_day_basin = np.max(mask * self.last_storm_day)
        else:
            self.last_storm_day_basin = np.max(self.last_storm_day)
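
last_storm_day is the decimal water day on which each pixel last saw a storm:
the water day of the current record minus the days since the storm, nudged
down by 0.001 so the albedo routine sees a strictly positive age. A toy
calculation under that reading:

import numpy as np

wday = 123.5                          # decimal water day for data.name
storm_days = np.array([0.0, 2.0])     # days since the last storm, two pixels
last_storm_day = wday - storm_days - 0.001
print(last_storm_day)                 # [123.499 121.499]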
Example #8
    def radiation_dates(self, date_time):
        """
        Calculate some times based on the date for ``stoporad``

        Args:
            date_time: date time object

        Returns:
            (tuple): tuple containing:

                * **wy_day** - day of water year from October 1
                * **wyear** - water year
                * **tz_min_west** - minutes west of UTC for timezone
        """

        # get the current day of water year
        wy_day, wyear = utils.water_day(date_time)

        # determine the minutes west of timezone
        tz_min_west = np.abs(date_time.utcoffset().total_seconds() / 60)

        return wy_day, wyear, tz_min_west
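
tz_min_west only needs a timezone-aware datetime; for US timezones the UTC
offset is negative, hence the absolute value. A worked example with pytz and
pandas, matching the kind of inputs these classes pass around:

import pandas as pd
import pytz

dt = pytz.timezone('US/Pacific').localize(pd.Timestamp('2017-01-15 12:00'))
tz_min_west = abs(dt.utcoffset().total_seconds() / 60)
print(tz_min_west)   # 480.0 minutes west of UTC (PST is UTC-8)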
Example #9
    def parse_folder_structure(self):
        """Parse the config to get the folder structure

        Raises:
            ValueError: daily_folders can only be run with smrf_ipysnobal
        """

        if self.config['paths']['path_dr'] is not None:
            self.path_dr = os.path.abspath(self.config['paths']['path_dr'])
        else:
            print('No base path to drive given. Exiting now!')
            sys.exit()

        self.basin = self.config['paths']['basin']
        self.water_year = utils.water_day(self.start_date)[1]
        self.project_name = self.config['paths']['project_name']
        self.project_description = self.config['paths']['project_description']
        self.folder_date_style = self.config['paths']['folder_date_style']

        # setting to output in separate daily folders
        self.daily_folders = self.config['awsm system']['daily_folders']
        if self.daily_folders and not self.run_smrf_ipysnobal:
            raise ValueError('Cannot run daily_folders with anything other'
                             ' than run_smrf_ipysnobal')
Example #10
def run_awsm_daily(myawsm):
    """
    Run smrf and pysnobal on an hourly scale, with outputs separated into
    daily folders. Running hourly allows for forecasts such as the 18 hour
    HRRR forecast.
    """
    # get the array of time steps over which to simulate
    d = utils.date_range(myawsm.start_date, myawsm.end_date,
                         pd.to_timedelta(myawsm.time_step, unit='m'),
                         myawsm.tzinfo)

    if myawsm.do_forecast:
        myawsm._logger.warning('Changing PySnobal output to hourly to allow'
                               ' for forecast on each hour')
        myawsm.output_freq = 1

    # set variables for adding a day or hour
    add_day = pd.to_timedelta(23, unit='h')
    add_hour = pd.to_timedelta(1, unit='h')

    start_day = pd.to_datetime(d[0].strftime("%Y%m%d"))
    end_day = pd.to_datetime(d[-1].strftime("%Y%m%d"))
    # if we're starting on an intermediate hour, find timesteps
    # up to first full day
    if d[0] != start_day:
        start_diff = start_day + add_day - d[0]
    else:
        start_diff = add_day
    # find timesteps to end run on last, incomplete day
    if d[-1] != end_day:
        end_diff = d[-1] - end_day
    else:
        end_diff = add_day

    # find total days to run model
    total_days = int(len(d) * myawsm.time_step / (60 * 24))

    # loop through timesteps and initialize runs for each day
    for day in range(total_days):
        # set variable output names
        myawsm.snow_name = 'snow_00'
        myawsm.em_name = 'em_00'
        # set start and end appropriately
        if day == 0:
            myawsm.start_date = d[0]
            myawsm.end_date = d[0] + start_diff
        elif day == total_days - 1:
            myawsm.start_date = start_day + pd.to_timedelta(24 * day, unit='h')
            myawsm.end_date = myawsm.start_date + end_diff
        else:
            myawsm.start_date = start_day + pd.to_timedelta(24 * day, unit='h')
            myawsm.end_date = myawsm.start_date + pd.to_timedelta(23, unit='h')

        # recalculate start and end water year hour
        tmp_date = myawsm.start_date.replace(tzinfo=myawsm.tzinfo)
        tmp_end_date = myawsm.end_date.replace(tzinfo=myawsm.tzinfo)
        myawsm.start_wyhr = int(utils.water_day(tmp_date)[0] * 24)
        myawsm.end_wyhr = int(utils.water_day(tmp_end_date)[0] * 24)

        # find day for labelling the output folder nested one more level in
        daily_append = '{}'.format(myawsm.start_date.strftime("%Y%m%d"))
        myawsm.pathro = os.path.join(myawsm.pathrr, 'output' + daily_append)
        if not os.path.exists(myawsm.pathro):
            os.makedirs(myawsm.pathro)

        # turn off forecast for daily run (will be turned on later if it was true)
        myawsm.config['gridded']['hrrr_forecast_flag'] = False

        # ################# run_model for day ###############################
        myawsm.run_smrf_ipysnobal()

        # reset restart to be last output for next time step
        myawsm.ipy_init_type = 'netcdf_out'
        myawsm.config['ipysnobal initial conditions']['init_file'] = \
            os.path.join(myawsm.pathro, myawsm.snow_name + '.nc')

        # do the 18hr forecast on each hour if forecast is true
        if myawsm.do_forecast:
            # turn forecast back on in smrf config
            myawsm.config['gridded']['hrrr_forecast_flag'] = True

            # now loop through the forecast hours for 18hr forecasts
            d_inner = utils.date_range(
                myawsm.start_date, myawsm.end_date,
                pd.to_timedelta(myawsm.time_step, unit='m'), myawsm.tzinfo)
            for t in d_inner:
                # find hour from start of day
                day_hour = t - pd.to_datetime(d_inner[0].strftime("%Y%m%d"))
                day_hour = int(day_hour / np.timedelta64(1, 'h'))

                # reset output names
                myawsm.snow_name = 'snow_{:02d}'.format(day_hour)
                myawsm.em_name = 'em_{:02d}'.format(day_hour)

                # reset start and end days
                myawsm.start_date = t
                myawsm.end_date = t + pd.to_timedelta(myawsm.n_forecast_hours,
                                                      unit='h')

                # recalculate start and end water year hour
                tmp_date = myawsm.start_date.replace(tzinfo=myawsm.tzinfo)
                tmp_end_date = myawsm.end_date.replace(tzinfo=myawsm.tzinfo)
                myawsm.start_wyhr = int(utils.water_day(tmp_date)[0] * 24)
                myawsm.end_wyhr = int(utils.water_day(tmp_end_date)[0] * 24)

                # run the model for the forecast times
                myawsm.run_smrf_ipysnobal()
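
The partial-day bookkeeping in run_awsm_daily is easiest to see with concrete
numbers. Under the same convention (a 23 h span covers one full day of hourly
steps), a run starting mid-day trims the first day like this:

import pandas as pd

d0 = pd.Timestamp('2019-03-01 05:00')              # first simulation step
start_day = pd.to_datetime(d0.strftime('%Y%m%d'))  # midnight of that day
add_day = pd.to_timedelta(23, unit='h')
start_diff = start_day + add_day - d0
print(start_diff)   # 0 days 18:00:00, so day 0 runs 05:00 through 23:00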
Example #11
    def run(self):
        """
        Mimics the main.c from the Snobal model. Runs PySnobal while receiving
        forcing data from the SMRF queue.

        """
        force_variables = [
            'thermal', 'air_temp', 'vapor_pressure', 'wind_speed', 'net_solar',
            'soil_temp', 'precip', 'percent_snow', 'snow_density',
            'precip_temp'
        ]

        # loop through the input
        # do_data_tstep needs two input records so only go
        # to the last record-1

        data_tstep = self.tstep_info[0]['time_step']
        timeSinceOut = 0.0
        tmp_date = self.date_time[0].replace(tzinfo=self.tzinfo)
        wyhr = utils.water_day(tmp_date)[0] * 24.0
        start_step = wyhr  # if restart then it would be higher if this were iSnobal
        step_time = start_step * data_tstep

        self.output_rec['current_time'] = step_time * np.ones(
            self.output_rec['elevation'].shape)
        self.output_rec['time_since_out'] = timeSinceOut * np.ones(
            self.output_rec['elevation'].shape)

        # map function from these values to the ones required by snobal
        map_val = {
            'air_temp': 'T_a',
            'net_solar': 'S_n',
            'thermal': 'I_lw',
            'vapor_pressure': 'e_a',
            'wind_speed': 'u',
            'soil_temp': 'T_g',
            'precip': 'm_pp',
            'percent_snow': 'percent_snow',
            'snow_density': 'rho_snow',
            'precip_temp': 'T_pp'
        }

        # get first timestep
        input1 = {}
        for v in force_variables:
            if v in self.queue.keys():
                data = self.queue[v].get(self.date_time[0],
                                         block=True,
                                         timeout=None)
                if data is None:
                    data = np.zeros((self.ny, self.nx))
                    self._logger.info(
                        'No data from smrf to iSnobal for {} in {}'.format(
                            v, self.date_time[0]))

                input1[map_val[v]] = data
            elif v != 'soil_temp':
                self._logger.error('Value not in keys: {}'.format(v))

        # set ground temp
        input1['T_g'] = self.soil_temp * np.ones((self.ny, self.nx))

        input1['T_a'] += FREEZE
        input1['T_pp'] += FREEZE
        input1['T_g'] += FREEZE

        # tell queue we assigned all the variables
        self.queue['isnobal'].put([self.date_time[0], True])
        self._logger.info('Finished initializing first timestep for iPySnobal')

        j = 1
        for tstep in self.date_time[1:]:
            # get the output variables then pass to the function
            # this avoids zeroing of the energetics every timestep
            input2 = {}
            for v in force_variables:
                if v in self.queue.keys():
                    # get variable from smrf queue
                    data = self.queue[v].get(tstep, block=True, timeout=None)
                    if data is None:
                        data = np.zeros((self.ny, self.nx))
                        self._logger.info(
                            'No data from smrf to iSnobal for {} in {}'.format(
                                v, tstep))

                    input2[map_val[v]] = data
            # set ground temp
            input2['T_g'] = self.soil_temp * np.ones((self.ny, self.nx))
            # convert variables to Kelvin
            input2['T_a'] += FREEZE
            input2['T_pp'] += FREEZE
            input2['T_g'] += FREEZE

            first_step = j
            if self.updater is not None:
                if tstep in self.updater.update_dates:
                    self.output_rec = \
                        self.updater.do_update_pysnobal(self.output_rec,
                                                        tstep)
                    first_step = 1

            self._logger.info(
                'running PySnobal for timestep: {}'.format(tstep))
            rt = snobal.do_tstep_grid(input1,
                                      input2,
                                      self.output_rec,
                                      self.tstep_info,
                                      self.options['constants'],
                                      self.params,
                                      first_step=first_step,
                                      nthreads=self.nthreads)

            if rt != -1:
                self._logger.error(
                    'ipysnobal error on time step {}, pixel {}'.format(
                        tstep, rt))
                break

            self._logger.info('Finished timestep: {}'.format(tstep))
            input1 = input2.copy()

            # output at the frequency and the last time step
            if (j * (data_tstep / 3600.0) % self.options['output']['frequency'] == 0)\
                    or (j == len(self.options['time']['date_time']) - 1):
                io_mod.output_timestep(self.output_rec, tstep, self.options,
                                       self.awsm_output_vars)
                self.output_rec['time_since_out'] = \
                    np.zeros(self.output_rec['elevation'].shape)

            j += 1

            # put the value into the output queue so clean knows it's done
            self.queue['isnobal'].put([tstep, True])
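
The output condition converts the step counter to hours before taking the
modulus, so with an hourly data_tstep of 3600 s it reduces to
j % frequency == 0. A quick check under that assumption:

data_tstep = 3600.0   # seconds per time step (assumed hourly)
frequency = 24        # output once per simulated day
for j in (23, 24, 48):
    print(j, (j * (data_tstep / 3600.0)) % frequency == 0)
# 23 False, 24 True, 48 True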
Example #12
def run_awsm_daily_ops(config_file):
    """
    Run each day separately. Calls run_awsm
    """
    # define some formats
    fmt_day = '%Y%m%d'
    fmt_cfg = '%Y-%m-%d %H:%M'
    add_day = pd.to_timedelta(24, unit='h')

    # get config instance
    config = get_user_config(config_file,
                             modules=['smrf', 'awsm'])

    # copy the config and get total start and end
    # set naming style
    config.raw_cfg['paths']['folder_date_style'] = 'day'
    config.apply_recipes()
    config = cast_all_variables(config, config.mcfg)

    # get the water year
    cfg_start_date = pd.to_datetime(config.cfg['time']['start_date'])
    tzinfo = pytz.timezone(config.cfg['time']['time_zone'])
    wy = utils.water_day(cfg_start_date.replace(tzinfo=tzinfo))[1]

    # find the model start depending on restart
    if config.cfg['isnobal restart']['restart_crash']:
        offset_wyhr = int(config.cfg['isnobal restart']['wyh_restart_output'])
        wy_start = pd.to_datetime('{:d}-10-01'.format(wy - 1))
        model_start = wy_start + pd.to_timedelta(offset_wyhr, unit='h')
    else:
        model_start = config.cfg['time']['start_date']

    model_end = config.cfg['time']['end_date']
    isops = config.cfg['paths']['isops']
    if isops:
        devops = 'ops'
    else:
        devops = 'devel'

    # find output location for previous output
    paths = config.cfg['paths']

    prev_out_base = os.path.join(paths['path_dr'],
                                 paths['basin'],
                                 devops,
                                 'wy{}'.format(wy),
                                 paths['proj'],
                                 'runs')

    prev_data_base = os.path.join(paths['path_dr'],
                                  paths['basin'],
                                  devops,
                                  'wy{}'.format(wy),
                                  paths['proj'],
                                  'data')

    # find day of start and end
    start_day = pd.to_datetime(model_start.strftime(fmt_day))
    end_day = pd.to_datetime(model_end.strftime(fmt_day))

    # find total range of run
    ndays = int((end_day-start_day).days) + 1
    date_list = [start_day +
                 pd.to_timedelta(x, unit='D') for x in range(0, ndays)]

    # loop through daily runs and run awsm
    for idd, sd in enumerate(date_list):
        new_config = copy.deepcopy(config)
        if idd > 0:
            new_config.raw_cfg['isnobal restart']['restart_crash'] = False
            new_config.raw_cfg['grid']['thresh_normal'] = 60
            new_config.raw_cfg['grid']['thresh_medium'] = 10
            new_config.raw_cfg['grid']['thresh_small'] = 1
        # get the end of the day
        ed = sd + add_day

        # make sure we're in the model date range
        if sd < model_start:
            sd = model_start
        if ed > model_end:
            ed = model_end

        # set the start and end dates
        new_config.raw_cfg['time']['start_date'] = sd.strftime(fmt_cfg)
        new_config.raw_cfg['time']['end_date'] = ed.strftime(fmt_cfg)

        # reset the initialization
        if idd > 0:
            # find previous output file
            prev_day = sd - pd.to_timedelta(1, unit='D')
            prev_out = os.path.join(prev_out_base,
                                    'run{}'.format(prev_day.strftime(fmt_day)),
                                    'snow.nc')
            # reset if running the model
            if new_config.cfg['awsm master']['model_type'] is not None:
                new_config.raw_cfg['files']['init_type'] = 'netcdf_out'
                new_config.raw_cfg['files']['init_file'] = prev_out

            # if we have a previous storm day file, use it
            prev_storm = os.path.join(prev_data_base,
                                      'data{}'.format(
                                          prev_day.strftime(fmt_day)),
                                      'smrfOutputs', 'storm_days.nc')
            if os.path.isfile(prev_storm):
                new_config.raw_cfg['precip']['storm_days_restart'] = prev_storm

        # apply recipes with new settings
        new_config.apply_recipes()
        new_config = cast_all_variables(new_config, new_config.mcfg)

        # run awsm for the day
        run_awsm(new_config)
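
The explicit ndays loop that builds date_list could equally use
pandas.date_range, which is inclusive of both endpoints at daily frequency;
a sketch of the equivalence:

import pandas as pd

start_day = pd.Timestamp('2019-03-01')
end_day = pd.Timestamp('2019-03-03')
date_list = list(pd.date_range(start_day, end_day, freq='D'))
print(date_list)   # three daily timestamps, same as the range(ndays) loop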
Example #13
    def __init__(self, config):
        """
        Initialize the model: read the config file, set the start and end
        dates, and set up logging

        Args:
            config: string path to the config file or an inicheck UserConfig
                instance
        """
        # read the config file and store
        awsm_mcfg = MasterConfig(modules='awsm')
        smrf_mcfg = MasterConfig(modules='smrf')

        if isinstance(config, str):
            if not os.path.isfile(config):
                raise Exception('Configuration file does not exist --> {}'
                                .format(config))
            configFile = config

            try:
                combined_mcfg = MasterConfig(modules=['smrf', 'awsm'])

                # Read in the original users config
                self.ucfg = get_user_config(configFile, mcfg=combined_mcfg)
                self.configFile = configFile

            except UnicodeDecodeError as e:
                print(e)
                raise Exception(('The configuration file is not encoded in '
                                 'UTF-8, please change and retry'))

        elif isinstance(config, UserConfig):
            self.ucfg = config
            configFile = ''

        else:
            raise Exception(
                'Config passed to AWSM is neither file name nor UserConfig instance')

        # get the git version
        self.gitVersion = awsm_utils.getgitinfo()

        # create blank log and error log because logger is not initialized yet
        self.tmp_log = []
        self.tmp_err = []
        self.tmp_warn = []

        # Check the user config file for errors and report issues if any
        self.tmp_log.append("Checking config file for issues...")
        warnings, errors = check_config(self.ucfg)
        print_config_report(warnings, errors)

        self.config = self.ucfg.cfg

        # Report if the config file has errors
        if len(errors) > 0:
            print("Errors in the config file. "
                  "See configuration status report above.")

        # ################## Decide which modules to run #####################
        self.do_smrf = self.config['awsm master']['run_smrf']
        self.model_type = self.config['awsm master']['model_type']
        self.do_forecast = False
        if 'gridded' in self.config and self.do_smrf:
            self.do_forecast = self.config['gridded']['hrrr_forecast_flag']

            # WARNING: The value here is inferred in SMRF.data.loadGrid. A
            # change here requires a change there
            self.n_forecast_hours = 18

        # Options for converting files
        self.do_make_in = self.config['awsm master']['make_in']
        self.do_make_nc = self.config['awsm master']['make_nc']
        self.snowav_config = self.config['awsm master']['snowav_config']

        # options for masking isnobal
        self.mask_isnobal = self.config['awsm master']['mask_isnobal']

        # prompt for making directories
        self.prompt_dirs = self.config['awsm master']['prompt_dirs']

        # store smrf version if running smrf
        self.smrf_version = smrf.__version__

        # ################ Time information ##################
        self.start_date = pd.to_datetime(self.config['time']['start_date'])
        self.end_date = pd.to_datetime(self.config['time']['end_date'])
        self.time_step = self.config['time']['time_step']
        self.tmz = self.config['time']['time_zone']
        self.tzinfo = pytz.timezone(self.config['time']['time_zone'])
        # date to use for finding wy
        tmp_date = self.start_date.replace(tzinfo=self.tzinfo)
        tmp_end_date = self.end_date.replace(tzinfo=self.tzinfo)

        # find water year hour of start and end date
        self.start_wyhr = int(utils.water_day(tmp_date)[0]*24)
        self.end_wyhr = int(utils.water_day(tmp_end_date)[0]*24)

        # find start of water year
        tmpwy = utils.water_day(tmp_date)[1] - 1
        self.wy_start = pd.to_datetime('{:d}-10-01'.format(tmpwy))

        # ################ Store some paths from config file ##################
        # path to the base drive (i.e. /data/blizzard)
        if self.config['paths']['path_dr'] is not None:
            self.path_dr = os.path.abspath(self.config['paths']['path_dr'])
        else:
            print('No base path to drive given. Exiting now!')
            sys.exit()

        # name of your basin (i.e. Tuolumne)
        self.basin = self.config['paths']['basin']
        # water year of run
        self.wy = utils.water_day(tmp_date)[1]
        # if the run is operational or not
        self.isops = self.config['paths']['isops']
        # name of project if not an operational run
        self.proj = self.config['paths']['proj']
        # check for project description
        self.desc = self.config['paths']['desc']
        # find style for folder date stamp
        self.folder_date_style = self.config['paths']['folder_date_style']

        # setting to output in separate daily folders
        self.daily_folders = self.config['awsm system']['daily_folders']
        if self.daily_folders and not self.run_smrf_ipysnobal:
            raise ValueError('Cannot run daily_folders with anything other'
                             ' than run_smrf_ipysnobal')

        if self.do_forecast:
            self.tmp_log.append('Forecasting set to True')

            if self.config['system']['threading']:
                # Can't run threaded smrf if running forecast_data
                self.tmp_err.append('Cannot run SMRF threaded with'
                                    ' gridded input data')
                print(self.tmp_err)
                sys.exit()

        # Time step mass thresholds for iSnobal
        self.mass_thresh = []
        self.mass_thresh.append(self.config['grid']['thresh_normal'])
        self.mass_thresh.append(self.config['grid']['thresh_medium'])
        self.mass_thresh.append(self.config['grid']['thresh_small'])

        # threads for running iSnobal
        self.ithreads = self.config['awsm system']['ithreads']
        # how often to output from iSnobal
        self.output_freq = self.config['awsm system']['output_frequency']
        # number of timesteps to run if you don't want to run the whole thing
        self.run_for_nsteps = self.config['awsm system']['run_for_nsteps']
        # pysnobal output variables
        self.pysnobal_output_vars = self.config['awsm system']['variables']
        self.pysnobal_output_vars = [wrd.lower()
                                     for wrd in self.pysnobal_output_vars]
        # snow and emname
        self.snow_name = self.config['awsm system']['snow_name']
        self.em_name = self.config['awsm system']['em_name']

        # options for restarting iSnobal
        self.restart_crash = False
        if self.config['isnobal restart']['restart_crash']:
            self.restart_crash = True
            self.depth_thresh = self.config['isnobal restart']['depth_thresh']
            self.restart_hr = \
                int(self.config['isnobal restart']['wyh_restart_output'])
            self.restart_folder = self.config['isnobal restart']['output_folders']

        # iSnobal active layer
        self.active_layer = self.config['grid']['active_layer']

        # if we are going to run ipysnobal with smrf
        if self.model_type in ['ipysnobal', 'smrf_ipysnobal']:
            self.ipy_threads = self.ithreads
            self.ipy_init_type = \
                self.config['files']['init_type']
            self.forcing_data_type = \
                self.config['ipysnobal']['forcing_data_type']

        # parameters needed for restart procedure
        self.restart_run = False
        if self.config['isnobal restart']['restart_crash']:
            self.restart_run = True
            # find restart hour datetime
            reset_offset = pd.to_timedelta(self.restart_hr, unit='h')
            # set a new start date for this run
            self.restart_date = self.wy_start + reset_offset
            self.tmp_log.append('Restart date is {}'.format(self.restart_date))

        # read in update depth parameters
        self.update_depth = False
        if 'update depth' in self.config:
            self.update_depth = self.config['update depth']['update']
        if self.update_depth:
            self.update_file = self.config['update depth']['update_file']
            self.update_buffer = self.config['update depth']['buffer']
            self.flight_numbers = self.config['update depth']['flight_numbers']
            # if flights to use is not list, make it a list
            if self.flight_numbers is not None:
                if not isinstance(self.flight_numbers, list):
                    self.flight_numbers = [self.flight_numbers]

        # list of sections related to AWSM
        # These will be removed for smrf config
        self.sec_awsm = awsm_mcfg.cfg.keys()
        self.sec_smrf = smrf_mcfg.cfg.keys()

        # Make rigid directory structure
        self.mk_directories()

        # ################ Topo data for iSnobal ##################
        # get topo stats
        self.csys = self.config['grid']['csys'].upper()
        self.nbits = int(self.config['grid']['nbits'])
        self.soil_temp = self.config['soil_temp']['temp']
        # get topo class
        self.topo = mytopo(self.config['topo'], self.mask_isnobal,
                           self.model_type, self.csys, self.pathdd)

        # ################ Generate config backup ##################
        # set location for backup and output backup of awsm sections
        config_backup_location = \
            os.path.join(self.pathdd, 'awsm_config_backup.ini')
        generate_config(self.ucfg, config_backup_location)

        # create log now that directory structure is done
        self.createLog()

        # if we have a model, initialize it
        if self.model_type is not None:
            self.myinit = modelInit(self._logger, self.config, self.topo,
                                    self.start_wyhr, self.pathro, self.pathrr,
                                    self.pathinit, self.wy_start)
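
The start-of-water-year line in the constructor uses the convention seen
throughout these examples: water year N begins on October 1 of calendar year
N-1. A worked check, assuming utils.water_day returns (decimal day, water
year):

import pandas as pd

wy = 2017   # water year, e.g. utils.water_day(tmp_date)[1]
wy_start = pd.to_datetime('{:d}-10-01'.format(wy - 1))
print(wy_start)   # 2016-10-01 00:00:00, the start of WY2017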
Example #14
    def distribute_for_marks2017(self, data, precip_temp, ta, time, mask=None):
        """
        Specialized distribute function for working with the new accumulated
        snow density model. Marks2017 requires the storm total and a corrected
        precipitation so as to avoid precip between storms.
        """

        if data.sum() > 0.0:
            # Check for time in every storm
            for i, s in self.storms.iterrows():
                storm_start = s['start']
                storm_end = s['end']

                if storm_start <= time <= storm_end:
                    # establish storm info
                    self.storm_id = i
                    storm = self.storms.iloc[self.storm_id]
                    self.storming = True
                    break
                else:
                    self.storming = False

            self._logger.debug("Storming? {0}".format(self.storming))
            self._logger.debug("Current Storm ID = {0}".format(self.storm_id))

            # distribute data and set the min/max
            self._distribute(data, zeros=None)
            self.precip = utils.set_min_max(self.precip, self.min, self.max)

            if time == storm_start:
                # Entered into a new storm period distribute the storm total
                self._logger.debug('{0} Entering storm #{1}'.format(
                    data.name, self.storm_id + 1))
                if precip_temp.min() < 2.0:
                    self._logger.debug(
                        'Distributing total precip for storm #{0}'.format(
                            self.storm_id + 1))
                    self._distribute(storm[self.stations].astype(float),
                                     other_attribute='storm_total')
                    self.storm_total = utils.set_min_max(
                        self.storm_total, self.min, self.max)

            if self.storming and precip_temp.min() < 2.0:
                self._logger.debug(
                    'Calculating new snow density for storm #{0}'.format(
                        self.storm_id + 1))
                # determine the precip phase and den
                snow_den, perc_snow = snow.calc_phase_and_density(
                    precip_temp, self.precip, nasde_model=self.nasde_model)

            else:
                snow_den = np.zeros(self.precip.shape)
                perc_snow = np.zeros(self.precip.shape)

            # calculate decimal days since last storm
            self.storm_days = storms.time_since_storm_pixel(
                self.precip,
                precip_temp,
                perc_snow,
                storming=self.storming,
                time_step=self.time_step / 60.0 / 24.0,
                stormDays=self.storm_days,
                mass=self.ppt_threshold)

        else:
            self.storm_days += self.time_step / 60.0 / 24.0
            self.precip = np.zeros(self.storm_days.shape)
            perc_snow = np.zeros(self.storm_days.shape)
            snow_den = np.zeros(self.storm_days.shape)

        # save the model state
        self.percent_snow = perc_snow
        self.snow_density = snow_den

        # day of last storm, this will be used in albedo
        self.last_storm_day = utils.water_day(data.name)[0] - \
            self.storm_days - 0.001

        # get the time since most recent storm
        if mask is not None:
            self.last_storm_day_basin = np.max(mask * self.last_storm_day)
        else:
            self.last_storm_day_basin = np.max(self.last_storm_day)
Example #15
def run():

    fmt_file = '%Y%m%d'
    # basin = 'SJ'
    basin = 'TB'
    wy = 2017
    fpdir = '/home/micahsandusky/Code/awsfTesting/newupdatetest'

    sj_updates = {}
    sj_updates[2018] = ['2018-04-23', '2018-05-28']
    sj_updates[2017] = []

    tuol_updates = {}
    tuol_updates[2013] = ['2013-04-03', '2013-04-29', '2013-05-03', '2013-05-25',
                          '2013-06-01', '2013-06-08']
    tuol_updates[2014] = ['2014-03-23', '2014-04-07', '2014-04-13', '2014-04-20',
                          '2014-04-28', '2014-05-02', '2014-05-11', '2014-05-17',
                          '2014-05-27', '2014-05-31', '2014-06-05']
    tuol_updates[2015] = ['2015-02-18', '2015-03-06', '2015-03-25', '2015-04-03',
                          '2015-04-09', '2015-04-15', '2015-04-27', '2015-05-01',
                          '2015-05-28', '2015-06-08']
    tuol_updates[2016] = ['2016-03-26', '2016-04-07', '2016-04-16',
                          '2016-04-26', '2016-05-09', '2016-05-27', '2016-06-07',
                          '2016-06-13', '2016-06-20', '2016-06-25', '2016-07-01',
                          '2016-07-08']
    tuol_updates[2017] = ['2017-01-29', '2017-03-03', '2017-04-01',
                          '2017-05-02', '2017-06-04', '2017-07-09', '2017-07-17',
                          '2017-08-16']

    if basin == 'TB':
        date_lst = tuol_updates[wy]
    elif basin == 'SJ':
        date_lst = sj_updates[wy]

    # put into datetime
    date_lst = [pd.to_datetime(dt+' 23:00') for dt in date_lst]
    tzinfo = pytz.timezone('UTC')
    tmp_date = date_lst[0]
    tmp_date = tmp_date.replace(tzinfo=tzinfo)
    # find start of water year
    tmpwy = utils.water_day(tmp_date)[1]
    wy = tmpwy
    start_date = pd.to_datetime('{:d}-10-01'.format(tmpwy-1))

    # get the paths
    fp_lst = ['wy{}/{}{}_SUPERsnow_depth.asc'.format(wy, basin, dt.strftime(fmt_file))
              for dt in date_lst]
    fp_lst = [os.path.join(fpdir, fpu) for fpu in fp_lst]

    if basin == 'TB':
        dem_fp = '/data/blizzard/tuolumne/common_data/topo/tuolx_dem_50m.ipw'
        gisPath = '/home/micahsandusky/Code/awsfTesting/initUpdate/'
        maskPath = os.path.join(gisPath, 'tuolx_mask_50m.ipw')
        if wy < 2017:
            maskPath = os.path.join(gisPath, 'tuolx_hetchy_mask_50m.ipw')
    elif basin == 'SJ':
        dem_fp = '/data/blizzard/sanjoaquin/common_data/topo/SJ_dem_50m.ipw'
        gisPath = '/data/blizzard/sanjoaquin/common_data/topo/'
        maskPath = os.path.join(gisPath, 'SJ_Millerton_mask_50m.ipw')
    else:
        raise ValueError('Wrong basin name')

    mask = ipw.IPW(maskPath).bands[0].data[:]

    output_path = os.path.join(fpdir, 'wy{}'.format(wy))
    fname = 'flight_depths_{}'.format(basin)
    nanval = -9999.0
    nanup = 1000.0


    # #### Now actually do the stuff ####
    # append the water year to the output file name
    fname = fname + '_{}'.format(tmpwy)

    # get topo stats from dem
    ts = get_topo_stats(dem_fp, filetype='ipw')
    x = ts['x']  # + ts['dv']*np.arange(ts['nx'])
    y = ts['y']  # + ts['du']*np.arange(ts['ny'])

    # get depth array
    depth_arr = read_flight(fp_lst, ts, nanval=nanval, nanup=nanup)

    print(depth_arr)
    # create netcdfs
    ds = output_files(output_path, fname, start_date, x, y)

    # write to file
    for idt, dt in enumerate(date_lst):
        data = depth_arr[idt, :]
        # mask out pixels outside the basin
        data[mask == 0.0] = np.nan
        output_timestep(ds, data, dt, idt, start_date)

    # close file
    ds.close()
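
The final masking step writes NaN wherever the basin mask is zero, rather
than multiplying by the mask (which would leave zeros inside the output). A
small demonstration of the pattern:

import numpy as np

data = np.array([[1.0, 2.0], [3.0, 4.0]])
mask = np.array([[1.0, 0.0], [1.0, 1.0]])   # 0 marks out-of-basin pixels
data[mask == 0.0] = np.nan
print(data)   # [[ 1. nan] [ 3.  4.]]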