Example No. 1
    def initial(self):
        """ initial part of the polder
         module
        """
        # ************************************************************
        # ***** POLDERS
        # ************************************************************
        settings = LisSettings.instance()
        option = settings.options
        binding = settings.binding

        if option['simulatePolders']:

            PolderSites = loadmap('PolderSites')
            PolderSites = pcraster.ifthen(
                (pcraster.defined(PolderSites) & self.var.IsChannel),
                PolderSites)
            # Get rid of any polders that are not part of the channel network
            # IMPORTANT: current implementation can become unstable with kin.
            # wave!!

            # Flag that is boolean(1) for polder sites and boolean(0) otherwise
            # total storage capacity of Polder area [m3]
            PolderArea = pcraster.lookupscalar(str(binding['TabPolderArea']),
                                               PolderSites)
            PolderLevel = binding['PolderInitialLevelValue']
            # Initial polder level [m]
            self.var.PolderStorageIniM3 = pcraster.cover(
                PolderLevel * PolderArea, pcraster.scalar(0.0))
            # Compute polder storage [m3]
            self.var.PolderStorageM3 = self.var.PolderStorageIniM3
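
The lookupscalar call above reads the polder areas from a whitespace-separated PCRaster column table, keyed on the nominal polder ids in PolderSites. Below is a minimal standalone sketch of that pattern; the file name, the ids and the area values are hypothetical and only illustrate the table format that lookupscalar expects.

import pcraster as pcr

pcr.setclone(3, 3, 1.0, 0.0, 0.0)                  # tiny 3x3 clone map, cell size 1

# hypothetical column table: one row per polder id -> area [m2]
with open("TabPolderArea.txt", "w") as f:
    f.write("1 250000\n")
    f.write("2  90000\n")

# hypothetical polder sites map: id 1 in the leftmost column, id 2 elsewhere
x = pcr.xcoordinate(pcr.spatial(pcr.boolean(1)))
polder_sites = pcr.ifthenelse(x < 1.0, pcr.nominal(1), pcr.nominal(2))

polder_area = pcr.lookupscalar("TabPolderArea.txt", polder_sites)
pcr.report(polder_area, "polder_area.map")         # 250000 or 90000 in every cell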
Example No. 2
import os
import uuid

import numpy as np
import pcraster as pcr

def lookup_scalar(pcr_map, column_data):
    # needs a unique file name due to a bug in pcr.lookupnominal
    lut_file = 'lut_reclass_%s.txt' % uuid.uuid4()
    np.savetxt(lut_file, column_data, fmt='%.0f')
    recoded = pcr.lookupscalar(lut_file, pcr.scalar(pcr_map))
    os.remove(lut_file)
    return recoded
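
A hypothetical usage sketch for lookup_scalar above: reclassifying a land-use map with a two-column numpy array of (class id, value) pairs. The class ids and values are made up. Note that np.savetxt with fmt='%.0f' writes every column rounded to whole numbers, so integer values are used here.

import numpy as np
import pcraster as pcr

pcr.setclone(2, 2, 1.0, 0.0, 0.0)
land_use = pcr.spatial(pcr.nominal(2))         # all cells class 2, purely illustrative

lut = np.array([[1, 30],                       # class id -> hypothetical roughness value
                [2, 45],
                [3, 60]])
roughness = lookup_scalar(land_use, lut)       # every cell becomes 45.0
pcr.report(roughness, "roughness.map")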
Example No. 3
def tableToMapSparse(step, table, map):
    """Reads a pcraster.tbl file for step and assigns using the map in map.
    The behaviour of is a bit similar to the timeinputSparse
    command but in this case for both the tbl file and the map file.
   
    Input: step (int), table (string, path, without the .tbl extension), map
          (ordinal map, without the .map extension)

    How to create your maps/tables:
    if the map for the timestep is not found, the
    default map (without the step appended) is returned, or the last map
    that has been found.

    How to use this:
    - if you create the following maps and step ranges from 1 to 400 and
    the map name is "LAI":
    LAI.map
    LAI10.map
    LAI120.map
    LAI300.map

    - LAI.map will be used for steps between 1 and 9 (default)
    - LAI10.map will be used between steps 10 and 119
    etc.
    The same holds for the tables.
    
    
    """
    global debug

    if not hasattr(tableToMapSparse, "_tableToMap_LastTbl"):
        # store the last step found for each table/map as function attributes
        tableToMapSparse._tableToMap_LastTbl = {}
        tableToMapSparse._tableToMap_LastMap = {}

    # construct filenames
    fname_map = map + str(step) + ".map"
    fname_tbl = table + str(step) + ".tbl"

    if os.path.exists(fname_map):
        print("found: " + fname_map)
        tableToMapSparse._tableToMap_LastMap[map] = step

    if os.path.exists(fname_tbl):
        print("found: " + fname_tbl)
        tableToMapSparse._tableToMap_LastTbl[table] = step

    if table not in tableToMapSparse._tableToMap_LastTbl:
        fname_tbl = table + ".tbl"
    else:
        fname_tbl = table + str(tableToMapSparse._tableToMap_LastTbl[table]) + ".tbl"

    if map not in tableToMapSparse._tableToMap_LastMap:
        fname_map = map + ".map"
    else:
        fname_map = map + str(tableToMapSparse._tableToMap_LastMap[map]) + ".map"

    rmat = pcr.lookupscalar(str(fname_tbl), str(fname_map))

    return rmat
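
A hypothetical call pattern for tableToMapSparse, following its docstring. The base names "Kc" and "LandUse" are made up: with Kc.tbl, Kc10.tbl, Kc120.tbl, ... and a LandUse.map on disk, each step reclassifies the land-use map with the most recent table found so far (Kc.tbl for steps 1 to 9, Kc10.tbl for steps 10 to 119, and so on).

for step in range(1, 401):
    kc = tableToMapSparse(step, "Kc", "LandUse")
    pcr.report(kc, "kc" + str(step).zfill(4) + ".map")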
Example No. 5
 def dynamic(self):
   # rewrite input each timestep
   filename = "in.tbl"
   f = open(filename, "w")
   f.write("1 %f\n" % (2.5 * self.currentTimeStep()))
   f.write("2 %f\n" % (3.5 * self.currentTimeStep()))
   f.write("3 %f\n" % (5.5 * self.currentTimeStep()))
   f.close()
   tmp = pcraster.lookupscalar(filename, "soil.map")
   self.report(tmp, "tmp")
Example No. 6
    def dynamic(self):

        logger.info("Step 4: Monte Carlo simulation")

        # draw a random value (uniform for the entire map)
        z = pcr.mapnormal()
        #~ self.report(z,"z")

        # constrain z so that the random values stay within the range covered by the "lookup_table_average_thickness" table
        z = pcr.max(-5.0, z)
        z = pcr.min(5.0, z)

        # assign average thickness (also uniform for the entire map) based on z
        self.Davg = pcr.lookupscalar(self.lookup_table_average_thickness, z)
        #
        self.report(self.Davg, "davg")
        self.lnDavg = pcr.ln(self.Davg)

        # sedimentary basin thickness (varying over cells and samples)
        lnD = self.F * (self.lnCV * self.lnDavg) + self.lnDavg

        # set the minimum depth (must be bigger than zero)
        minimum_depth = 0.005
        lnD = pcr.max(pcr.ln(minimum_depth), lnD)

        # extrapolation
        lnD = pcr.cover(lnD, \
              pcr.windowaverage(lnD, 1.50*vos.getMapAttributes(self.clone_map_file,"cellsize")))
        lnD = pcr.cover(lnD, \
              pcr.windowaverage(pcr.cover(lnD, pcr.ln(minimum_depth)), 3.00*vos.getMapAttributes(self.clone_map_file,"cellsize")))
        lnD = pcr.cover(lnD, \
              pcr.windowaverage(pcr.cover(lnD, pcr.ln(minimum_depth)), 0.50))

        # smoothing per quarter arc degree
        lnD = pcr.windowaverage(lnD, 0.25)

        # thickness in meter
        self.D = pcr.exp(lnD)

        #~ # smoothing  bottom elevation
        #~ dem_bottom = pcr.windowaverage(self.dem_average - self.D, 0.50)
        #~ # thickness in meter
        #~ self.D = pcr.max(0.0, self.dem_average - dem_bottom)

        #~ # smoothing
        #~ self.D = pcr.windowaverage(self.D, 1.50*vos.getMapAttributes(self.clone_map_file,"cellsize"))

        # keep cm accuracy only (round down)
        self.D = pcr.rounddown(self.D * 100.) / 100.

        self.report(self.D, "damc")
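
A single-cell numpy sketch of the sampling logic in the dynamic step above. The values for Davg, F and lnCV are hypothetical stand-ins for what normally comes from the lookup tables and maps; only the arithmetic of the step is reproduced.

import numpy as np

rng = np.random.default_rng(42)
z = float(np.clip(rng.standard_normal(), -5.0, 5.0))   # one draw per Monte Carlo sample

Davg = 400.0               # hypothetical average thickness [m] from the z lookup table
lnDavg = np.log(Davg)
F = 1.2                    # hypothetical z-score from the relative-elevation lookup
lnCV = 0.1                 # value used in Example No. 9 below

lnD = F * (lnCV * lnDavg) + lnDavg
lnD = max(np.log(0.005), lnD)                           # minimum depth of 0.005 m
D = np.floor(np.exp(lnD) * 100.0) / 100.0               # cm accuracy, rounded down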
Example No. 7
    def initial(self):
        """ initial part of the lakes module
        """

        # ************************************************************
        # ***** LAKES
        # ************************************************************
        settings = LisSettings.instance()
        option = settings.options
        binding = settings.binding
        maskinfo = MaskInfo.instance()

        if option['simulateLakes']:

            LakeSitesC = loadmap('LakeSites')
            LakeSitesC[LakeSitesC < 1] = 0
            LakeSitesC[self.var.IsChannel == 0] = 0
            # Get rid of any lakes that are not part of the channel network

            # mask lake sites when using a sub-catchment mask
            self.var.LakeSitesCC = np.compress(LakeSitesC > 0, LakeSitesC)
            self.var.LakeIndex = np.nonzero(LakeSitesC)[0]

            if self.var.LakeSitesCC.size == 0:
                print(LisfloodWarning('There are no lakes. Lakes simulation stops here'))
                option['simulateLakes'] = False
                option['repsimulateLakes'] = False
                return
            # break if no lakes

            self.var.IsStructureKinematic = np.where(LakeSitesC > 0, np.bool8(1), self.var.IsStructureKinematic)
            # Add lake locations to structures map (used to modify LddKinematic
            # and to calculate LddStructuresKinematic)

            # PCRaster part
            # -----------------------
            LakeSitePcr = loadmap('LakeSites', pcr=True)
            LakeSitePcr = pcraster.ifthen((pcraster.defined(LakeSitePcr) & pcraster.boolean(decompress(self.var.IsChannel))), LakeSitePcr)
            IsStructureLake = pcraster.boolean(LakeSitePcr)
            # additional structure map only for lakes to calculate water balance
            self.var.IsUpsOfStructureLake = pcraster.downstream(self.var.LddKinematic, pcraster.cover(IsStructureLake, 0))
            # Get all pixels just upstream of lakes
            # -----------------------

            self.var.LakeInflowOldCC = np.bincount(self.var.downstruct, weights=self.var.ChanQ)[self.var.LakeIndex]
            # for the Modified Puls Method, Q(inflow)1 has to be used.
            # It is assumed that this is the same as Q(inflow)2 for the first timestep;
            # it still has to be checked whether this works in forecasting mode!

            LakeArea = pcraster.lookupscalar(str(binding['TabLakeArea']), LakeSitePcr)
            LakeAreaC = compressArray(LakeArea)
            self.var.LakeAreaCC = np.compress(LakeSitesC > 0, LakeAreaC)

            # Surface area of each lake [m2]
            LakeA = pcraster.lookupscalar(str(binding['TabLakeA']), LakeSitePcr)
            LakeAC = compressArray(LakeA) * loadmap('LakeMultiplier')
            self.var.LakeACC = np.compress(LakeSitesC > 0, LakeAC)
            # Lake parameter A (suggested  value equal to outflow width in [m])
            # multiplied with the calibration parameter LakeMultiplier

            LakeInitialLevelValue  = loadmap('LakeInitialLevelValue')
            if np.max(LakeInitialLevelValue) == -9999:
                LakeAvNetInflowEstimate = pcraster.lookupscalar(str(binding['TabLakeAvNetInflowEstimate']), LakeSitePcr)
                LakeAvNetC = compressArray(LakeAvNetInflowEstimate)
                self.var.LakeAvNetCC = np.compress(LakeSitesC > 0, LakeAvNetC)

                LakeStorageIniM3CC = self.var.LakeAreaCC * np.sqrt(self.var.LakeAvNetCC / self.var.LakeACC)
                # Initial lake storage [m3]  based on: S = LakeArea * H = LakeArea
                # * sqrt(Q/a)
                self.var.LakeLevelCC = LakeStorageIniM3CC / self.var.LakeAreaCC
            else:
                self.var.LakeLevelCC = np.compress(LakeSitesC > 0, LakeInitialLevelValue)
                LakeStorageIniM3CC = self.var.LakeAreaCC * self.var.LakeLevelCC
                # Initial lake storage [m3]  based on: S = LakeArea * H

                self.var.LakeAvNetCC = np.compress(LakeSitesC > 0, loadmap('PrevDischarge'))

            # Repeatedly used expressions in lake routine

            # NEW Lake Routine using Modified Puls Method (see Maniak, p.331ff)
            # (Qin1 + Qin2)/2 - (Qout1 + Qout2)/2 = (S2 - S1)/dtime
            # changed into:
            # (S2/dtime + Qout2/2) = (S1/dtime + Qout1/2) - Qout1 + (Qin1 + Qin2)/2
            # outgoing discharge (Qout) is linked to storage (S) via elevation.
            # Now some assumptions to make life easier:
            # 1.) storage volume increases proportionally to elevation: S = A * H
            #      H: elevation, A: area of lake
            # 2.) outgoing discharge = c * b * H ** 2.0 (c: weir constant, b: width)
            #      exponent 2.0 because it fits a parabolic cross section (see Aigner 2008)
            #      and it is much easier to calculate (that's the main reason)
            # c for a perfect weir with mu=0.577 and Poleni: 2/3 * mu * sqrt(2*g) = 1.7
            # c for a parabolic weir: around 1.8
            # because it is an imperfect weir: C = c * 0.85 = 1.5
            # this results in the formula: Q = 1.5 * b * H ** 2 = a * H ** 2 -> H = sqrt(Q/a)
            self.var.LakeFactor = self.var.LakeAreaCC / (self.var.DtRouting * np.sqrt(self.var.LakeACC))

            #  solving the equation  (S2/dtime + Qout2/2) = (S1/dtime + Qout1/2) - Qout1 + (Qin1 + Qin2)/2
            #  SI = (S2/dtime + Qout2/2) = (A*H)/DtRouting + Q/2 = A/(DtRouting*sqrt(a)) * sqrt(Q) + Q/2
            #  -> substitution: A/(DtRouting*sqrt(a)) = LakeFactor, Y = sqrt(Q)
            #  Y**2 + 2*LakeFactor*Y - 2*SI = 0
            # solution of this quadratic equation (positive root):
            # Q = sqr(-LakeFactor + sqrt(sqr(LakeFactor) + 2*SI))

            self.var.LakeFactorSqr = np.square(self.var.LakeFactor)
            # for faster calculation inside dynamic section

            LakeStorageIndicator = LakeStorageIniM3CC / self.var.DtRouting + self.var.LakeAvNetCC / 2
            # SI = S/dt + Q/2
            self.var.LakeOutflow = np.square(-self.var.LakeFactor + np.sqrt(self.var.LakeFactorSqr + 2 * LakeStorageIndicator))
            # solution of quadratic equation
            # it is as easy as this because:
            # 1. storage volume increases proportionally to elevation
            # 2. Q = a * H ** 2.0  (if you choose Q = a * H ** 1.5 you have to solve
            # a cubic with Cardano's formula)

            self.var.LakeStorageM3CC = LakeStorageIniM3CC.copy()
            self.var.LakeStorageM3BalanceCC = LakeStorageIniM3CC.copy()

            self.var.LakeStorageIniM3 = maskinfo.in_zero()
            self.var.LakeLevel = maskinfo.in_zero()
            np.put(self.var.LakeStorageIniM3,self.var.LakeIndex,LakeStorageIniM3CC)
            self.var.LakeStorageM3 = self.var.LakeStorageIniM3.copy()
            np.put(self.var.LakeLevel, self.var.LakeIndex, self.var.LakeLevelCC)

            self.var.EWLakeCUMM3 = maskinfo.in_zero()
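
A small numpy check of the Modified Puls solution derived in the comments above: with Y = sqrt(Qout), the balance reduces to Y**2 + 2*LakeFactor*Y - 2*SI = 0, and the positive root gives Qout = (-LakeFactor + sqrt(LakeFactor**2 + 2*SI))**2. The LakeFactor and SI values below are hypothetical.

import numpy as np

lake_factor = np.array([120.0, 80.0, 250.0])            # A / (DtRouting * sqrt(a)), per lake
storage_indicator = np.array([5.0e4, 1.2e4, 9.0e5])     # SI = S/dt + Qout1/2

q_out = np.square(-lake_factor + np.sqrt(np.square(lake_factor) + 2.0 * storage_indicator))
y = np.sqrt(q_out)
residual = np.square(y) + 2.0 * lake_factor * y - 2.0 * storage_indicator
print(residual)   # ~0 up to floating point error: q_out solves the quadratic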
Example No. 8
    def __init__(self, clone_map_file,\
                       input_thickness_netcdf_file,\
                       input_thickness_var_name   ,\
                       margat_aquifers,\
                       tmp_directory,
                       landmask = None,
                       arcdegree = True):

        object.__init__(self)

        # aquifer table from Margat and van der Gun 
        self.margat_aquifers = margat_aquifers

        # clone map
        self.clone_map_file = clone_map_file
        self.clone_map_attr = vos.getMapAttributesALL(self.clone_map_file)
        if arcdegree == True:
            self.clone_map_attr['cellsize'] = round(self.clone_map_attr['cellsize'] * 360000.)/360000.
        xmin = self.clone_map_attr['xUL']
        xmax = xmin + self.clone_map_attr['cols'] * self.clone_map_attr['cellsize']
        ymax = self.clone_map_attr['yUL']
        ymin = ymax - self.clone_map_attr['rows'] * self.clone_map_attr['cellsize']
        pcr.setclone(self.clone_map_file)

        # temporary directory 
        self.tmp_directory = tmp_directory

        # thickness approximation (unit: m, netcdf file with variable name = average)
        self.approx_thick = vos.netcdf2PCRobjCloneWithoutTime(input_thickness_netcdf_file,\
                                                              input_thickness_var_name,\
                                                              self.clone_map_file)
        # set minimum value to 0.1 mm
        self.approx_thick = pcr.max(0.0001, self.approx_thick)

        # rasterize the shape file 
        #               -        
        # save current directory and move to temporary directory
        current_dir = str(os.getcwd()+"/")
        os.chdir(str(self.tmp_directory))
        #
        cmd_line  = 'gdal_rasterize -a MARGAT '                                     # layer name = MARGAT
        cmd_line += '-te '+str(xmin)+' '+str(ymin)+' '+str(xmax)+' '+str(ymax)+ ' '       
        cmd_line += '-tr '+str(self.clone_map_attr['cellsize'])+' '+str(self.clone_map_attr['cellsize'])+' '
        cmd_line += str(margat_aquifers['shapefile'])+' '
        cmd_line += 'tmp.tif'
        print(cmd_line); os.system(cmd_line)
        #
        # make it nominal
        cmd_line = 'pcrcalc tmp.map = "nominal(tmp.tif)"' 
        print(cmd_line); os.system(cmd_line)
        #
        # make sure that the clone map is correct
        cmd_line = 'mapattr -c '+str(self.clone_map_file)+' tmp.map'
        print(cmd_line); os.system(cmd_line)
        #
        # read the map
        self.margat_aquifer_map = pcr.nominal(pcr.readmap("tmp.map"))
        #
        # clean temporary directory and return to the original directory
        vos.clean_tmp_dir(self.tmp_directory)
        os.chdir(current_dir)
        
        # extend the extent of each aquifer
        self.margat_aquifer_map = pcr.cover(self.margat_aquifer_map, 
                                  pcr.windowmajority(self.margat_aquifer_map, 1.25))

        # assign aquifer thickness, unit: m (lookuptable operation) 
        self.margat_aquifer_thickness = pcr.lookupscalar(margat_aquifers['txt_table'], self.margat_aquifer_map)
        self.margat_aquifer_thickness = pcr.ifthen(self.margat_aquifer_thickness > 0., \
                                                   self.margat_aquifer_thickness)
        #~ pcr.report(self.margat_aquifer_thickness,"thick.map"); os.system("aguila thick.map")

        # aquifer map
        self.margat_aquifer_map       = pcr.ifthen(self.margat_aquifer_thickness > 0., self.margat_aquifer_map)        
        
        # looping per aquifer: correcting or rescaling
        aquifer_ids = np.unique(pcr.pcr2numpy(pcr.scalar(self.margat_aquifer_map), vos.MV))
        aquifer_ids = aquifer_ids[aquifer_ids > 0]
        aquifer_ids = aquifer_ids[aquifer_ids < 10000]
        self.rescaled_thickness = None
        for aquifer_id in aquifer_ids:
            rescaled_thickness = self.correction_per_aquifer(aquifer_id)
            if self.rescaled_thickness is None:
                self.rescaled_thickness = rescaled_thickness
            else:
                self.rescaled_thickness = pcr.cover(self.rescaled_thickness, rescaled_thickness)
        
        # integrating
        ln_aquifer_thickness  = self.mapFilling( pcr.ln(self.rescaled_thickness), pcr.ln(self.approx_thick) )
        self.aquifer_thickness = pcr.exp(ln_aquifer_thickness)
        #~ pcr.report(self.aquifer_thickness,"thick.map"); os.system("aguila thick.map")

        # cropping only in the landmask region
        if landmask is None: landmask = self.clone_map_file
        self.landmask = pcr.defined(vos.readPCRmapClone(landmask,self.clone_map_file,self.tmp_directory))
        #~ pcr.report(self.landmask,"test.map"); os.system("aguila test.map")

        self.aquifer_thickness = pcr.ifthen(self.landmask, self.aquifer_thickness)
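
The shell commands above are assembled as strings and run through os.system. As an aside, the gdal_rasterize step can equally be issued through subprocess.run with an argument list, which avoids shell quoting issues; the sketch below is only an equivalent form of that one command, with xmin, ymin, xmax, ymax, cellsize and margat_aquifers standing for the values computed or passed in __init__ above.

import subprocess

# xmin, ymin, xmax, ymax, cellsize and margat_aquifers as in __init__ above
cmd = [
    "gdal_rasterize",
    "-a", "MARGAT",                                     # burn the MARGAT attribute
    "-te", str(xmin), str(ymin), str(xmax), str(ymax),  # target extent
    "-tr", str(cellsize), str(cellsize),                # target resolution
    str(margat_aquifers['shapefile']),
    "tmp.tif",
]
subprocess.run(cmd, check=True)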
Example No. 9
    def __init__(self, clone_map_file, \
                       dem_average_netcdf, dem_floodplain_netcdf, ldd_netcdf, \
                       lookup_table_average_thickness, lookup_table_zscore, \
                       number_of_samples, include_percentile = True,\
                       threshold_sedimentary_basin = 50.0, elevation_F_min = 0.0, elevation_F_max = 50.0):  # values defined in de Graaf et al. (2014)

        DynamicModel.__init__(self)
        MonteCarloModel.__init__(self)

        msg = "\n"
        msg += "\n"
        msg += 'For each step, please refer to de Graaf et al. (2014).'
        msg += "\n"
        logger.info(msg)

        # set clone
        self.clone_map_file = clone_map_file
        pcr.setclone(clone_map_file)

        # an option to include_percentile or not
        self.include_percentile = include_percentile

        # number of samples
        self.number_of_samples = pcr.scalar(number_of_samples)

        logger.info(
            "Step 1: Identify the cells belonging to the sedimentary basin region."
        )
        dem_average    = vos.netcdf2PCRobjCloneWithoutTime(dem_average_netcdf['file_name'],\
                                                           dem_average_netcdf['variable_name'],\
                                                           clone_map_file)
        dem_average = pcr.max(0.0, dem_average)
        self.dem_average = pcr.cover(dem_average, 0.0)

        dem_floodplain = vos.netcdf2PCRobjCloneWithoutTime(
            dem_floodplain_netcdf['file_name'],
            dem_floodplain_netcdf['variable_name'], clone_map_file)
        dem_floodplain = pcr.max(0.0, dem_floodplain)

        lddMap = vos.netcdf2PCRobjCloneWithoutTime(ldd_netcdf['file_name'],
                                                   ldd_netcdf['variable_name'],
                                                   clone_map_file)
        self.lddMap = pcr.lddrepair(pcr.lddrepair(pcr.ldd(lddMap)))
        self.landmask = pcr.defined(self.lddMap)

        elevation_F = dem_average - dem_floodplain

        sedimentary_basin_extent = pcr.ifthen(
            elevation_F < pcr.scalar(threshold_sedimentary_basin),
            pcr.boolean(1))

        # include the continuity along the river network
        sedimentary_basin_extent = pcr.windowmajority(
            sedimentary_basin_extent,
            3.00 * vos.getMapAttributes(clone_map_file, "cellsize"))
        sedimentary_basin_extent = pcr.cover(sedimentary_basin_extent, \
                                   pcr.path(self.lddMap, pcr.defined(sedimentary_basin_extent)))

        # TODO: We should also include the extent of major aquifer basins and unconsolidated sediments in the GLiM map.

        elevation_F = pcr.ifthen(sedimentary_basin_extent, elevation_F)
        elevation_F = pcr.max(0.0, elevation_F)
        elevation_F = pcr.min(50., elevation_F)

        logger.info(
            "Step 2: Calculate relative difference and associate z_score.")
        relative_elevation_F = pcr.scalar(
            1.0) - (elevation_F - elevation_F_min) / (elevation_F_max -
                                                      elevation_F_min)

        z_score_relat_elev_F = pcr.lookupscalar(lookup_table_zscore,
                                                relative_elevation_F)
        self.F = z_score_relat_elev_F  # zscore (varying over the map)

        # maximum and minimum z_score
        self.F = pcr.min(3.75, self.F)
        self.F = pcr.max(-10.00, self.F)

        logger.info(
            "Step 3: Assign average and variation of aquifer thickness.")
        self.lookup_table_average_thickness = lookup_table_average_thickness
        self.lnCV = pcr.scalar(
            0.1
        )  # According to Inge, this lnCV value corresponds to the table "lookup_table_average_thickness".
Example No. 10
    def __init__(self):
        # Print model info
        print('The Spatial Processes in HYdrology (SPHY) model is')
        print(
            'developed and owned by FutureWater, Wageningen, The Netherlands')
        print('Version 3.0, released June 2019')
        print(' ')

        #-Missing value definition
        self.MV = -9999

        # Read the modules to be used
        self.GlacFLAG = config.getint('MODULES', 'GlacFLAG')
        self.SnowFLAG = config.getint('MODULES', 'SnowFLAG')
        self.RoutFLAG = config.getint('MODULES', 'RoutFLAG')
        self.ResFLAG = config.getint('MODULES', 'ResFLAG')
        self.LakeFLAG = config.getint('MODULES', 'LakeFLAG')
        self.DynVegFLAG = config.getint('MODULES', 'DynVegFLAG')
        self.GroundFLAG = config.getint('MODULES', 'GroundFLAG')
        self.SedFLAG = config.getint('MODULES', 'SedFLAG')
        self.SedTransFLAG = config.getint('MODULES', 'SedTransFLAG')

        # import the required modules
        import datetime, calendar, ET, rootzone, subzone
        import utilities.reporting as reporting
        import utilities.timecalc as timecalc
        import utilities.netcdf2PCraster as netcdf2PCraster
        from math import pi
        #-standard python modules
        self.datetime = datetime
        self.calendar = calendar
        self.pi = pi
        #-FW defined modules
        self.reporting = reporting
        self.timecalc = timecalc
        self.netcdf2PCraster = netcdf2PCraster
        self.ET = ET
        self.rootzone = rootzone
        self.subzone = subzone
        del datetime, calendar, pi, reporting, timecalc, ET, rootzone, subzone
        #-import additional modules if required
        if self.GlacFLAG == 1:
            self.SnowFLAG = 1
            self.GroundFLAG = 1

        #-read the input and output directories from the configuration file
        self.inpath = config.get('DIRS', 'inputdir')
        self.outpath = config.get('DIRS', 'outputdir')

        #-set the timing criteria
        sy = config.getint('TIMING', 'startyear')
        sm = config.getint('TIMING', 'startmonth')
        sd = config.getint('TIMING', 'startday')
        ey = config.getint('TIMING', 'endyear')
        em = config.getint('TIMING', 'endmonth')
        ed = config.getint('TIMING', 'endday')
        self.startdate = self.datetime.datetime(sy, sm, sd)
        self.enddate = self.datetime.datetime(ey, em, ed)
        self.dateAfterUpdate = self.startdate - self.datetime.timedelta(
            days=1
        )  #-only required for glacier retreat (create dummy value here to introduce the variable)

        #-set date input for reporting
        self.startYear = sy
        self.endYear = ey
        self.spinUpYears = config.getint('TIMING', 'spinupyears')
        self.simYears = self.endYear - self.startYear - self.spinUpYears + 1

        #-set the 2000 julian date number
        self.julian_date_2000 = 2451545
        #-read name of reporting table
        self.RepTab = config.get('REPORTING', 'RepTab')
        #-set the option to calculate the fluxes in mm for the upstream area
        self.mm_rep_FLAG = config.getint('REPORTING', 'mm_rep_FLAG')

        #-set the option to calculate the fluxes per component in mm for the upstream area
        pars = [
            'Prec', 'ETa', 'GMelt', 'QSNOW', 'QROOTR', 'QROOTD', 'QRAIN',
            'QGLAC', 'QBASE', 'QTOT', 'Seep'
        ]
        for i in pars:
            var = i + '_mm_FLAG'
            setattr(self, var, config.getint('REPORTING', var))

        #-set the option to calculate the timeseries of the water balance
        self.wbal_TSS_FLAG = config.getint('REPORTING', 'wbal_TSS_FLAG')

        #-setting clone map
        self.clonefile = self.inpath + config.get('GENERAL', 'mask')
        pcr.setclone(self.clonefile)
        self.clone = pcr.ifthen(pcr.readmap(self.clonefile), pcr.boolean(1))

        self.cellArea = pcr.cellvalue(pcr.cellarea(), 1)[0]

        #-read general maps
        self.DEM = pcr.readmap(self.inpath + config.get('GENERAL', 'dem'))
        self.Slope = pcr.readmap(self.inpath + config.get('GENERAL', 'Slope'))
        self.Locations = pcr.readmap(self.inpath +
                                     config.get('GENERAL', 'locations'))

        #-read soil calibration fractions
        self.RootFieldFrac = config.getfloat('SOIL_CAL', 'RootFieldFrac')
        self.RootSatFrac = config.getfloat('SOIL_CAL', 'RootSatFrac')
        self.RootDryFrac = config.getfloat('SOIL_CAL', 'RootDryFrac')
        self.RootWiltFrac = config.getfloat('SOIL_CAL', 'RootWiltFrac')
        self.RootKsatFrac = config.getfloat('SOIL_CAL', 'RootKsatFrac')

        #-read soil maps
        #-check for PedotransferFLAG
        self.PedotransferFLAG = config.getint('PEDOTRANSFER',
                                              'PedotransferFLAG')
        #-if pedotransfer functions are used read the sand, clay, organic matter and bulk density maps, otherwise read the soil hydraulic properties
        if self.PedotransferFLAG == 1:
            import utilities.pedotransfer
            self.pedotransfer = utilities.pedotransfer
            del utilities.pedotransfer

            #-read init processes pedotransfer
            self.pedotransfer.init(self, pcr, config, np)
        else:
            #self.Soil = pcr.readmap(self.inpath + config.get('SOIL','Soil'))
            self.RootFieldMap = pcr.readmap(self.inpath + config.get(
                'SOIL', 'RootFieldMap')) * self.RootFieldFrac
            self.RootSatMap = pcr.readmap(self.inpath + config.get(
                'SOIL', 'RootSatMap')) * self.RootSatFrac
            self.RootDryMap = pcr.readmap(self.inpath + config.get(
                'SOIL', 'RootDryMap')) * self.RootDryFrac
            self.RootWiltMap = pcr.readmap(self.inpath + config.get(
                'SOIL', 'RootWiltMap')) * self.RootWiltFrac
            self.RootKsat = pcr.readmap(self.inpath + config.get(
                'SOIL', 'RootKsat')) * self.RootKsatFrac
            self.SubSatMap = pcr.readmap(self.inpath +
                                         config.get('SOIL', 'SubSatMap'))
            self.SubFieldMap = pcr.readmap(self.inpath +
                                           config.get('SOIL', 'SubFieldMap'))
            self.SubKsat = pcr.readmap(self.inpath +
                                       config.get('SOIL', 'SubKsat'))
            self.RootDrainVel = self.RootKsat * self.Slope

        #-Read and set the soil parameters
        pars = ['CapRiseMax', 'RootDepthFlat', 'SubDepthFlat']
        for i in pars:
            try:
                setattr(self, i,
                        pcr.readmap(self.inpath + config.get('SOILPARS', i)))
            except:
                setattr(self, i, config.getfloat('SOILPARS', i))

        # groundwater storage as third storage layer. This is used instead of a fixed bottomflux
        if self.GroundFLAG == 1:
            import modules.groundwater
            self.groundwater = modules.groundwater
            del modules.groundwater

            #-read init processes groundwater
            self.groundwater.init(self, pcr, config)

        else:
            # if groundwater module is not used, read seepage and gwl_base
            self.SeepStatFLAG = config.getint('SOILPARS', 'SeepStatic')
            if self.SeepStatFLAG == 0:  # set the seepage map series
                self.Seepmaps = self.inpath + config.get('SOILPARS', 'SeePage')
            else:  #-set a static map or value for seepage
                try:
                    self.SeePage = pcr.readmap(
                        self.inpath + config.get('SOILPARS', 'SeePage'))
                except:
                    self.SeePage = config.getfloat('SOILPARS', 'SeePage')
            try:
                self.GWL_base = pcr.readmap(self.inpath +
                                            config.get('SOILPARS', 'GWL_base'))
            except:
                self.GWL_base = config.getfloat('SOILPARS', 'GWL_base')

            self.SubDrainVel = self.SubKsat * self.Slope

        #-calculate soil properties
        self.RootField = self.RootFieldMap * self.RootDepthFlat
        self.RootSat = self.RootSatMap * self.RootDepthFlat
        self.RootDry = self.RootDryMap * self.RootDepthFlat
        self.RootWilt = self.RootWiltMap * self.RootDepthFlat
        self.SubSat = self.SubSatMap * self.SubDepthFlat
        self.SubField = self.SubFieldMap * self.SubDepthFlat
        self.RootTT = pcr.max((self.RootSat - self.RootField) / self.RootKsat,
                              0.0001)
        self.SubTT = pcr.max((self.SubSat - self.SubField) / self.SubKsat,
                             0.0001)
        # soil max and soil min for scaling of gwl if groundwater module is not used
        if self.GroundFLAG == 0:
            self.SoilMax = self.RootSat + self.SubSat
            self.SoilMin = self.RootDry + self.SubField

        #-read land use map
        self.LandUse = pcr.readmap(self.inpath +
                                   config.get('LANDUSE', 'LandUse'))

        #-Use the dynamic vegetation module
        if self.DynVegFLAG == 1:
            #-import dynamic vegetation module
            import modules.dynamic_veg
            self.dynamic_veg = modules.dynamic_veg
            del modules.dynamic_veg

            #-read init processes dynamic vegetation
            self.dynamic_veg.init(self, pcr, config)
        #-read the crop coefficient table if the dynamic vegetation module is not used
        else:
            self.KcStatFLAG = config.getint('LANDUSE', 'KCstatic')
            if self.KcStatFLAG == 1:
                #-read land use map and kc table
                self.kc_table = self.inpath + config.get('LANDUSE', 'CropFac')
                self.Kc = pcr.lookupscalar(self.kc_table, self.LandUse)
            else:
                #-set the kc map series
                self.Kcmaps = self.inpath + config.get('LANDUSE', 'KC')

        #-read the p factor table if the plant water stress module is used
        self.PlantWaterStressFLAG = config.getint('PWS', 'PWS_FLAG')
        if self.PlantWaterStressFLAG == 1:
            PFactor = self.inpath + config.get('PWS', 'PFactor')
            self.PMap = pcr.lookupscalar(PFactor, self.LandUse)

        #-read and set glacier maps and parameters if glacier module is used
        if self.GlacFLAG:
            #-import glacier module
            import modules.glacier
            self.glacier = modules.glacier
            del modules.glacier

            #-read init processes glacier module
            self.glacier.init(self, pcr, config, pd, np, os)

        #-read and set snow maps and parameters if snow modules are used
        if self.SnowFLAG == 1:
            #-import snow module
            import modules.snow
            self.snow = modules.snow
            del modules.snow

            #-read init processes snow module
            self.snow.init(self, pcr, config)

        #-read and set climate forcing and the calculation of etref

        #-read precipitation data
        #-read flag for precipitation forcing by netcdf
        self.precNetcdfFLAG = config.getint('CLIMATE', 'precNetcdfFLAG')
        if self.precNetcdfFLAG == 1:
            #-read configuration for forcing by netcdf
            self.netcdf2PCraster.getConfigNetcdf(self, config, 'Prec',
                                                 'CLIMATE')

            #-determine x,y-coordinates of netcdf file and model domain and indices of netcdf corresponding to model domain
            self.netcdf2PCraster.netcdf2pcrInit(self, pcr, 'Prec')
        else:
            #-read precipitation forcing folder
            self.Prec = self.inpath + config.get('CLIMATE', 'Prec')

        #-read precipitation data
        #-read flag for temperature forcing by netcdf
        self.tempNetcdfFLAG = config.getint('CLIMATE', 'tempNetcdfFLAG')
        if self.tempNetcdfFLAG == 1:
            #-read configuration for forcing by netcdf
            self.netcdf2PCraster.getConfigNetcdf(self, config, 'Temp',
                                                 'CLIMATE')

            #-determine x,y-coordinates of netcdf file and model domain and indices of netcdf corresponding to model domain
            self.netcdf2PCraster.netcdf2pcrInit(self, pcr, 'Temp')
        else:
            #-read temperature forcing folder
            self.Tair = self.inpath + config.get('CLIMATE', 'Tair')
        #-read flag for etref time series input
        self.ETREF_FLAG = config.getint('ETREF', 'ETREF_FLAG')
        #-determine the use of a given etref time-series or calculate etref using Hargreaves
        if self.ETREF_FLAG == 1:
            self.ETref = self.inpath + config.get('ETREF', 'ETref')
        else:
            self.Lat = pcr.readmap(self.inpath + config.get('ETREF', 'Lat'))
            #-read flag for minimum temperature forcing by netcdf
            self.TminNetcdfFLAG = config.getint('ETREF', 'TminNetcdfFLAG')
            if self.TminNetcdfFLAG == 1:
                #-read configuration for forcing by netcdf
                self.netcdf2PCraster.getConfigNetcdf(self, config, 'Tmin',
                                                     'ETREF')

                #-determine x,y-coordinates of netcdf file and model domain and indices of netcdf corresponding to model domain
                self.netcdf2PCraster.netcdf2pcrInit(self, pcr, 'Tmin')
            else:
                self.Tmin = self.inpath + config.get('ETREF', 'Tmin')
            #-read flag for maximum temperature forcing by netcdf
            self.TmaxNetcdfFLAG = config.getint('ETREF', 'TmaxNetcdfFLAG')
            if self.TmaxNetcdfFLAG == 1:
                #-read configuration for forcing by netcdf
                self.netcdf2PCraster.getConfigNetcdf(self, config, 'Tmax',
                                                     'ETREF')

                #-determine x,y-coordinates of netcdf file and model domain and indices of netcdf corresponding to model domain
                self.netcdf2PCraster.netcdf2pcrInit(self, pcr, 'Tmax')
            else:
                self.Tmax = self.inpath + config.get('ETREF', 'Tmax')
            self.Gsc = config.getfloat('ETREF', 'Gsc')
            import hargreaves
            self.Hargreaves = hargreaves
            del hargreaves

        #-read and set routing maps and parameters
        if self.RoutFLAG == 1:
            import modules.routing
            self.routing = modules.routing
            del modules.routing

            #-read init processes routing
            self.routing.init(self, pcr, config)

        #-read and set advanced routing maps and parameters (lakes/reservoirs)
        if self.ResFLAG == 1 or self.LakeFLAG == 1:
            #-import advanced routing module
            import modules.advanced_routing
            self.advanced_routing = modules.advanced_routing
            del modules.advanced_routing

            #-read init processes advanced routing
            self.advanced_routing.init(self, pcr, config)

        #-read lake maps and parameters if lake module is used
        if self.LakeFLAG == 1:
            #-import lakes module
            import modules.lakes
            self.lakes = modules.lakes
            del modules.lakes

            #-read init processes lakes
            self.lakes.init(self, pcr, config)

        #-read reservoir maps and parameters if reservoir module is used
        if self.ResFLAG == 1:
            #-import reservoirs module
            import modules.reservoirs
            self.reservoirs = modules.reservoirs
            del modules.reservoirs

            #-read init processes reservoirs
            self.reservoirs.init(self, pcr, config)

        #-read flag for calculation of ET in reservoirs
        self.ETOpenWaterFLAG = config.getint('OPENWATER', 'ETOpenWaterFLAG')
        if self.ETOpenWaterFLAG == 1:
            #-read kc value for open water
            self.kcOpenWater = config.getfloat('OPENWATER', 'kcOpenWater')
            #-read openwater fraction map
            self.openWaterFrac = pcr.readmap(
                self.inpath + config.get('OPENWATER', 'openWaterFrac'))
            #-determine openwater map with values of each reservoir/lake in the extent of the openwater
            self.openWater = pcr.ifthenelse(self.openWaterFrac > 0,
                                            pcr.scalar(1), pcr.scalar(0))
            self.openWaterNominal = pcr.clump(pcr.nominal(self.openWater))
            self.openWaterNominal = pcr.nominal(
                pcr.areamaximum(pcr.scalar(self.ResID), self.openWaterNominal))
        else:
            #-set all cells to 0 for openwater fraction map
            self.openWaterFrac = self.DEM * 0
            self.openWater = 0
            self.ETOpenWater = 0

        #-read maps and parameters for infiltration excess
        self.InfilFLAG = config.getfloat('INFILTRATION', 'Infil_excess')
        if self.InfilFLAG == 1:
            self.K_eff = config.getfloat('INFILTRATION', 'K_eff')
            try:
                self.Alpha = config.getfloat('INFILTRATION', 'Alpha')
            except:
                self.Alpha = pcr.readmap(self.inpath +
                                         config.get('INFILTRATION', 'Alpha'))
            try:
                self.Labda_Infil = config.getfloat('INFILTRATION',
                                                   'Labda_infil')
            except:
                self.Labda_Infil = pcr.readmap(
                    self.inpath + config.get('INFILTRATION', 'Labda_infil'))
            try:
                self.paved_table = self.inpath + config.get(
                    'INFILTRATION', 'PavedFrac')
                self.pavedFrac = pcr.lookupscalar(self.paved_table,
                                                  self.LandUse)
            except:
                self.pavedFrac = 0

        #-read maps and parameters for soil erosion
        if self.SedFLAG == 1:
            #-read soil erosion model selector (1 for MUSLE, 2 for MMF)
            self.SedModel = config.getfloat('SEDIMENT', 'SedModel')

            #-read rock fraction map
            self.RockFrac = pcr.readmap(self.inpath +
                                        config.get('SEDIMENT', 'RockFrac'))

            #-read MUSLE input parameters
            if self.SedModel == 1:
                #-import musle module
                import modules.musle
                self.musle = modules.musle
                del modules.musle

                #-read init processes musle
                self.musle.init(self, pcr, config)

            #-read MMF input parameters
            if self.SedModel == 2:
                #-import mmf module
                import modules.mmf
                self.mmf = modules.mmf
                del modules.mmf

                #-read init processes mmf
                self.mmf.init(self, pcr, config)

            #-read input parameters for sediment transport
            if self.SedTransFLAG == 1:
                #-import sediment transport module
                import modules.sediment_transport
                self.sediment_transport = modules.sediment_transport
                del modules.sediment_transport

                #-read init processes sediment transport
                self.sediment_transport.init(self, pcr, config, csv, np)

        #-set the global option for radians
        pcr.setglobaloption('radians')
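
Several parameters above are read with a try/except that falls back from a PCRaster map to a float from the configuration file (the SOILPARS and INFILTRATION blocks, for example). The helper below is illustrative only and not part of SPHY; it just shows that pattern in one place.

def map_or_float(pcr, config, inpath, section, option):
    """Return a PCRaster map if the option points to a readable map, else a float."""
    try:
        return pcr.readmap(inpath + config.get(section, option))
    except Exception:   # the original code uses a bare except for the same fallback
        return config.getfloat(section, option)

# e.g. self.CapRiseMax = map_or_float(pcr, config, self.inpath, 'SOILPARS', 'CapRiseMax')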