def initial(self):
    # create sample points
    self.nullMask = self.readmap(self.inputfolder + '/nullmask')
    self.oneMask = self.readmap(self.inputfolder + '/onemask')
    # load a map with random uniform values
    self.uniformMap = self.readmap(self.inputfolder + '/uniform')
    # AT SOME POINT WITH STOCHASTIC INPUT
    # in that case land use should not include urban
    self.landuse = self.readmap(self.inputfolder + '/init_lu90')
    self.landuse00 = self.readmap(self.inputfolder + '/init_lu00')
    self.landuse06 = self.readmap(self.inputfolder + '/init_lu06')
    self.landuse12 = self.readmap(self.inputfolder + '/init_lu12')
    self.landuse18 = self.readmap(self.inputfolder + '/init_lu18')
    self.initialUrb = self.landuse == 1
    self.roads = self.readmap(self.inputfolder + '/roads')
    self.noGoMap = cover(self.readmap(self.inputfolder + '/nogo'),
                         boolean(self.nullMask))
    self.zones = readmap(self.inputfolder + '/zones')
    self.samplePoints = self.readmap(self.inputfolder + '/sampPoint')
    self.sumStats = parameters.getSumStats()
    self.yieldMap = scalar(self.oneMask)

    # List of landuse types in order of 'who gets to choose first'
    self.landUseList = parameters.getLandUseList()
    self.relatedTypeDict = parameters.getRelatedTypeDict()

    # Input values from the parameters file
    self.suitFactorDict = parameters.getSuitFactorDict()
    self.variableSuperDict = parameters.getVariableSuperDict()
    self.noGoLanduseList = parameters.getNoGoLanduseTypes()

    # Uniform map of small numbers, used to avoid equal suitabilities.
    # The same uniform map is applied in each iteration.
    # The noise is added as a uniform map created in create_initial_maps.py.
    self.noise = self.uniformMap

    # This part used to be in the initial section.
    # Set seeds to be able to reproduce results
    random.seed(10)
    np.random.seed(10)
    setrandomseed(10)

    # Create the 'overall' landuse class
    ## self.environment = uncertainty.getInitialLandUseMap(self.landuse)
    self.environment = self.landuse
    self.landUse = LandUse(self.landUseList, self.nullMask)
    self.landUse.setInitialEnvironment(self.environment)

    # Create an object for every landuse type in the list
    self.landUse.createLandUseTypeObjects(self.relatedTypeDict,
                                          self.suitFactorDict,
                                          self.weightDict,
                                          self.variableSuperDict,
                                          self.noise)

    # Static suitability factors
    self.landUse.determineNoGoAreas(self.noGoMap, self.noGoLanduseList)
    self.landUse.loadDistanceMaps()
    self.landUse.calculateStaticSuitabilityMaps(self.yieldMap)
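# ---------------------------------------------------------------------------
# Illustrative sketch (assumption, not part of the model class above): the
# cover() call in initial() fills missing values in the 'nogo' map with the
# Boolean null mask, so every cell holds a defined True/False value before
# the static suitability maps are built. The hypothetical helper below only
# shows that pattern in isolation; map paths and the helper name are made up.
def _fillNoGoExample(nogoPath, nullMaskPath):
    """Return a no-go map in which missing values are replaced by False."""
    nogo = readmap(nogoPath)          # True where land-use change is banned, missing elsewhere
    nullMask = readmap(nullMaskPath)  # scalar 0.0 inside the study area
    # cover() keeps the first defined value per cell: the original 'nogo'
    # value where present, otherwise the (False) Boolean null mask.
    return cover(nogo, boolean(nullMask))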
import pickle
import os
import metrics
import numpy as np
import parameters
import calibrate
from pcraster.framework import *

#### Script to read in the metrics saved as the result of the LU_urb.py script.
#### Metrics are transformed into an array.

# Work directory:
work_dir = parameters.getWorkDir()

# Get metrics
metricNames = parameters.getSumStats()

# Get the number of parameter iterations and the number of time steps defined in the parameters.py script
nrOfTimesteps = parameters.getNrTimesteps()
numberOfIterations = parameters.getNumberofIterations()
iterations = range(1, numberOfIterations + 1, 1)
timeSteps = range(1, nrOfTimesteps + 1, 1)

# Get the observed time steps. Time steps relate to the year of the CLC data, where 1990 was time step 0.
obsSampleNumbers = [1]  # range(1, 20 + 1, 1) <- for the stochastic model
obsTimeSteps = parameters.getObsTimesteps()

# Read the reference files
refArray = parameters.getColFiles()

# Path to the folder with the stored metrics
country = parameters.getCountryName()
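# ---------------------------------------------------------------------------
# Illustrative sketch (assumption): the exact file layout written by LU_urb.py
# is defined elsewhere, so the path pattern, file extension, and helper name
# below are hypothetical. The function only shows the general step this
# script performs: collecting one pickled value per iteration and time step
# into a numpy array of shape (iterations, time steps). It assumes each file
# holds a single number for the given metric.
def _stackMetricExample(metricName, resultDir='results'):
    values = np.zeros((numberOfIterations, nrOfTimesteps))
    for i in iterations:
        for t in timeSteps:
            fileName = os.path.join(resultDir, '%s_%s_%s.obj' % (metricName, i, t))
            with open(fileName, 'rb') as f:
                values[i - 1, t - 1] = pickle.load(f)
    return values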
import pickle
import os
import metrics
import numpy as np
import parameters
##from pcraster.framework import *
##from numba import njit
from scipy.spatial import distance

#### Script to calibrate the LU model

# Work directory:
work_dir = parameters.getWorkDir()

# Get metrics
metricList = parameters.getSumStats()
locationalMetric = parameters.getLocationalAccuracyMetric()
all_metrices = metricList + locationalMetric

# Get case studies
case_studies = parameters.getCaseStudies()

# Get calibration scenarios
scenarios = parameters.getCalibrationScenarios()

# Get the number of parameter iterations and the number of time steps defined in the parameters.py script
nrOfTimesteps = parameters.getNrTimesteps()
numberOfIterations = parameters.getNumberofIterations()
iterations = range(1, numberOfIterations + 1, 1)
timeSteps = range(1, nrOfTimesteps + 1, 1)

# Get the observed time steps. Time steps relate to the year of the CLC data, where 1990 was time step 0.
obsSampleNumbers = [1]  # range(1, 20 + 1, 1) <- for the stochastic model
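# ---------------------------------------------------------------------------
# Illustrative sketch (assumption): the actual calibration routine lives
# elsewhere in this script; this hypothetical helper only shows the core idea
# of using scipy.spatial.distance to score parameter sets, i.e. the parameter
# set whose modelled metrics lie closest (in Euclidean distance) to the
# observed metrics fits best. Array shapes and the helper name are made up.
def _rankParameterSetsExample(modelledMetrics, observedMetrics):
    """modelledMetrics: array of shape (parameter sets, metrics);
    observedMetrics: array of shape (metrics,).
    Returns parameter-set indices ordered from best to worst fit."""
    distances = np.array([distance.euclidean(row, observedMetrics)
                          for row in modelledMetrics])
    return np.argsort(distances)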