def brute_force():
    # Prepare an empty list for the calibration parameters:
    parameters_list = []
    # Set the parameter range and step size for the calibration
    min_p = parameters.getParametersforCalibration()[0]
    max_p = parameters.getParametersforCalibration()[1]
    stepsize = 0.05  # parameters.getParametersforCalibration()[2]
    # Ensure that steps in the loop have 3 decimal places only
    p_steps = np.around(np.arange(min_p, max_p + stepsize, stepsize), decimals=3)
    # Print the calibration properties
    print('Case study: ', parameters.getCountryName())
    print('Number of iterations: ', parameters.getNumberofIterations())
    print('Min parameter value: ', min_p)
    print('Max parameter value: ', max_p)
    print('Parameter step: ', stepsize)
    # Get the possible combinations of parameters:
    for p1, p2, p3, p4 in ((a, b, c, d) for a in p_steps for b in p_steps
                           for c in p_steps for d in p_steps):
        sumOfParameters = p1 + p2 + p3 + p4
        # Keep only weight combinations that sum to 1 (within rounding tolerance)
        if 0.9999 < sumOfParameters < 1.0001:
            parameters_list.append([p1, p2, p3, p4])
    # Return a list with the parameter combinations
    return parameters_list
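# A minimal usage sketch (not part of the original script): enumerate the weight
# combinations returned by brute_force(). Passing each combination on to a
# calibration run is assumed here, not shown in the excerpt above.
if __name__ == '__main__':
    parameters_list = brute_force()
    print('Number of valid weight combinations:', len(parameters_list))
    for nr, weights in enumerate(parameters_list, start=1):
        # 'nr' would serve as the iteration number and 'weights' as the factor
        # weights handed to the model (see the calibration __init__ below).
        print(nr, weights)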
def __init__(self, nr, weights):
    DynamicModel.__init__(self)
    # Number for reference
    self.currentSampleNumber = nr
    # Parameters to calibrate
    self.weightDict = {1: weights}
    # Input and output folders
    country = parameters.getCountryName()
    results_mainfolder = os.path.join(work_dir, 'results')
    if not os.path.isdir(results_mainfolder):
        os.mkdir(results_mainfolder)
    output_mainfolder = os.path.join(results_mainfolder, country)
    if not os.path.isdir(output_mainfolder):
        os.mkdir(output_mainfolder)
    self.outputfolder = os.path.join(results_mainfolder, country, str(nr))
    if not os.path.isdir(self.outputfolder):
        os.mkdir(self.outputfolder)
    self.inputfolder = os.path.join('input_data', country)
    setclone(self.inputfolder + '/nullmask')
##    setglobaloption('nondiagonal')
    # Save the parameters as a list to the folder with the calculated metrics
    pName = 'parameters_iteration_' + str(nr) + '.obj'
    pPath = os.path.join(self.outputfolder, pName)
    parametersFile = open(pPath, 'wb')
    pickle.dump(weights, parametersFile)
    parametersFile.close()
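# Hedged sketch (not part of the original model): reload the pickled weight list
# for one iteration, assuming the results/<country>/<nr>/ layout created in the
# __init__ above. The helper name is illustrative only.
def load_iteration_weights(work_dir, country, nr):
    import os, pickle
    pPath = os.path.join(work_dir, 'results', country, str(nr),
                         'parameters_iteration_' + str(nr) + '.obj')
    with open(pPath, 'rb') as parametersFile:
        return pickle.load(parametersFile)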
def __init__(self, typeNr, environment, relatedTypeList, suitFactorList,
             weightList, variableDict, noise, nullMask,
             windowLengthRealization):
    """Create a LandUseType object that represents a class on the land use map.

    Takes nine arguments:
    typeNr -- class nr of the land use type on the land use map
    environment -- global land use map that will evolve
    relatedTypeList -- list of land use types next to which growth is preferred
    suitFactorList -- list of suitability factors the type takes into account
    weightList -- list of relative weights for those factors
    variableDict -- dictionary in which the inputs for the factors are found
    noise -- very small random noise to ensure cells cannot get the same suitability
    nullMask -- map with value 0 for the study area and No Data outside
    windowLengthRealization -- window length for the neighborhood function (stochastic)
    """
    self.typeNr = typeNr
    self.environment = environment
    self.relatedTypeList = relatedTypeList
    self.suitFactorList = suitFactorList
    self.weightList = weightList
    self.variableDict = variableDict
    self.noise = noise
    self.nullMask = nullMask
    self.toMeters = parameters.getConversionUnit()
    self.windowLengthRealization = windowLengthRealization
    self.country = parameters.getCountryName()
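# Schematic illustration only (numpy stand-in): weightList holds the relative
# weights of the suitability factors, and brute_force() above enumerates weight
# sets that sum to 1. The real model combines PCRaster suitability maps; the
# factor values below are made up.
import numpy as np

factor_maps = [np.array([0.2, 0.8]), np.array([0.6, 0.4])]  # two toy factor maps
weightList_example = [0.5, 0.5]                             # weights sum to 1
totalSuitability = sum(w * f for w, f in zip(weightList_example, factor_maps))
print(totalSuitability)  # weighted sum per cell: [0.4 0.6]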
def __init__(self, types, nullMask):
    """Construct a land use object with a number of types and a null mask."""
    self.types = types
    self.nrOfTypes = len(types)
    #print('\nnr of dynamic land use types is:', self.nrOfTypes)
##    self.environment = environment
    # Map with 0 in the study area and No Data outside, used for cover() functions
    self.nullMask = nullMask
    self.toMeters = parameters.getConversionUnit()
    self.country = parameters.getCountryName()
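# Hypothetical wiring sketch: the enclosing class is assumed to be named
# LandUse, the type numbers are placeholders, and the null mask is assumed to
# be the 'nullmask' raster in the country's input folder (as implied by the
# setclone() call in the calibration model's __init__ above).
def _example_build_landuse():
    import os
    from pcraster import readmap
    nullMask = readmap(os.path.join('input_data',
                                    parameters.getCountryName(), 'nullmask'))
    return LandUse(types=[1, 2, 3], nullMask=nullMask)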
metricNames = parameters.getSumStats()

# Get the number of parameter iterations and the number of time steps defined
# in the parameters.py script
nrOfTimesteps = parameters.getNrTimesteps()
numberOfIterations = parameters.getNumberofIterations()
iterations = range(1, numberOfIterations + 1, 1)
timeSteps = range(1, nrOfTimesteps + 1, 1)

# Get the observed time steps. Time steps relate to the year of the CLC data,
# where 1990 was time step 0.
obsSampleNumbers = [1]  # range(1, 20 + 1, 1) <- for the stochastic model
obsTimeSteps = parameters.getObsTimesteps()

# Read the reference files
refArray = parameters.getColFiles()

# Path to the folder where the metrics are stored
country = parameters.getCountryName()
resultFolder = os.path.join(work_dir, 'results', country)
output_mainfolder = os.path.join(resultFolder, "metrics")

#################
### FUNCTIONS ###
#################

def openPickledSamplesAndTimestepsAsNumpyArray(basename, iterations, timesteps,
                                               obs=False):
    output = []
    for timestep in timesteps:
        allIterations = []
        for i in iterations:
            # Loop parameters
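# Hypothetical usage of the helper above: build one stacked array per summary
# statistic for all parameter iterations at the observed time steps. The
# basenames are assumed to match the metric names from parameters.getSumStats();
# the remainder of the function body is not shown in this excerpt.
def _example_collect_metrics():
    arrays = {}
    for metric in metricNames:
        arrays[metric] = openPickledSamplesAndTimestepsAsNumpyArray(
            metric, iterations, obsTimeSteps, obs=True)
    return arrays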
import string
import os, numpy

import parameters
from pcraster import readmap  # needed by map2Array() below
##from pcraster.framework import *

# Get the work directory
work_dir = parameters.getWorkDir()
inputfolder = os.path.join(work_dir, 'input_data', parameters.getCountryName())


def map2Array(filename, rowColFile):
    """Select cell values of a raster at the given row/col sample points.

    filename -- name of the raster map to read.
    rowColFile -- text file with the row and col index of each cell to read.

    The returned array does not contain missing values, so its size is at most
    the number of sample points but possibly smaller. The returned array has
    elements of type numpy.float32."""
    sampleFile = open(rowColFile, 'r')
    samplePoints = sampleFile.readlines()
    sampleFile.close()
    amap = readmap(filename)
##    mask = numpy.zeros((1, len(samplePoints))).astype(numpy.bool_)
##    array = numpy.zeros((1, len(samplePoints))).astype(numpy.float32)
    mask = numpy.zeros(len(samplePoints)).astype(numpy.bool_)
    array = numpy.zeros(len(samplePoints)).astype(numpy.float32)
    j = 0
    for point in samplePoints:
        attributes = point.split()
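# Hypothetical usage with placeholder file names: 'urban_2000' is assumed to be
# a PCRaster map in the input folder and 'samplePoints.col' a text file with one
# whitespace-separated "row col" pair per line (the format implied by
# point.split() above). The remainder of map2Array() is not shown in this excerpt.
def _example_read_samples():
    return map2Array(os.path.join(inputfolder, 'urban_2000'),
                     os.path.join(inputfolder, 'samplePoints.col'))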