def test_categorical_unc(self):
    """Round-trip transform/invert of a CategoricalUncertainty and its errors."""
    categories = ('a', 'b', 'c')
    uncertainty = CategoricalUncertainty(categories, "test")

    # index -> category, and category -> index
    self.assertEqual('a', uncertainty.transform(0))
    self.assertEqual(0, uncertainty.invert('a'))

    # out-of-range index and unknown category must raise
    with self.assertRaises(IndexError):
        uncertainty.transform(3)
    with self.assertRaises(ValueError):
        uncertainty.invert('d')
def test_categorical_unc(self):
    """Round-trip transform/invert of a CategoricalUncertainty and its errors.

    NOTE(review): this is a near-identical redefinition of an earlier
    test_categorical_unc; if both sit in the same TestCase, this one
    shadows the other — confirm they belong to different classes/files.
    """
    cats = ("a", "b", "c")
    unc = CategoricalUncertainty(cats, "test")

    # valid index and category map onto each other
    self.assertEqual("a", unc.transform(0))
    self.assertEqual(0, unc.invert("a"))

    # invalid inputs raise the documented exceptions
    with self.assertRaises(IndexError):
        unc.transform(3)
    with self.assertRaises(ValueError):
        unc.invert("d")
def test_uncertainty_identity(self):
    """Equality of uncertainties is symmetric and sensitive to kwargs and class."""
    # identical definitions compare equal, in both directions
    first = ParameterUncertainty((0, 10), "shared ab 1")
    second = ParameterUncertainty((0, 10), "shared ab 1")
    self.assertTrue(first == second)
    self.assertTrue(second == first)

    # same range and name, but a different integer flag -> not equal
    first = ParameterUncertainty((0, 10), "shared ab 1")
    second = ParameterUncertainty((0, 10), "shared ab 1", integer=True)
    self.assertFalse(first == second)
    self.assertFalse(second == first)

    # different classes should never compare equal, even if their
    # __dict__ contents could coincide — that would be lousy coding
    # on the caller's side, but the comparison must still be False
    first = ParameterUncertainty((0, 10), "shared ab 1")
    second = CategoricalUncertainty([x for x in range(11)], "shared ab 1")
    self.assertFalse(first == second)
    self.assertFalse(second == first)
def test_init(self):
    """Constructor attributes for parameter and categorical uncertainties."""
    name = "test"
    values = (0, 1)

    # continuous parameter -> uniform distribution
    unc = ParameterUncertainty(values, name, integer=False)
    self.assertEqual(values, unc.values)
    self.assertEqual(name, unc.name)
    self.assertEqual(name, str(unc))
    self.assertEqual(UNIFORM, unc.dist)

    # integer parameter -> integer distribution
    unc = ParameterUncertainty(values, name, integer=True)
    self.assertEqual(values, unc.values)
    self.assertEqual(name, unc.name)
    self.assertEqual(INTEGER, unc.dist)

    # categorical -> integer distribution over category indices
    categories = ('a', 'b', 'c')
    unc = CategoricalUncertainty(categories, name)
    self.assertEqual(categories, unc.categories)
    self.assertEqual((0, len(categories) - 1), unc.values)
    self.assertEqual(name, unc.name)
    self.assertEqual(INTEGER, unc.dist)
class PredatorPrey(netlogo.NetLogoModelStructureInterface):
    """Wolf-sheep predation NetLogo model wrapped for the EMA workbench."""

    # path is resolved relative to the working directory by the interface
    model_file = r"/Wolf Sheep Predation.nlogo"
    run_length = 1000

    uncertainties = [
        ParameterUncertainty((10, 100), "grass-regrowth-time"),
        CategoricalUncertainty(("true", "false"), "grass?"),
    ]

    outcomes = [
        Outcome('sheep', time=True),
        Outcome('wolves', time=True),
    ]
class PredatorPrey(NetLogoModelStructureInterface):
    """Wolf-sheep predation NetLogo model with the full uncertainty set."""

    # path is resolved relative to the working directory by the interface
    model_file = r"/Wolf Sheep Predation.nlogo"
    run_length = 1000

    uncertainties = [
        ParameterUncertainty((1, 99), "grass-regrowth-time"),
        ParameterUncertainty((1, 250), "initial-number-sheep"),
        ParameterUncertainty((1, 250), "initial-number-wolves"),
        ParameterUncertainty((1, 20), "sheep-reproduce"),
        ParameterUncertainty((1, 20), "wolf-reproduce"),
        # BUG FIX: was ("true", "true") — a duplicate category makes the
        # categorical degenerate and invert("true") ambiguous; the sibling
        # PredatorPrey definition uses ("true", "false"), which is clearly
        # the intended sample space for the NetLogo "grass?" switch.
        CategoricalUncertainty(("true", "false"), "grass?"),
    ]

    outcomes = [
        Outcome('sheep', time=True),
        Outcome('wolves', time=True),
        Outcome('grass', time=True),  # TODO patches not working in reporting
    ]
class ScarcityModel(VensimModelStructureInterface):
    """Metals scarcity Vensim model.

    Lookup-shaping parameters (the "lookup ..." uncertainties) are popped
    from the sampled kwargs in run_model and converted into full lookup
    tables before the run is delegated to the Vensim interface.
    """

    model_file = r'\MetalsEMA.vpm'

    outcomes = [Outcome('relative market price', time=True),
                Outcome('supply demand ratio', time=True),
                Outcome('real annual demand', time=True),
                Outcome('produced of intrinsically demanded', time=True),
                Outcome('supply', time=True),
                Outcome('Installed Recycling Capacity', time=True),
                Outcome('Installed Extraction Capacity', time=True)]

    uncertainties = [
        ParameterUncertainty((0, 0.5), "price elasticity of demand"),
        ParameterUncertainty((0.6, 1.2),
                             "fraction of maximum extraction capacity used"),
        ParameterUncertainty((1, 4), "initial average recycling cost"),
        ParameterUncertainty((0, 15000),
                             "exogenously planned extraction capacity"),
        ParameterUncertainty((0.1, 0.5), "absolute recycling loss fraction"),
        ParameterUncertainty((0, 0.4), "normal profit margin"),
        ParameterUncertainty((100000, 120000), "initial annual supply"),
        ParameterUncertainty((1500000, 2500000), "initial in goods"),
        ParameterUncertainty((1, 10),
                             "average construction time extraction capacity"),
        ParameterUncertainty((20, 40), "average lifetime extraction capacity"),
        ParameterUncertainty((20, 40), "average lifetime recycling capacity"),
        ParameterUncertainty((5000, 20000),
                             "initial extraction capacity under construction"),
        ParameterUncertainty((5000, 20000),
                             "initial recycling capacity under construction"),
        ParameterUncertainty((5000, 20000), "initial recycling infrastructure"),

        # order of delay
        CategoricalUncertainty((1, 4, 10, 1000), "order in goods delay"),
        CategoricalUncertainty((1, 4, 10), "order recycling capacity delay"),
        CategoricalUncertainty((1, 4, 10), "order extraction capacity delay"),

        # uncertainties associated with lookups
        ParameterUncertainty((20, 50), "lookup shortage loc"),
        ParameterUncertainty((1, 5), "lookup shortage speed"),
        ParameterUncertainty((0.1, 0.5), "lookup price substitute speed"),
        ParameterUncertainty((3, 7), "lookup price substitute begin"),
        ParameterUncertainty((15, 25), "lookup price substitute end"),
        ParameterUncertainty((0.01, 0.2), "lookup returns to scale speed"),
        ParameterUncertainty((0.3, 0.7), "lookup returns to scale scale"),
        ParameterUncertainty((0.01, 0.2), "lookup approximated learning speed"),
        ParameterUncertainty((0.3, 0.6), "lookup approximated learning scale"),
        ParameterUncertainty((30, 60), "lookup approximated learning start")]

    def returnsToScale(self, x, speed, scale):
        """Logistic returns-to-scale point; curve is centered at x == 50."""
        return (x * 1000, scale * 1 / (1 + exp(-1 * speed * (x - 50))))

    def approxLearning(self, x, speed, scale, start):
        """Shifted logistic learning-effect point, offset so y(x>>start) -> 1 - scale."""
        x = x - start
        loc = 1 - scale
        return (x * 10000, scale * 1 / (1 + exp(speed * x)) + loc)

    def f(self, x, speed, loc):
        """Logistic shortage-price-effect point scaled by loc."""
        return (x / 10, loc * 1 / (1 + exp(speed * x)))

    def priceSubstite(self, x, speed, begin, end):
        # NOTE(review): name keeps the original "Substite" typo to avoid
        # breaking any external callers; "priceSubstitute" was likely meant.
        scale = 2 * end
        start = begin - scale / 2
        return (x + 2000, scale * 1 / (1 + exp(-1 * speed * x)) + start)

    def run_model(self, kwargs):
        """Method for running an instantiated model structure.

        Pops the lookup-shaping parameters from kwargs, builds the four
        lookup tables from them, and delegates to the parent run_model.
        """
        loc = kwargs.pop("lookup shortage loc")
        speed = kwargs.pop("lookup shortage speed")
        kwargs['shortage price effect lookup'] = [
            self.f(x / 10, speed, loc) for x in range(0, 100)]

        speed = kwargs.pop("lookup price substitute speed")
        begin = kwargs.pop("lookup price substitute begin")
        end = kwargs.pop("lookup price substitute end")
        kwargs['relative price substitute lookup'] = [
            self.priceSubstite(x, speed, begin, end)
            for x in range(0, 100, 10)]

        # BUG FIX: the "...speed" value was popped into `scale` and the
        # "...scale" value into `speed`, feeding the (0.01, 0.2) speed range
        # into the scale parameter of returnsToScale and vice versa.
        speed = kwargs.pop("lookup returns to scale speed")
        scale = kwargs.pop("lookup returns to scale scale")
        kwargs['returns to scale lookup'] = [
            self.returnsToScale(x, speed, scale)
            for x in range(0, 101, 10)]

        # BUG FIX: same speed/scale swap as above, for the learning lookup.
        speed = kwargs.pop("lookup approximated learning speed")
        scale = kwargs.pop("lookup approximated learning scale")
        start = kwargs.pop("lookup approximated learning start")
        kwargs['approximated learning effect lookup'] = [
            self.approxLearning(x, speed, scale, start)
            for x in range(0, 101, 10)]

        super(ScarcityModel, self).run_model(kwargs)
def __init__(self, lookup_type, values, name, msi, ymin=None, ymax=None):
    '''Register a lookup uncertainty and its generated parameter uncertainties.

    Parameters
    ----------
    lookup_type : {'categories', 'hearne1', 'hearne2', 'approximation'}
        the method to be used for alternative generation.
    values : collection
        the values for specifying the uncertainty from which to sample.

        If 'lookup_type' is "categories", a set of alternative lookup
        functions, each entered as a list of (x, y) tuples. Example::

            LookupUncertainty([[(0.0, 0.05), (0.25, 0.15), (0.5, 0.4),
                                (0.75, 1), (1, 1.25)],
                               [(0.0, 0.1), (0.25, 0.25), (0.5, 0.75),
                                (1, 1.25)],
                               [(0.0, 0.0), (0.1, 0.2), (0.3, 0.6),
                                (0.6, 0.9), (1, 1.25)]],
                              "TF3", 'categories', self)

        If 'lookup_type' is "hearne1", a list of ranges for each parameter
        of a single-extreme piecewise distortion function:

        - m: maximum deviation from l of the distortion function
        - p: the point where this occurs
        - l: lower end point
        - u: upper end point

        If 'lookup_type' is "hearne2", a list of ranges for the 6
        parameters of a double-extreme piecewise linear distortion
        function with variable endpoints:

        - m1: maximum deviation (peak if positive, bottom if negative)
          of the distortion function from l in the first segment
        - p1: where this peak occurs on the x axis
        - m2: maximum deviation of the distortion function from l or u
          in the second segment
        - p2: where the second peak/bottom occurs
        - l: lower end point, namely the y value for x_min
        - u: upper end point, namely the y value for x_max

        Example::

            LookupUncertainty([(-1, 2), (-1, 1), (0, 1), (0, 1),
                               (0, 0.5), (0.5, 1.5)],
                              "TF2", 'hearne2', self, 0, 2)

        If 'lookup_type' is "approximation", an analytical function
        approximation (a logistic function) is used instead of a lookup.
        Its 6 parameters, whose ranges should be given, are:

        - A: the lower asymptote
        - K: the upper asymptote
        - B: the growth rate
        - Q: depends on the value y(0)
        - M: the time of maximum growth if Q=v

        Example definition: TODO
    name : str
        name of the uncertainty
    msi : VensimModelStructureInterface instance
        model structure interface, to be used for adding the generated
        parameter uncertainties
    ymin : float
        min value the lookup function can take; not needed for
        'categories'
    ymax : float
        max value the lookup function can take; not needed for
        'categories'

    Raises
    ------
    EMAError
        if lookup_type is not one of the four supported kinds

    '''
    super(LookupUncertainty, self).__init__(values, name)
    self.lookup_type = lookup_type
    self.y_min = ymin
    self.y_max = ymax
    self.error_message = self.error_message.format(self.name)
    self.transform_functions = {
        self.HEARNE1: self._hearne1,
        self.HEARNE2: self._hearne2,
        self.APPROX: self._approx,
        self.CAT: self._cat
    }

    # one generated ParameterUncertainty per distortion parameter; the
    # prefixes keep the generated names traceable to this lookup
    prefixes = {
        "hearne1": ["m-", "p-", "l-", "u-"],
        "hearne2": ["m1-", "m2-", "p1-", "p2-", "l-", "u-"],
        "approximation": ["A-", "K-", "B-", "Q-", "M-"],
    }

    if self.lookup_type == "categories":
        # one integer category index per alternative lookup function
        msi.uncertainties.append(
            CategoricalUncertainty(range(len(values)), "c-" + self.name))
    elif self.lookup_type in prefixes:
        # indexing (rather than zip) preserves the original IndexError
        # when fewer value ranges than parameters are supplied
        for i, prefix in enumerate(prefixes[self.lookup_type]):
            msi.uncertainties.append(
                ParameterUncertainty(values[i], prefix + self.name))
    else:
        raise EMAError(self.error_message)
    msi._lookup_uncertainties.append(self)