def test_parameters_from_stack():
    """Stacking ``Parameters`` with ``+`` preserves parameter order."""
    left = Parameters([Parameter("a", 1), Parameter("b", 2)])
    stacked = left + Parameters([]) + Parameters([Parameter("c", 3)])
    assert stacked.names == ["a", "b", "c"]
def test_parameters_from_stack():
    """Stacking preserves order and yields a block-diagonal covariance."""
    a, b, c = Parameter("a", 1), Parameter("b", 2), Parameter("c", 3)
    stacked = Parameters([a, b]) + Parameters([]) + Parameters([c])
    assert stacked.names == ["a", "b", "c"]

    first = Parameters.from_values([1, 2], covariance=np.full((2, 2), 2))
    second = Parameters.from_values([3, 4, 5], covariance=np.full((3, 3), 3))
    combined = first + second
    assert_allclose(combined.values, [1, 2, 3, 4, 5])
    # cross-terms between the two stacked blocks must stay zero
    assert_allclose(combined.covariance[0], [2, 2, 0, 0, 0])
    assert_allclose(combined.covariance[4], [0, 0, 3, 3, 3])
def covariance_diagonal():
    """Build a ``Covariance`` from three parameters with pre-set errors."""
    pars = Parameters(
        [
            Parameter("x", 1, error=0.1),
            Parameter("y", 2, error=0.2),
            Parameter("z", 3, error=0.3),
        ]
    )
    return Covariance(parameters=pars)
def from_dict(cls, data):
    """Create the model by reading the template referenced in ``data``."""
    instance = cls.read(data["filename"], normalize=data.get("normalize", True))
    instance.parameters = Parameters.from_dict(data)
    # expose each parameter as an attribute, e.g. ``model.norm``
    for par in instance.parameters.parameters:
        setattr(instance, par.name, par)
    return instance
def from_dict(cls, data):
    """Create a model from a serialized dict, keeping defaults for
    parameters missing from the input (with a warning)."""
    kwargs = {}

    # unwrap a single-component wrapper such as {"spatial": {...}}
    first_key = next(iter(data))
    if first_key in ["spatial", "temporal", "spectral"]:
        data = data[first_key]

    if data["type"] not in cls.tag:
        raise ValueError(
            f"Invalid model type {data['type']} for class {cls.__name__}"
        )

    names_in_input = [entry["name"] for entry in data["parameters"]]
    par_data = []
    for default_par in cls.default_parameters:
        par_dict = default_par.to_dict()
        try:
            idx = names_in_input.index(par_dict["name"])
        except ValueError:
            # parameter absent from the YAML input: fall back to the default
            log.warning(
                f"Parameter '{par_dict['name']}' not defined in YAML file."
                f" Using default value: {par_dict['value']} {par_dict['unit']}"
            )
        else:
            par_dict.update(data["parameters"][idx])
        par_data.append(par_dict)

    parameters = Parameters.from_dict(par_data)

    # TODO: this is a special case for spatial models, maybe better move to `SpatialModel` base class
    if "frame" in data:
        kwargs["frame"] = data["frame"]

    return cls.from_parameters(parameters, **kwargs)
def covariance(self, covariance):
    """Store the full covariance matrix and propagate per-parameter errors."""
    self._covariance.data = covariance
    for parameter in self.parameters:
        sub = self._covariance.get_subcovariance(Parameters([parameter]))
        parameter.error = np.sqrt(sub)
def from_dict(cls, data):
    """Create the background model from its serialized dict representation."""
    if "filename" in data:
        bkg_map = Map.read(data["filename"])
    elif "map" in data:
        bkg_map = data["map"]
    else:
        # TODO: for now create a fake map for serialization,
        # updated in MapDataset.from_dict()
        energy_axis = MapAxis.from_edges(
            np.logspace(-1, 1, 2), unit=u.TeV, name="energy"
        )
        geom = WcsGeom.create(
            skydir=(0, 0), npix=(1, 1), frame="galactic", axes=[energy_axis]
        )
        bkg_map = Map.from_geom(geom)

    return cls.from_parameters(
        parameters=Parameters.from_dict(data["parameters"]),
        map=bkg_map,
        name=data["name"],
        datasets_names=data.get("datasets_names"),
        filename=data.get("filename"),
    )
def covariance(self):
    """Return the covariance with the diagonal rebuilt from parameter errors."""
    for parameter in self.parameters:
        block = Covariance(Parameters([parameter]), data=[[parameter.error ** 2]])
        self._covariance.set_subcovariance(block)
    return self._covariance
def __init__(self, name=""):
    """Dummy model with three parameters and a unit data shape."""
    self.name = name
    x = Parameter("x", 2)
    y = Parameter("y", 3e2)
    z = Parameter("z", 4e-2)
    self.parameters = Parameters([x, y, z])
    self.data_shape = (1,)
def from_dict(cls, data):
    """Create a model from a serialized dict; defaults fill in (with a
    warning) any parameter missing from the input."""
    kwargs = {}

    names_in_input = [entry["name"] for entry in data["parameters"]]
    par_data = []
    for default_par in cls.default_parameters:
        par_dict = default_par.to_dict()
        try:
            idx = names_in_input.index(par_dict["name"])
        except ValueError:
            # not present in the serialized data: keep the default
            log.warning(
                f"Parameter {par_dict['name']} not defined."
                f" Using default value: {par_dict['value']} {par_dict['unit']}"
            )
        else:
            par_dict.update(data["parameters"][idx])
        par_data.append(par_dict)

    parameters = Parameters.from_dict(par_data)

    # TODO: this is a special case for spatial models, maybe better move to `SpatialModel` base class
    if "frame" in data:
        kwargs["frame"] = data["frame"]

    return cls.from_parameters(parameters, **kwargs)
def __init__(self, r_s=None, rho_s=1 * u.Unit("GeV / cm3")):
    """Density profile; ``r_s`` falls back to ``DEFAULT_SCALE_RADIUS``."""
    if r_s is None:
        r_s = self.DEFAULT_SCALE_RADIUS
    self.parameters = Parameters(
        [
            Parameter("r_s", u.Quantity(r_s)),
            Parameter("rho_s", u.Quantity(rho_s)),
        ]
    )
def parameters(self):
    """List of parameters (`~gammapy.modeling.Parameters`)"""
    return Parameters.from_stack(
        [component.spectral_model.parameters for component in self.models]
    )
def test_unique_parameters():
    """Duplicates are dropped by ``unique_parameters``; first occurrence wins."""
    a, b, c = Parameter("a", 1), Parameter("b", 2), Parameter("c", 3)
    parameters = Parameters([a, b, a, c])
    assert parameters.names == ["a", "b", "a", "c"]
    assert parameters.unique_parameters.names == ["a", "b", "c"]
def parameters(self):
    """Stacked parameters of the spatial (if present) and spectral models."""
    stacks = []
    if self.spatial_model is not None:
        stacks.append(self.spatial_model.parameters)
    stacks.append(self.spectral_model.parameters)
    return Parameters.from_stack(stacks)
def parameters(self):
    """List of parameters (`~gammapy.modeling.Parameters`)"""
    pars = list(self.model.parameters) if self.model else []
    return Parameters(pars)
def __init__(self, r_s=None, alpha=None, rho_s=1 * u.Unit("GeV / cm3")):
    """Density profile; ``r_s`` and ``alpha`` fall back to class defaults."""
    if alpha is None:
        alpha = self.DEFAULT_ALPHA
    if r_s is None:
        r_s = self.DEFAULT_SCALE_RADIUS
    self.parameters = Parameters(
        [
            Parameter("r_s", u.Quantity(r_s)),
            Parameter("alpha", u.Quantity(alpha)),
            Parameter("rho_s", u.Quantity(rho_s)),
        ]
    )
def covariance(self):
    """Return covariance with the diagonal rebuilt from parameter errors.

    A NaN error is replaced by a variance of 1 so the matrix stays usable.
    """
    self._check_covariance()
    for parameter in self.parameters:
        variance = np.nan_to_num(parameter.error**2, nan=1)
        block = Covariance(Parameters([parameter]), data=[[variance]])
        self._covariance.set_subcovariance(block)
    return self._covariance
def from_dict(cls, data):
    """Create the model from its serialized parameter dict."""
    parameters = Parameters.from_dict(data["parameters"])
    kwargs = {}
    # TODO: this is a special case for spatial models, maybe better move to `SpatialModel` base class
    if "frame" in data:
        kwargs["frame"] = data["frame"]
    return cls.from_parameters(parameters, **kwargs)
def parameters(self):
    """List of parameters (`~gammapy.modeling.Parameters`)"""
    stacks = []
    if self.model:
        stacks.append(self.model.parameters)
    if self.background_model:
        stacks.append(self.background_model.parameters)
    return Parameters.from_stack(stacks)
def test_set_subcovariance():
    """Sub-covariance entries land at the positions of their parameters."""
    a, b, c = Parameter("a", 10), Parameter("b", 20), Parameter("c", 30)
    outer = Parameters([a, c, b])
    outer.covariance = np.zeros((3, 3))
    inner = Parameters([a, b])
    inner.covariance = np.array([[2, 3], [6, 7]])
    outer.set_subcovariance(inner)
    expected = np.array([[2, 0, 3], [0, 0, 0], [6, 0, 7]])
    assert_equal(outer.covariance, expected)
def from_dict(cls, data):
    """Read the template map and build the model; a blank unit becomes sr^-1."""
    template = Map.read(data["filename"])
    if template.unit == "":
        template.unit = "sr-1"
    return cls.from_parameters(
        parameters=Parameters.from_dict(data["parameters"]),
        map=template,
        filename=data["filename"],
        normalize=data.get("normalize", True),
    )
def test_parameters_s():
    """``scale_method`` survives a dict round-trip and drives autoscaling."""
    pars = Parameters(
        [
            Parameter("", 20, scale_method="scale10"),
            Parameter("", 20, scale_method=None),
        ]
    )
    pars_dict = pars.to_dict()
    pars.autoscale()
    assert_allclose(pars[0].factor, 2)
    assert_allclose(pars[0].scale, 10)
    assert pars_dict[0]["scale_method"] == "scale10"
    assert pars_dict[1]["scale_method"] is None

    # round-trip through the serialized representation
    restored = Parameters.from_dict(pars_dict)
    restored.autoscale()
    assert_allclose(restored[0].factor, 2)
    assert_allclose(restored[0].scale, 10)
    assert restored[1].scale_method is None
    restored.autoscale()
    assert_allclose(restored[1].factor, 20)
    assert_allclose(restored[1].scale, 1)
def _init_from_parameters(self, parameters):
    """Create model from list of parameters.

    This should be called for models that generate the parameters
    dynamically in ``__init__``, like the ``NaimaSpectralModel``.
    """
    # TODO: should we pass through `Parameters` here? Why?
    self._parameters = Parameters(parameters)
    # expose each parameter as an attribute on the model
    for par in self._parameters:
        setattr(self, par.name, par)
def from_dict(cls, data):
    """Build the model from either a filename or an in-memory map."""
    if "filename" in data:
        template = Map.read(data["filename"])
    elif "map" in data:
        template = data["map"]
    else:
        raise ValueError("Requires either filename or `Map` object")
    instance = cls(map=template, name=data["name"])
    instance.parameters = Parameters.from_dict(data)
    # expose each parameter as an attribute on the model
    for par in instance.parameters.parameters:
        setattr(instance, par.name, par)
    return instance
def model(self, model):
    """Set the spectral model and rebuild the predictor; ``None`` clears both."""
    self._model = model
    if model is None:
        self._parameters = None
        self._predictor = None
    else:
        self._parameters = Parameters(self._model.parameters.parameters)
        self._predictor = SpectrumEvaluator(
            model=self.model,
            livetime=self.livetime,
            aeff=self.aeff,
            e_true=self._energy_axis.edges,
            edisp=self.edisp,
        )
def from_dict(cls, data):
    """Read the template map; coerce its unit to sr^-1 if not equivalent."""
    template = Map.read(data["filename"])
    if not template.unit.is_equivalent("sr-1"):
        template.unit = "sr-1"
        log.warning(
            "Spatial template unit is not equivalent to sr^-1, unit changed to sr^-1"
        )
    return cls.from_parameters(
        parameters=Parameters.from_dict(data["parameters"]),
        map=template,
        filename=data["filename"],
        normalize=data.get("normalize", True),
    )
def from_dict(cls, data):
    """Create a model from a serialized dict.

    Fix: the previous implementation paired ``cls.default_parameters``
    with ``data["parameters"]`` positionally via ``zip``, which silently
    mispaired values when the serialized list was reordered and silently
    dropped trailing defaults when it was shorter. Parameters are now
    matched by name (consistent with the other ``from_dict``
    implementations in this module); defaults with no matching entry
    keep their default values.
    """
    kwargs = {}
    # index the serialized entries by parameter name
    input_pars = {entry["name"]: entry for entry in data["parameters"]}
    par_data = []
    for par in cls.default_parameters:
        par_dict = par.to_dict()
        # override the default with serialized values, if present
        par_dict.update(input_pars.get(par_dict["name"], {}))
        par_data.append(par_dict)
    parameters = Parameters.from_dict(par_data)

    # TODO: this is a special case for spatial models, maybe better move to `SpatialModel` base class
    if "frame" in data:
        kwargs["frame"] = data["frame"]
    return cls.from_parameters(parameters, **kwargs)
def from_dict(cls, data):
    """Rebuild the model from its dict representation, reading the map from disk."""
    filename = data["filename"]
    template = cls.read(filename).map
    return cls.from_parameters(
        parameters=Parameters.from_dict(data["parameters"]),
        map=template,
        apply_irf=data.get("apply_irf", cls._apply_irf_default),
        datasets_names=data.get("datasets_names"),
        filename=filename,
        name=data.get("name"),
    )
def __init__(self, spectral_model, name="bias", bias=0.0):
    """Wrap ``spectral_model`` with a single bias parameter named ``name``."""
    self.spectral_model = spectral_model
    self.parameter_name = name
    self.bias_parameter = Parameter(
        name, bias, unit="", min=-1.0, max=2.0, frozen=False
    )
    super()._init_from_parameters(Parameters([self.bias_parameter]))
def test_get_subcovariance():
    """Sub-covariance extraction works for Parameters objects and bare lists."""
    a, b, c = Parameter("a", 10), Parameter("b", 20), Parameter("c", 30)
    pars = Parameters([a, b, c])
    pars.covariance = np.array([[2, 3, 4], [6, 7, 8], [10, 11, 12]])
    sub = pars.get_subcovariance(Parameters([a, b]))
    assert_equal(sub, np.array([[2, 3], [6, 7]]))
    assert_equal(pars.get_subcovariance([c]), np.array([[12]]))