def generate_lhs_flrg(self, sample):
    """Build every LHS-only probabilistic FLRG compatible with the sample.

    For each lag position, all fuzzy sets with nonzero membership for the
    corresponding sample value are candidates; the cartesian product of the
    per-lag candidate lists (enumerated via the path tree) yields one FLRG
    per combination.

    :param sample: crisp values, one per lag (length >= self.order)
    :return: list of ProbabilisticWeightedFLRG with only the LHS filled
    """
    candidates = {}
    for lag in np.arange(0, self.order):
        value = sample[lag]
        candidates[lag] = [
            fs for fs in self.partitioner.ordered_sets
            if self.sets[fs].membership(value) > 0.0
        ]

    # Enumerate every combination of candidate sets, one per lag.
    root = tree.FLRGTreeNode(None)
    tree.build_tree_without_order(root, candidates, 0)

    flrgs = []
    for branch in root.paths():
        chain = list(reversed(list(filter(None.__ne__, branch))))
        flrg = ProbabilisticWeightedFLRG(self.order)
        for fuzzy_set in chain:
            flrg.append_lhs(fuzzy_set)
        flrgs.append(flrg)

    return flrgs
def generate_lhs_flrg(self, sample):
    """Build every LHS-only high order FLRG compatible with the sample.

    Uses the configured lag indexes (``self.lags``, 1-based into the sample)
    and keeps only fuzzy sets whose membership exceeds ``self.alpha_cut``.
    One FLRG is produced per combination of per-lag candidate sets.

    :param sample: crisp lagged values indexed by ``self.lags``
    :return: list of HighOrderFLRG with only the LHS filled
    """
    candidates = {}
    for ix, lag in enumerate(self.lags):
        value = sample[lag - 1]
        candidates[ix] = [
            fs for fs in self.partitioner.ordered_sets
            if self.sets[fs].membership(value) > self.alpha_cut
        ]

    # Enumerate every combination of candidate sets, one per lag.
    root = tree.FLRGTreeNode(None)
    tree.build_tree_without_order(root, candidates, 0)

    flrgs = []
    for branch in root.paths():
        chain = list(reversed(list(filter(None.__ne__, branch))))
        flrg = HighOrderFLRG(self.order)
        for fuzzy_set in chain:
            flrg.append_lhs(fuzzy_set)
        flrgs.append(flrg)

    return flrgs
def generate_lhs_flrg_fuzzyfied(self, sample, explain=False):
    """Build every LHS-only weighted high order FLRG from an already
    fuzzyfied sample.

    :param sample: fuzzyfied values indexed by ``self.lags`` (1-based);
        each entry is the collection of fuzzy sets active for that lag
    :param explain: when True, print the fuzzy sets selected for each lag
    :return: list of WeightedHighOrderFLRG with only the LHS filled
    """
    lags = {}
    flrgs = []
    for ct, o in enumerate(self.lags):
        lags[ct] = sample[o - 1]

        if explain:
            # BUG FIX: the original print referenced the undefined name
            # `lhs`, raising NameError whenever explain=True. The selected
            # sets for this lag are lags[ct].
            print("\t (Lag {}) {} -> {} \n".format(o, sample[o - 1], lags[ct]))

    # Build the tree with all possible paths
    root = tree.FLRGTreeNode(None)
    tree.build_tree_without_order(root, lags, 0)

    # Trace the possible paths
    for p in root.paths():
        flrg = WeightedHighOrderFLRG(self.order)
        path = list(reversed(list(filter(None.__ne__, p))))

        for lhs in path:
            flrg.append_lhs(lhs)

        flrgs.append(flrg)

    return flrgs
def generate_lhs_flrg(self, sample):
    """Build every LHS-only probabilistic FLRG compatible with the sample.

    Each lagged value (indexed by ``self.lags``, 1-based) is fuzzyfied
    through the partitioner with the configured alpha cut; one FLRG is
    produced per combination of the per-lag fuzzy sets.

    :param sample: crisp lagged values indexed by ``self.lags``
    :return: list of ProbabilisticWeightedFLRG with only the LHS filled
    """
    candidates = {}
    for ix, lag in enumerate(self.lags):
        candidates[ix] = FuzzySet.fuzzyfy(sample[lag - 1],
                                          partitioner=self.partitioner,
                                          mode="sets",
                                          alpha_cut=self.alpha_cut)

    # Enumerate every combination of candidate sets, one per lag.
    root = tree.FLRGTreeNode(None)
    tree.build_tree_without_order(root, candidates, 0)

    flrgs = []
    for branch in root.paths():
        chain = list(reversed(list(filter(None.__ne__, branch))))
        flrg = ProbabilisticWeightedFLRG(self.order)
        for fuzzy_set in chain:
            flrg.append_lhs(fuzzy_set)
        flrgs.append(flrg)

    return flrgs
def _affected_flrgs(self, sample, k, time_displacement, window_size):
    """Find the non-stationary FLRGs activated by the lagged sample and the
    membership of the sample in each of them.

    :param sample: crisp lagged values (length self.order)
    :param k: index of the value being forecast in the original series
    :param time_displacement: temporal offset applied to the perturbation window
    :param window_size: size of the non-stationary perturbation window
    :return: [affected_flrgs, affected_flrgs_memberships] — parallel lists of
        HighOrderNonStationaryFLRG and their activation degrees (product of
        per-lag memberships)
    """
    # print("input: " + str(ndata[k]))
    affected_flrgs = []
    affected_flrgs_memberships = []
    lags = {}
    for ct, dat in enumerate(sample):
        # Perturbation window index for this lag position.
        tdisp = common.window_index((k + time_displacement) - (self.order - ct), window_size)
        # Indexes (into partitioner.ordered_sets) of the fuzzy sets activated
        # by this lagged value under the time-displaced perturbation.
        # NOTE(review): the comprehension reuses the name `ct`; in Python 3
        # the comprehension scope is separate, so the outer `ct` is unaffected.
        sel = [ct for ct, key in enumerate(self.partitioner.ordered_sets)
               if self.sets[key].membership(dat, tdisp) > 0.0]
        if len(sel) == 0:
            # Value fell outside every set: fall back to the boundary set index.
            sel.append(common.check_bounds_index(dat, self.partitioner, tdisp))
        lags[ct] = sel
    # Build the tree with all possible paths
    root = tree.FLRGTreeNode(None)
    tree.build_tree_without_order(root, lags, 0)
    # Trace the possible paths and build the PFLRG's
    for p in root.paths():
        path = list(reversed(list(filter(None.__ne__, p))))
        flrg = HighOrderNonStationaryFLRG(self.order)
        for kk in path:
            flrg.append_lhs(self.sets[self.partitioner.ordered_sets[kk]])
        affected_flrgs.append(flrg)
        # affected_flrgs_memberships.append_rhs(flrg.get_membership(sample, disp))
        # print(flrg.get_key())
        # the FLRG is here because of the bounds verification
        # Membership of the whole sample in this FLRG: product of the
        # per-lag memberships, each evaluated at its own window index.
        mv = []
        for ct, dat in enumerate(sample):
            td = common.window_index((k + time_displacement) - (self.order - ct), window_size)
            tmp = flrg.LHS[ct].membership(dat, td)
            mv.append(tmp)
        # print(mv)
        affected_flrgs_memberships.append(np.prod(mv))
    return [affected_flrgs, affected_flrgs_memberships]
def forecast_ahead_distribution(self, data, steps, **kwargs):
    """Forecast ``steps`` values ahead, returning one probability
    distribution per step, built from the pooled forecasts of the
    ensemble's models.

    :param data: historical time series; the forecast starts at ``start``
    :param steps: number of steps ahead to forecast
    :keyword method: point aggregation method, stored on self.point_method
    :keyword smooth: smoothing method for the distributions (default "KDE")
    :keyword alpha: when given, restrict forecasts to the interquantile range
    :keyword start: index where forecasting begins (default self.order)
    :return: list of ProbabilityDistribution, one per forecast step
    """
    if 'method' in kwargs:
        self.point_method = kwargs.get('method', 'mean')

    smooth = kwargs.get("smooth", "KDE")
    alpha = kwargs.get("alpha", None)

    ret = []

    start = kwargs.get('start', self.order)

    uod = self.get_UoD()

    sample = data[start - self.order:start]

    for k in np.arange(self.order, steps + self.order):
        forecasts = []
        lags = {}
        for i in np.arange(0, self.order):
            # BUG FIX: the original ignored the loop variable `i`
            # (sample[k - self.order]), so every lag position received the
            # same value; offset by `i` to feed each lag its own past value,
            # mirroring forecast_ahead_interval.
            lags[i] = sample[k - self.order + i]

        # Build the tree with all possible paths
        root = tree.FLRGTreeNode(None)
        tree.build_tree_without_order(root, lags, 0)

        for p in root.paths():
            path = list(reversed(list(filter(None.__ne__, p))))
            forecasts.extend(self.get_models_forecasts(path))

        # Summarize the pooled forecasts into quantile samples used as the
        # next step's inputs.
        sample.append(sampler(forecasts, np.arange(0.1, 1, 0.1)))

        if alpha is None:
            forecasts = np.ravel(forecasts).tolist()
        else:
            forecasts = self.get_distribution_interquantile(
                np.ravel(forecasts).tolist(), alpha)

        dist = ProbabilityDistribution.ProbabilityDistribution(
            smooth, uod=uod, data=forecasts, name="", **kwargs)

        ret.append(dist)

    return ret
def generate_flrg(self, data, **kwargs):
    """Scan the series and populate ``self.flrgs`` with non-stationary
    high order FLRGs: the LHS comes from each lagged window, the RHS from
    the current value, both evaluated under the perturbation window.

    :param data: crisp time series
    :keyword window_size: size of the non-stationary perturbation window
    """
    window_size = kwargs.get("window_size", 1)

    for k in np.arange(self.order, len(data)):
        if self.dump:
            print("FLR: " + str(k))

        sample = data[k - self.order: k]
        disp = common.window_index(k, window_size)

        # Fuzzy sets activated by the current (RHS) value, falling back
        # to the boundary set when nothing is activated.
        rhs = [self.sets[key] for key in self.partitioner.ordered_sets
               if self.sets[key].membership(data[k], disp) > 0.0]
        if not rhs:
            rhs = [common.check_bounds(data[k], self.partitioner, disp)]

        # Candidate fuzzy sets for each lag position, each under its own
        # time-displaced window.
        lags = {}
        for o in np.arange(0, self.order):
            tdisp = common.window_index(k - (self.order - o), window_size)
            lhs = [self.sets[key] for key in self.partitioner.ordered_sets
                   if self.sets[key].membership(sample[o], tdisp) > 0.0]
            if not lhs:
                lhs = [common.check_bounds(sample[o], self.partitioner, tdisp)]
            lags[o] = lhs

        root = tree.FLRGTreeNode(None)
        tree.build_tree_without_order(root, lags, 0)

        # One FLRG per combination of lagged fuzzy sets; merge into the
        # existing rule base and append every activated RHS set.
        for branch in root.paths():
            chain = list(reversed(list(filter(None.__ne__, branch))))
            flrg = HighOrderNonStationaryFLRG(self.order)
            for fuzzy_set in chain:
                flrg.append_lhs(fuzzy_set)

            key = flrg.get_key()
            if key not in self.flrgs:
                self.flrgs[key] = flrg
            for st in rhs:
                self.flrgs[key].append_rhs(st)
def forecast_ahead_interval(self, data, steps, **kwargs):
    """Forecast ``steps`` values ahead, returning one interval per step,
    built from the pooled forecasts of the ensemble's models.

    :param data: historical time series; the last ``self.order`` values seed
        the lagged samples
    :param steps: number of steps ahead to forecast
    :keyword method: interval method, stored on self.interval_method
    :keyword alpha: significance level, stored on self.alpha
    :return: list of intervals, one per forecast step
    """
    if 'method' in kwargs:
        self.interval_method = kwargs.get('method', 'quantile')
    if 'alpha' in kwargs:
        self.alpha = kwargs.get('alpha', self.alpha)

    ret = []

    # Seed with singleton samples from the tail of the known data.
    samples = [[value] for value in data[-self.order:]]

    for step in np.arange(self.order, steps + self.order):
        lags = {}
        for ix in np.arange(0, self.order):
            lags[ix] = samples[step - self.order + ix]

        # Build the tree with all possible paths through the lagged samples.
        root = tree.FLRGTreeNode(None)
        tree.build_tree_without_order(root, lags, 0)

        forecasts = []
        for branch in root.paths():
            chain = list(reversed(list(filter(None.__ne__, branch))))
            forecasts.extend(self.get_models_forecasts(chain))

        # Quantile-sample the pooled forecasts as the next step's inputs.
        samples.append(sampler(forecasts, np.arange(0.1, 1, 0.2)))

        interval = self.get_interval(forecasts)
        if len(interval) == 1:
            interval = interval[0]
        ret.append(interval)

    return ret
def generate_lhs_flrs(self, data):
    """Build the multivariate FLRs for one data point: each explanatory
    variable is fuzzyfied and every combination of per-variable fuzzy sets
    yields one FLR. Only FLRs covering all variables are kept.

    :param data: mapping/record indexed by each variable's data_label
    :return: list of MVFLR.FLR with the LHS filled
    """
    lags = {}
    for vc, var in enumerate(self.explanatory_variables):
        lags[vc] = common.fuzzyfy_instance(data[var.data_label], var)

    root = tree.FLRGTreeNode(None)
    tree.build_tree_without_order(root, lags, 0)

    flrs = []
    for branch in root.paths():
        chain = list(reversed(list(filter(None.__ne__, branch))))
        flr = MVFLR.FLR()
        for var_name, fuzzy_set in chain:
            flr.set_lhs(var_name, fuzzy_set)

        # Keep only complete FLRs: exactly one fuzzy set per variable.
        if len(flr.LHS.keys()) == len(self.explanatory_variables):
            flrs.append(flr)

    return flrs
def generate_lhs_flrg(self, sample):
    """Build every LHS-only clustered multivariate high order FLRG
    compatible with the sample: each lagged value is fuzzyfied via
    ``self.fuzzyfication`` and one FLRG is produced per combination of
    per-lag fuzzy sets.

    :param sample: values, one per lag (length >= self.order)
    :return: list of ClusteredMultivariateHighOrderFLRG with only the LHS filled
    """
    candidates = {}
    for lag in np.arange(0, self.order):
        candidates[lag] = self.fuzzyfication(sample[lag])

    # Enumerate every combination of candidate sets, one per lag.
    root = tree.FLRGTreeNode(None)
    tree.build_tree_without_order(root, candidates, 0)

    flrgs = []
    for branch in root.paths():
        chain = list(reversed(list(filter(None.__ne__, branch))))
        flrg = ClusteredMultivariateHighOrderFLRG(self.order)
        for fuzzy_set in chain:
            flrg.append_lhs(fuzzy_set)
        flrgs.append(flrg)

    return flrgs
def forecast_ahead_distribution(self, ndata, steps, **kwargs):
    """Forecast ``steps`` ahead, propagating full probability distributions:
    each step's distribution is the mixture of one-step forecast
    distributions over every combination of nonzero-probability bins of the
    previous ``self.order`` distributions, weighted by the joint (product)
    probability of that combination.

    :param ndata: historical time series; forecasting starts at ``start``
    :param steps: number of steps ahead to forecast
    :keyword smooth: smoothing method for the distributions (default "none")
    :keyword bins: explicit bin values; otherwise ``num_bins`` (default 100)
        equally spaced bins over the universe of discourse are used
    :keyword start: index where forecasting begins (default self.order)
    :return: list of ProbabilityDistribution, one per forecast step (the
        seed distributions for the initial sample are dropped)
    """
    ret = []
    smooth = kwargs.get("smooth", "none")
    uod = self.get_UoD()
    if 'bins' in kwargs:
        # pop (not get) so **kwargs below doesn't pass 'bins' twice.
        _bins = kwargs.pop('bins')
        nbins = len(_bins)
    else:
        nbins = kwargs.get("num_bins", 100)
        _bins = np.linspace(uod[0], uod[1], nbins)
    start = kwargs.get('start', self.order)
    sample = ndata[start - self.order:start]
    # Seed ret with degenerate (point-mass) distributions for the known
    # sample values, so the loop below can always look back self.order steps.
    for dat in sample:
        if 'type' in kwargs:
            kwargs.pop('type')
        tmp = ProbabilityDistribution.ProbabilityDistribution(smooth, uod=uod, bins=_bins, **kwargs)
        tmp.set(dat, 1.0)
        ret.append(tmp)
    # First true forecast: one-step distribution from the crisp sample.
    dist = self.forecast_distribution(sample, bins=_bins)[0]
    ret.append(dist)
    for k in np.arange(self.order + 1, steps + self.order + 1):
        dist = ProbabilityDistribution.ProbabilityDistribution(smooth, uod=uod, bins=_bins, **kwargs)
        lags = {}
        # Find all bins of past distributions with probability greater than zero
        for ct, dd in enumerate(ret[k - self.order:k]):
            vals = [
                float(v) for v in dd.bins
                # round(..., 4) prunes numerically-negligible densities to
                # keep the combinatorial tree below tractable.
                if round(dd.density(v), 4) > 0
            ]
            lags[ct] = sorted(vals)
        root = tree.FLRGTreeNode(None)
        tree.build_tree_without_order(root, lags, 0)
        # Trace all possible combinations between the bins of past distributions
        for p in root.paths():
            path = list(reversed(list(filter(None.__ne__, p))))
            # get the combined probabilities for this path
            pk = np.prod([
                ret[k - self.order + o].density(path[o])
                for o in np.arange(0, self.order)
            ])
            # One-step forecast distribution conditioned on this bin path,
            # mixed into the step's distribution with weight pk.
            d = self.forecast_distribution(path)[0]
            # NOTE(review): `bin` shadows the builtin of the same name;
            # harmless here but worth renaming in a behavior-changing pass.
            for bin in _bins:
                dist.set(bin, dist.density(bin) + pk * d.density(bin))
        ret.append(dist)
    # Drop the self.order seed (point-mass) distributions.
    return ret[self.order:]