def set_backend(self, backend, inplace=False, deep=False):
    """Convert this object's array backend.

    Parameters
    ----------
    backend : str
        Target backend; one of 'numpy', 'sparse', or 'cupy'.
    inplace : bool
        Mutate this instance when True; otherwise return a converted copy.
    deep : bool
        When True, also convert any attribute that is itself a ``Common``
        subclass instance.

    Returns
    -------
    Object with updated ``array_backend``.
    """
    # Determine the backend the data currently lives in.
    if hasattr(self, "array_backend"):
        old_backend = self.array_backend
    elif hasattr(self, "ldf_"):
        old_backend = self.ldf_.array_backend
    else:
        raise ValueError("Unable to determine array backend.")
    if not inplace:
        # Convert a copy rather than mutating self.
        new_obj = self.copy()
        return new_obj.set_backend(backend=backend, inplace=True, deep=deep)
    if backend not in ("numpy", "sparse", "cupy"):
        raise AttributeError(backend, "backend is not supported.")
    # converters[target][source] -> conversion callable; identity when the
    # source backend has no entry (e.g. already in the target backend).
    converters = {
        "numpy": {
            "sparse": lambda arr: arr.todense(),
            "cupy": lambda arr: cp.asnumpy(arr),
        },
        "cupy": {
            "numpy": lambda arr: cp.array(arr),
            "sparse": lambda arr: cp.array(arr.todense()),
        },
        "sparse": {
            "numpy": lambda arr: sp.array(arr),
            "cupy": lambda arr: sp.array(cp.asnumpy(arr)),
        },
    }
    if hasattr(self, "values"):
        convert = converters[backend].get(old_backend, lambda arr: arr)
        self.values = convert(self.values)
    if deep:
        # Recurse into attributes that are themselves backend-aware.
        for attr in vars(self).values():
            if isinstance(attr, Common):
                attr.set_backend(backend, inplace=True, deep=True)
    if hasattr(self, "array_backend"):
        self.array_backend = backend
    return self
def set_backend(self, backend, inplace=False):
    """Convert this object's array backend.

    Parameters
    ----------
    backend : str
        Target backend; one of 'numpy', 'sparse', or 'cupy'.
    inplace : bool
        Mutate this instance when True; otherwise return a converted copy.

    Returns
    -------
    Object with updated ``array_backend``.
    """
    # Determine the backend the data currently lives in.
    if hasattr(self, 'array_backend'):
        old_backend = self.array_backend
    elif hasattr(self, 'ldf_'):
        old_backend = self.ldf_.array_backend
    else:
        raise ValueError('Unable to determine array backend.')
    if not inplace:
        # Convert a deep copy rather than mutating self.
        new_obj = copy.deepcopy(self)
        return new_obj.set_backend(backend=backend, inplace=True)
    if backend not in ('numpy', 'sparse', 'cupy'):
        raise AttributeError(backend, 'backend is not supported.')
    # converters[target][source] -> conversion callable; identity when the
    # source backend has no entry.
    converters = {
        'numpy': {
            'sparse': lambda arr: arr.todense(),
            'cupy': lambda arr: cp.asnumpy(arr),
        },
        'cupy': {
            'numpy': lambda arr: cp.array(arr),
            'sparse': lambda arr: cp.array(arr.todense()),
        },
        'sparse': {
            'numpy': lambda arr: sp.array(arr),
            'cupy': lambda arr: sp.array(cp.asnumpy(arr)),
        },
    }
    if hasattr(self, 'values'):
        convert = converters[backend].get(old_backend, lambda arr: arr)
        self.values = convert(self.values)
    # Recurse into attributes that are themselves backend-aware.
    for attr in vars(self).values():
        if isinstance(attr, Common):
            attr.set_backend(backend, inplace=True)
    if hasattr(self, 'array_backend'):
        self.array_backend = backend
    return self
def _repr_format(self):
    """Build a DataFrame view of the first (index, column) cell for display."""
    # Origin labels: datetime origins are rendered as periods at origin grain.
    if type(self.odims[0]) == np.datetime64:
        origin = pd.Series(self.odims).dt.to_period(self.origin_grain)
    else:
        origin = pd.Series(self.odims)
    # A single None development axis means the columns are the value names.
    if len(self.ddims) == 1 and self.ddims[0] is None:
        columns = list(self.vdims)
    else:
        columns = self.ddims
    cell = self.values[0, 0]
    if cp.get_array_module(self.values).__name__ == 'cupy':
        cell = cp.asnumpy(cell)
    frame = pd.DataFrame(cell, index=origin, columns=columns)
    dashed = str(frame.columns[0]).find('-') > 0
    if dashed and not isinstance(frame.columns, pd.PeriodIndex):
        # Development labels like '12-9999' read better as '12-Ult'.
        frame.columns = [c.replace('-9999', '-Ult') for c in frame.columns]
        deduped = frame.drop_duplicates()
        if len(deduped) == 1:
            # All rows identical -> collapse to a single '(All)' row.
            return deduped.set_index(pd.Index(['(All)']))
        return frame
    return frame
def to_json(self):
    """Serialize the triangle object to a JSON string.

    Returns
    -------
    str
        JSON representation of the object: axis arrays with dtypes, the
        (incremental) values, and the metadata needed to reconstruct it.
    """

    def sparse_out(tri):
        # Sparse-encode a 4D array: flatten to (k*v*o, d), then keep only
        # the non-zero cells as {"(row, col)": value} pairs.
        k, v, o, d = tri.shape
        xp = cp.get_array_module(tri)
        # NOTE(review): `xp == cp != np` is only True when a genuine cupy
        # module is active — presumably cp aliases np when cupy is
        # unavailable; confirm against the module's cp import.
        if xp == cp != np:
            out = cp.asnumpy(tri)
        else:
            out = tri
        coo = coo_matrix(np.nan_to_num(out.reshape((k * v * o, d))))
        return json.dumps(
            dict(
                zip([str(item) for item in zip(coo.row, coo.col)], coo.data)))

    json_dict = {}
    if self.is_val_tri:
        # Valuation triangles carry date-like ddims; serialize them
        # separately (note `.values.tolist()` vs plain `.tolist()` below).
        ddims = self.ddims
        json_dict['ddims'] = {
            'dtype': str(ddims.dtype),
            'array': ddims.values.tolist()
        }
        attributes = ['kdims', 'vdims', 'odims']
    else:
        attributes = ['kdims', 'vdims', 'odims', 'ddims']
    # Each axis is stored with its dtype so it can be rebuilt faithfully.
    for attribute in attributes:
        json_dict[attribute] = {
            'dtype': str(getattr(self, attribute).dtype),
            'array': getattr(self, attribute).tolist()
        }
    xp = cp.get_array_module(self.values)
    # Values are serialized in incremental form.
    if xp == cp != np:
        out = cp.asnumpy(self.cum_to_incr().values)
    else:
        out = self.cum_to_incr().values
    # Use the sparse encoding when more than 40% of cells are zero/NaN.
    if np.sum(np.nan_to_num(out) == 0) / np.prod(self.shape) > 0.40:
        json_dict['values'] = {
            'dtype': str(out.dtype),
            'array': sparse_out(out),
            'sparse': True
        }
    else:
        json_dict['values'] = {
            'dtype': str(out.dtype),
            'array': out.tolist(),
            'sparse': False
        }
    json_dict['key_labels'] = self.key_labels
    json_dict['origin_grain'] = self.origin_grain
    json_dict['development_grain'] = self.development_grain
    json_dict['nan_override'] = self.nan_override
    json_dict['is_cumulative'] = self.is_cumulative
    json_dict['is_val_tri'] = self.is_val_tri
    json_dict['valuation_date'] = self.valuation_date.strftime('%Y-%m-%d')
    return json.dumps(json_dict)
def sparse_out(tri):
    """Sparse-encode a 4D array as a JSON object of {"(row, col)": value}."""
    k, v, o, d = tri.shape
    xp = cp.get_array_module(tri)
    # Copy off the device only when a genuine cupy array is in play.
    dense = cp.asnumpy(tri) if xp == cp != np else tri
    flattened = np.nan_to_num(dense.reshape((k * v * o, d)))
    coo = coo_matrix(flattened)
    keys = [str(pair) for pair in zip(coo.row, coo.col)]
    return json.dumps(dict(zip(keys, coo.data)))
def _repr_format(self):
    """Build a DataFrame view of the first (index, column) cell for display."""
    odims, ddims = self._repr_date_axes()
    cell = self.values[0, 0]
    if cp.get_array_module(self.values).__name__ == 'cupy':
        cell = cp.asnumpy(cell)
    frame = pd.DataFrame(cell, index=odims, columns=ddims)
    dashed = str(frame.columns[0]).find('-') > 0
    if dashed and not isinstance(frame.columns, pd.PeriodIndex):
        # Development labels like '12-9999' read better as '12-Ult'.
        frame.columns = [c.replace('-9999', '-Ult') for c in frame.columns]
        deduped = frame.drop_duplicates()
        if len(deduped) == 1:
            # All rows identical -> collapse to a single '(All)' row.
            return deduped.set_index(pd.Index(['(All)']))
        return frame
    return frame
def dropna(self):
    """Remove origin/development vectors from the edge of a triangle
    that are all missing values.

    This may come in handy for a new line of business that doesn't have
    origins/developments of an existing line in the same triangle.
    """
    xp = cp.get_array_module(self.values)
    # Collapse index (axis 0) and column (axis 1) axes so a cell is
    # populated when any underlying cell is populated.
    obj = self.sum(axis=0).sum(axis=1)
    # 1 where the origin period has any data, NaN where fully missing
    # (the *0+1 trick preserves NaNs while mapping every number to 1).
    odim = list(obj.sum(axis=-1).values[0, 0, :, 0] * 0 + 1)
    # First and last origin periods that contain data.
    min_odim = obj.origin[odim.index(1)]
    max_odim = obj.origin[::-1][odim[::-1].index(1)]
    if obj.shape[-1] != 1:
        # Same 0/1 mask trick along the development axis; nan_to_num turns
        # the all-missing ages into 0 so they can be filtered out below.
        if xp.__name__ == 'cupy':
            ddim = cp.asnumpy(
                xp.nan_to_num((obj.sum(axis=-2).values * 0 + 1)[0, 0, 0]))
        else:
            ddim = np.nan_to_num((obj.sum(axis=-2).values * 0 + 1)[0, 0, 0])
        # Keep only development ages with at least one populated cell.
        ddim = obj.development.iloc[:, 0][pd.Series(ddim).astype(bool)]
        obj = self[(self.development >= ddim.min()) &
                   (self.development <= ddim.max())]
        return obj[(self.origin >= min_odim) & (self.origin <= max_odim)]
    # Single development age: only the origin axis needs trimming.
    obj = self[(self.origin >= min_odim) & (self.origin <= max_odim)]
    return obj