def getCubeValues(self, result, nodeDic, nodeId, query):
    """Evaluate a cube *query* (filters + columns) against *result*.

    result: evaluated node value; only processed when it is a cubepy.Cube
        (otherwise the method returns None, as before).
    nodeDic: mapping of node id -> node object.
    nodeId: id of the node being queried.
    query: dict with "filters" (list of {"field", "values"} or None) and
        "columns" (list of dimension names to keep).

    Returns a dict {"dims": [...], "values": [...]} where each dims entry
    describes one kept axis and "values" is the aggregated (summed) data.
    """
    if isinstance(result, cubepy.Cube):
        res = {"dims": [], "values": []}

        # Build one cubepy.Index filter per requested dimension filter.
        _filters = []
        if query["filters"] is not None:
            for dimFilter in query["filters"]:
                field = str(dimFilter["field"])
                if self.hasDim(result, field):
                    # Axes coming from the numpy evaluator ("Axis0...") are
                    # always numeric; otherwise ask the node for the type.
                    if "." in field and field.startswith("Axis"):
                        indexType = "N"
                    else:
                        indexType = self.getIndexType(nodeDic, nodeId, field)
                    # "S" -> string-valued index, anything else -> integer.
                    caster = str if indexType == "S" else int
                    _filters.append(
                        cubepy.Index(
                            field,
                            [caster(v) for v in dimFilter["values"]]))

        _filteredResult = result.filter(_filters)

        # Describe every requested column that is a real axis of the cube.
        for col in query["columns"]:
            if col in self.getIndexes(nodeDic[nodeId], result):
                values = [
                    str(v)
                    for v in _filteredResult.axis(col).values.tolist()
                ]
                res["dims"].append({
                    "field": col,
                    "count": len(values),
                    "values": values
                })

        # Aggregate away every axis not kept by the query.
        resultValues = _filteredResult.sum(keep=query["columns"])
        if isinstance(resultValues, cubepy.Cube):
            # Flatten in the column order the client asked for.
            res["values"] = resultValues.transpose(
                query["columns"]).values.reshape(
                    resultValues.size).tolist()
        elif isinstance(resultValues, str):
            res["values"] = [resultValues]
        else:
            res["values"] = resultValues
        return res
def hierarchize(cube, levels, maps):
    """Aggregate *cube* from its leaf level up through a level hierarchy.

    cube: cubepy.Cube whose first hierarchy dimension is ``levels[0]``.
    levels: node ids of the hierarchy levels, leaf first, target last.
    maps: node ids of the level-to-level mapping arrays (``maps[pos]``
        translates values up to level ``pos``); entries may be None.

    NOTE(review): this function reads ``nodeDic`` (node results), ``sby``
    (the aggregation function) and the ``pd``/``xr``/``cubepy`` modules
    from the enclosing scope — confirm they are defined where this is used.

    Returns a new cubepy.Cube aggregated to the target level and reindexed
    to the full index definitions.
    """
    mapArray = nodeDic[maps[0]].result
    coordValues = mapArray.values.copy()
    # Kept for its KeyError side effect / future diagnostics; not otherwise used.
    targetIndexId = nodeDic[levels[1]].result.name

    # Walk the mapping chain, translating each leaf coordinate one level
    # up at a time until it is expressed at the target level.
    for pos, level in enumerate(levels):
        if pos > 0 and maps[pos] is not None:
            mapArrayLevel = nodeDic[maps[pos]].result
            for ii in range(len(coordValues)):
                if coordValues[ii] is not None:
                    try:
                        coordValues[ii] = mapArrayLevel.filter(
                            mapArrayLevel.dims[0],
                            coordValues[ii]).values.item(0)
                    except Exception:
                        # Best effort: coordinates missing from the
                        # hierarchy are dropped (mapped to None).
                        coordValues[ii] = None

    # Convert the cube to an xarray DataArray.
    _coords = [pd.Index(_axis.values, name=_axis.name) for _axis in cube.axes]
    dataArray = xr.DataArray(cube.values, _coords)

    # Re-label the leaf dimension with the mapped coordinates, then
    # aggregate duplicate labels with the configured aggregator.
    dataArray = dataArray.assign_coords({levels[0]: coordValues})
    _df = dataArray.to_series()
    _df = _df.groupby(list(dataArray.dims), sort=False).agg(sby)
    _da = _df.to_xarray()

    # Reindex every dimension against its full index definition so the
    # output keeps the canonical member order (missing members -> NaN).
    reindex_dic = dict()
    for dimension in _da.dims:
        if dimension == levels[0]:
            # The hierarchy dimension takes the target (last) level's values.
            reindex_dic[dimension] = nodeDic[levels[-1]].result.values
        elif dimension in nodeDic and isinstance(
                nodeDic[dimension].result, cubepy.Index):
            reindex_dic[dimension] = nodeDic[dimension].result.values
    _db = _da.reindex(reindex_dic)

    # Convert back to a cube.
    _indexes = [
        cubepy.Index(_dim, _db.coords[_dim].values) for _dim in _db.dims
    ]
    return cubepy.Cube(_indexes, _db.values)
def index(name, values):
    """Create an index object.

    name: name for the index
    values: list of values of the index.
    Ex.
        cp.index("items",["Item 1","Item 2","Item 3"])
        cp.index("years",[2016,2017,2018])
    """
    default_items = ["Item 1", "Item 2", "Item 3"]
    return cubepy.Index(name, default_items if values is None else values)
def addToFilter(self, nodeDic, dim, filters):
    """Append a cubepy.Index filter for *dim* to *filters* (in place).

    nodeDic: mapping of node id -> node object.
    dim: dict with "field" and an optional "values" list of
        {"value": ...} entries; skipped when "values" is absent/empty.
    filters: list the new cubepy.Index is appended to.
    """
    if "values" in dim and dim["values"] is not None and len(
            dim["values"]) > 0:
        field = str(dim["field"])
        nodeId = None
        # Axes coming from the numpy evaluator ("Axis0...") are always
        # numeric; otherwise ask the node for the index type.
        if "." in field and field.startswith("Axis"):
            indexType = "N"
        else:
            indexType = self.getIndexType(nodeDic, nodeId, field)
        # "S" -> string-valued index, anything else -> integer.
        caster = str if indexType == "S" else int
        filters.append(
            cubepy.Index(field,
                         [caster(v["value"]) for v in dim["values"]]))
def cubeFromNumpy(npArray):
    """Return a cube object built from a numpy array.

    Temporary positional indexes ("axis 0", "axis 1", ...) are generated,
    one per array dimension.
    """
    _indexes = [
        cubepy.Index("axis " + str(axis_pos), list(range(axis_len)))
        for axis_pos, axis_len in enumerate(npArray.shape)
    ]
    return cube(_indexes, npArray)
def geoUnclusterData(self,
                     result,
                     nodeDic,
                     nodeId,
                     rowIndex,
                     attIndex,
                     latField="latitude",
                     lngField="longitude",
                     geoField="geoField",
                     labelField="labelField",
                     sizeField="sizeField",
                     colorField="colorField",
                     iconField="iconField"):
    """Build the list of geo points for *result*, one point per row.

    result: cube holding the geo attributes, indexed by *rowIndex* and
        *attIndex*.
    rowIndex / attIndex: node ids of the row and attribute indexes.
    latField..iconField: attribute names to read; an explicit None falls
        back to the corresponding default name.

    Returns {"points": [...]} where each point carries id, lat, lng,
    geoDef, labelRes, sizeRes, colorRes and iconRes.
    """
    # Callers may pass None explicitly; normalize back to the defaults.
    if latField is None:
        latField = "latitude"
    if lngField is None:
        lngField = "longitude"
    if geoField is None:
        geoField = "geoField"
    if labelField is None:
        labelField = "labelField"
    if sizeField is None:
        sizeField = "sizeField"
    if colorField is None:
        colorField = "colorField"
    if iconField is None:
        iconField = "iconField"

    attribute_names = [
        latField, lngField, geoField, labelField, sizeField, colorField,
        iconField
    ]
    _tmp_for_geo = cubepy.Index('tmp_for_geo', attribute_names)
    _idx = nodeDic[attIndex].result
    rowIndexObj = nodeDic[rowIndex].result

    # Select the seven attributes and lay the data out rows x attributes.
    mapCube = result[_idx == _tmp_for_geo].transpose(
        [rowIndex, "tmp_for_geo"]).values

    # Each row maps positionally onto these output keys.
    point_keys = ("lat", "lng", "geoDef", "labelRes", "sizeRes", "colorRes",
                  "iconRes")
    points = []
    for row_pos, itemRow in enumerate(mapCube):
        vo = dict()
        vo["id"] = str(rowIndexObj.values[row_pos])
        vo.update(zip(point_keys, itemRow))
        points.append(vo)

    return {"points": points}