def assert_equal(a, b):
    """Raise AssertionError unless *a* and *b* compare equal element-wise.

    NOTE(review): `convert`, `check_axes` and `test_func` are free names,
    presumably closed over from an enclosing factory function — confirm.
    When `convert` is truthy both arguments are coerced via aslarray();
    when `check_axes` is truthy the axes must match before values are
    compared with `test_func`.
    """
    if convert:
        a, b = aslarray(a), aslarray(b)
    if check_axes and a.axes != b.axes:
        raise AssertionError("axes differ:\n%s\n\nvs\n\n%s" % (a.axes.info, b.axes.info))
    equal = test_func(a, b)
    if equal.all():
        return
    mismatch = ~equal
    raise AssertionError("\ngot:\n\n%s\n\nexpected:\n\n%s" % (a[mismatch], b[mismatch]))
def test_aslarray(self):
    """Write an array into an Excel sheet, read the range back via aslarray
    and check that both the values and the dtype round-trip unchanged."""
    with open_excel(visible=False) as workbook:
        first_sheet = workbook[0]
        expected = ndtest([Axis(2), Axis(3)])
        # no header so that we have an uniform dtype for the whole sheet
        first_sheet['A1'] = expected
        actual = aslarray(first_sheet['A1:C2'])
        assert actual.equals(expected)
        assert actual.dtype == expected.dtype
def test_aslarray(self):
    """Write an array into an Excel sheet, read the range back via aslarray
    and check that both the values and the dtype round-trip unchanged."""
    with open_excel(visible=False) as workbook:
        first_sheet = workbook[0]
        expected = ndrange((2, 3))
        # no header so that we have an uniform dtype for the whole sheet
        first_sheet['A1'] = expected
        actual = aslarray(first_sheet['A1:C2'])
        assert larray_equal(actual, expected)
        assert actual.dtype == expected.dtype
def set_data(self, data, changes=None, current_filter=None, bg_gradient=None, bg_value=None):
    """Replace the model's data and reset all derived state.

    Parameters
    ----------
    data : array-like or None
        New data to display. None is replaced by an empty 0x0 int8 array.
        Rejected (with an error dialog) if its dtype is neither structured,
        in SUPPORTED_FORMATS, nor a str/unicode type.
    changes : dict or None
        Pending (uncommitted) cell changes. Defaults to an empty dict.
    current_filter : dict or None
        Active axis filter. Defaults to an empty dict.
    bg_gradient, bg_value : optional
        Background gradient/value used for cell shading.
    """
    # ------------------- set changes -------------------
    if changes is None:
        changes = {}
    assert isinstance(changes, dict)
    self.changes = changes
    self._changes2D = {}
    # -------------------- set data ---------------------
    if data is None:
        data = np.empty((0, 0), dtype=np.int8)
    la_data = la.aslarray(data)
    # structured (record) dtypes have non-None .names and are accepted as-is;
    # plain dtypes must be in the supported list or be a string type
    if la_data.dtype.names is None:
        dtn = la_data.dtype.name
        # str.startswith accepts a tuple of prefixes: one call instead of two
        if dtn not in SUPPORTED_FORMATS and not dtn.startswith(('str', 'unicode')):
            QMessageBox.critical(self.dialog, "Error",
                                 "{} arrays are currently not supported".format(dtn))
            return
    # for complex numbers, shading will be based on absolute value
    # but for all other types it will be the real part
    # TODO: there are a lot more complex dtypes than this. Is there a way to get them all in one shot?
    if la_data.dtype in (np.complex64, np.complex128):
        self.color_func = np.abs
    else:
        # np.real is a no-op (returns the array itself) for non-complex types
        self.color_func = np.real
    self.la_data = la_data
    # ------------ set bg gradient and value ------------
    self.bg_gradient = bg_gradient
    self.bg_value = bg_value
    # ------ set current filter and data to display -----
    if current_filter is None:
        current_filter = {}
    assert isinstance(current_filter, dict)
    self.current_filter = current_filter
    self._set_labels_and_data_to_display()
    # ------------------- reset model -------------------
    self.reset()
def _set_labels_and_data_to_display(self):
    """Recompute the 2D view (labels, reshaped data, paging counters) from
    ``self.filtered_data``.

    N-dimensional data is flattened to 2D: the last axis becomes the
    columns, the cartesian product of all other axes becomes the rows.
    Sets: xlabels, ylabels, _data2D, total_rows/total_cols,
    rows_loaded/cols_loaded, then refreshes min/max and local changes.
    """
    la_data = self.filtered_data
    # a filter may reduce the data to a scalar; wrap it back into an array
    if np.isscalar(la_data):
        la_data = la.aslarray(la_data)
    ndim, shape, axes = la_data.ndim, la_data.shape, la_data.axes
    # get 2D shape + xlabels + ylabels
    if ndim == 0:
        # scalar: single cell, no axis labels at all
        self.xlabels = [[], []]
        self.ylabels = [[]]
        shape_2D = (1, 1)
    elif ndim == 1:
        # 1D: show as a single row, labels of the only axis as columns
        self.xlabels = [axes.display_names, axes.labels[-1]]
        self.ylabels = [[]]
        shape_2D = (1, ) + shape
    else:
        # >= 2D: last axis -> columns; cartesian product of the other
        # axes -> rows (one lazy label column per leading axis)
        self.xlabels = [axes.display_names, axes.labels[-1]]
        otherlabels = axes.labels[:-1]
        prod = Product(otherlabels)
        # +1 accounts for the header row above the data rows
        self.ylabels = [_LazyNone(len(prod) + 1)] + [_LazyDimLabels(prod, i)
                                                     for i in range(len(otherlabels))]
        shape_2D = (np.prod(shape[:-1]), shape[-1])
    # set data (reshape to a 2D array if not)
    self._data2D = la_data.data.reshape(shape_2D)
    self.total_rows, self.total_cols = shape_2D
    size = self.total_rows * self.total_cols
    self.reset_minmax()
    # Use paging when the total size, number of rows or number of
    # columns is too large
    if size > LARGE_SIZE:
        # very large overall: cap both dimensions
        self.rows_loaded = min(self.ROWS_TO_LOAD, self.total_rows)
        self.cols_loaded = min(self.COLS_TO_LOAD, self.total_cols)
    else:
        # cap each dimension independently only if it exceeds its threshold
        if self.total_rows > LARGE_NROWS:
            self.rows_loaded = self.ROWS_TO_LOAD
        else:
            self.rows_loaded = self.total_rows
        if self.total_cols > LARGE_COLS:
            self.cols_loaded = self.COLS_TO_LOAD
        else:
            self.cols_loaded = self.total_cols
    self._set_local_changes()