def _compute_masks(self):
    """
    Call mask factories and convert the results to the dataset dtype.

    Returns
    -------
    list of masks
        Mask contents exactly as produced by the factories, converted
        uniformly to dense or sparse matrices depending on
        ``self.use_sparse``.
    """
    converted = [factory().astype(self.dtype) for factory in self.mask_factories]
    # An explicit True/False on use_sparse takes precedence over autodetection
    if self.use_sparse is True:
        return [to_sparse(m) for m in converted]
    if self.use_sparse is False:
        return [to_dense(m) for m in converted]
    # use_sparse not decided yet: stay sparse only when every mask already
    # is sparse, and record that decision on the use_sparse property
    if all(sp.issparse(m) for m in converted):
        self.use_sparse = True
        return converted
    self.use_sparse = False
    return [to_dense(m) for m in converted]
def _run_mask_test_program(lt_ctx, dataset, mask, expected, TYPE='JOB'):
    """
    Run the same mask analysis three ways — default, explicitly sparse and
    explicitly dense mask handling — and check each result against *expected*.
    """
    if TYPE == 'UDF':
        dtype = UDF.USE_NATIVE_DTYPE
    else:
        dtype = np.result_type(dataset.dtype, mask.dtype)

    # One kwargs dict per mask-handling variant
    variants = (
        dict(factories=[lambda: mask]),
        dict(factories=[lambda: to_sparse(mask)], use_sparse=True),
        dict(factories=[lambda: to_dense(mask)], use_sparse=False),
    )
    analyses = []
    for extra in variants:
        analysis = lt_ctx.create_mask_analysis(dataset=dataset, dtype=dtype, **extra)
        analysis.TYPE = TYPE
        analyses.append(analysis)

    results = [lt_ctx.run(analysis) for analysis in analyses]
    for result in results:
        assert np.allclose(result.mask_0.raw_data, expected)
def _run_mask_test_program(lt_ctx, dataset, mask, expected):
    """
    Run the same mask analysis three ways — default, explicitly sparse and
    explicitly dense mask handling — and check each result against *expected*.
    """
    # One kwargs dict per mask-handling variant
    variants = (
        dict(factories=[lambda: mask]),
        dict(factories=[lambda: to_sparse(mask)], use_sparse=True),
        dict(factories=[lambda: to_dense(mask)], use_sparse=False),
    )
    analyses = [
        lt_ctx.create_mask_analysis(dataset=dataset, **extra)
        for extra in variants
    ]
    results = [lt_ctx.run(analysis) for analysis in analyses]
    for result in results:
        assert np.allclose(result.mask_0.raw_data, expected)
def _compute_masks(self): """ Call mask factories and combine to mask stack Returns ------- a list of masks with contents as they were created by the factories and converted uniformly to dense or sparse matrices depending on ``self.use_sparse``. """ # Make sure all the masks are either sparse or dense # If the use_sparse property is set to Ture or False, # it takes precedence. # If it is None, use sparse only if all masks are sparse # and set the use_sparse property accordingly default_sparse = 'scipy.sparse' if callable(self.mask_factories): raw_masks = self.mask_factories() if not is_sparse(raw_masks): default_sparse = False mask_slices = [raw_masks] else: mask_slices = [] for f in self.mask_factories: m = f() # Scipy.sparse is always 2D, so we have to convert here # before reshaping if scipy.sparse.issparse(m): m = sparse.COO.from_scipy_sparse(m) # We reshape to be a stack of 1 so that we can unify code below m = m.reshape((1, ) + m.shape) if not is_sparse(m): default_sparse = False mask_slices.append(m) if self._use_sparse is None: self._use_sparse = default_sparse if self.use_sparse: # Conversion to correct back-end will happen later # Use sparse.pydata because it implements the array interface # which makes mask handling easier masks = sparse.concatenate([to_sparse(m) for m in mask_slices]) else: masks = np.concatenate([to_dense(m) for m in mask_slices]) return masks
def _compute_masks(self):
    """
    Call mask factories and convert the results to the dataset dtype.

    Returns
    -------
    mask stack
        Masks as created by the factories, concatenated along a leading
        stack axis and converted uniformly to dense or sparse matrices
        depending on ``self.use_sparse``.
    """
    def _prepare(mask):
        # scipy.sparse matrices are strictly 2D, so convert them to
        # sparse.COO before prepending the stack axis of length 1
        if scipy.sparse.issparse(mask):
            mask = sparse.COO.from_scipy_sparse(mask)
        return mask.reshape((1, ) + mask.shape)

    if callable(self.mask_factories):
        # A single callable produces the whole mask stack at once
        stack = [self.mask_factories().astype(self.dtype)]
        all_sparse = is_sparse(stack[0])
    else:
        stack = [_prepare(f().astype(self.dtype)) for f in self.mask_factories]
        all_sparse = all(is_sparse(part) for part in stack)

    # An explicit True/False on use_sparse takes precedence; when unset,
    # stay sparse only if every mask is already sparse, and record that
    if self.use_sparse is None:
        self.use_sparse = all_sparse

    if self.use_sparse:
        return sparse.concatenate([to_sparse(part) for part in stack])
    return np.concatenate([to_dense(part) for part in stack])
def _naive_mask_apply(masks, data):
    """
    Reference implementation: apply each mask to every frame with explicit
    Python loops and a flat dot product.

    Parameters
    ----------
    masks : list of masks
        Each mask must match the frame shape ``data.shape[2:]``.
    data : 4d array
        Input data laid out as (scan_y, scan_x, frame_y, frame_x).

    Returns
    -------
    numpy.ndarray
        Array of shape (num_masks, scan_y, scan_x).
    """
    assert len(data.shape) == 4
    for m in masks:
        assert m.shape == data.shape[2:], "mask doesn't fit frame size"

    dtype = np.result_type(*[m.dtype for m in masks], data.dtype)
    scan_ys, scan_xs = data.shape[0], data.shape[1]
    res = np.zeros((len(masks), ) + tuple(data.shape[:2]), dtype=dtype)
    for idx, m in enumerate(masks):
        # Densify once per mask; the flattened dot is identical to the
        # original per-pixel computation
        flat_mask = to_dense(m).ravel()
        for y in range(scan_ys):
            for x in range(scan_xs):
                res[idx, y, x] = data[y, x].ravel().dot(flat_mask)
    return res