def shape_divide(arr, scale, reduction='mean'):
    '''Scale down an array (shape N x M x ...) by the specified scale
    in each dimension (n x m x ...).

    Each dimension in arr must be divisible by its scale
    (throws an error otherwise).

    This reduces each sub-array (n x m x ...) to a single element
    according to the reduction parameter, which is one of:
     * mean (default): mean of each sub-array
     * median: median of each sub-array
     * first: the [0,0,0, ...] element of the sub-array
     * all: all the possible (N x M x ...) sub-arrays; returns an
       array of shape (n, m, ..., N, M, ...)

    This is a downsampling operation, similar to scipy.misc.imresize
    and scipy.ndimage.interpolate.
    '''
    arr = np.asanyarray(arr)
    reduction_options = ['mean', 'median', 'first', 'all']
    assert reduction in reduction_options, \
        'reduction must be one of: ' + ' '.join(reduction_options)
    # Broadcast a scalar scale to one entry per dimension of arr
    scale = coerce_to_target_length(scale, arr.ndim)
    assert all([sh % sc == 0 for sh, sc in zip(arr.shape, scale)]), \
        'all dimensions must be divisible by their respective scale!'
    # Interleave (size // scale, scale) pairs, e.g. (N, M) with scale
    # (n, m) becomes (N//n, n, M//m, m)
    new_shape = flatten([sh // sc, sc] for sh, sc in zip(arr.shape, scale))
    # Group pixels into smaller sub-arrays that can then be modified
    # by standard operations
    subarrays = _transpose_interleaved(arr.reshape(new_shape))
    # NOTE: np.product was removed in NumPy 2.0; np.prod is the
    # supported spelling
    flat_subarrays = subarrays.reshape([np.prod(scale)] + new_shape[::2])
    return (np.mean(flat_subarrays, axis=0) if reduction == 'mean' else
            np.median(flat_subarrays, axis=0) if reduction == 'median' else
            flat_subarrays[0] if reduction == 'first' else
            subarrays if reduction == 'all' else
            None)  # unreachable: reduction already validated above
def zeroFill(t, a, sc):
    '''Zero out the array t in place, then assign a at the "middle"
    index (i // 2) of each scale axis.

    t is expected to have interleaved dimensions in (size, scale)
    pairs (as produced by shape_divide's reshape), i.e. one full axis
    followed by one scale axis for each entry of sc.

    Returns t (modified in place).
    '''
    t *= 0
    # Build the index tuple: a full slice for each "size" axis paired
    # with the middle position of each scale axis.
    # NOTE: this must be a tuple — indexing an ndarray with a *list*
    # of slices was deprecated and is an error in modern NumPy.
    middle_index = []
    for i in sc:
        middle_index.extend((slice(None), i // 2))
    t[tuple(middle_index)] = a
    return t
def GetTriangleBorderPoints(p0, p1, p2):
    '''Rasterize the three edges of a triangle with Bresenham's
    algorithm and return the unique border points (as tuples,
    duplicates removed).'''
    unique_pts = set()
    for start, end in ((p0, p1), (p1, p2), (p2, p0)):
        unique_pts.update(totuple(BresenhamFunction(start, end)))
    return list(unique_pts)