def invert_epochs(epochs, end=None):
    """inverts an epoch set

    The first epoch will be mapped to [0, start of first epoch] and the last
    will be mapped to [end of last epoch, :end:]. Epochs that would become
    negative or zero-length are omitted.

    :type epochs: ndarray
    :param epochs: epoch set to invert
    :type end: int
    :param end: If not None, it is taken as the end of the last epoch, else
        max(index-dtype) is taken instead.

        Default=None
    :returns: ndarray - inverted epoch set
    """

    # checks
    if end is None:
        end = sp.iinfo(INDEX_DTYPE).max
    else:
        end = INDEX_DTYPE.type(end)

    # flip them
    rval = sp.vstack((
        sp.concatenate(([0], epochs[:, 1])),
        sp.concatenate((epochs[:, 0], [end])))).T
    return (rval[rval[:, 1] - rval[:, 0] > 0]).astype(INDEX_DTYPE)
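# Usage sketch: a hypothetical call to invert_epochs with made-up epoch
# values; assumes sp and INDEX_DTYPE from the package constants are in scope.
def _example_invert_epochs():
    epochs = sp.array([[10, 20], [30, 40]])
    inverted = invert_epochs(epochs, end=50)
    # expected result: [[0, 10], [20, 30], [40, 50]], i.e. the gaps before,
    # between and after the two input epochs
    return inverted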
def _generate_masked_mesh(self, cell_mask=None):
    r"""
    Generates the mesh based on the cell mask provided
    """
    #
    if cell_mask is None:
        cell_mask = sp.ones(self.data_map.shape, dtype=bool)
    #
    # initializing arrays
    self._edges = sp.ones(0, dtype=str)
    self._merge_patch_pairs = sp.ones(0, dtype=str)
    self._create_blocks(cell_mask)
    #
    # building face arrays
    mapper = sp.ravel(sp.array(cell_mask, dtype=int))
    mapper[mapper == 1] = sp.arange(sp.count_nonzero(mapper))
    mapper = sp.reshape(mapper, (self.nz, self.nx))
    mapper[~cell_mask] = -sp.iinfo(int).max
    #
    boundary_dict = {
        'bottom': {'bottom': mapper[0, :][cell_mask[0, :]]},
        'top': {'top': mapper[-1, :][cell_mask[-1, :]]},
        'left': {'left': mapper[:, 0][cell_mask[:, 0]]},
        'right': {'right': mapper[:, -1][cell_mask[:, -1]]},
        'front': {'front': mapper[cell_mask]},
        'back': {'back': mapper[cell_mask]},
        'internal': {'bottom': [], 'top': [], 'left': [], 'right': []}
    }
    #
    # determining cells linked to a masked cell
    cell_mask = sp.where(~sp.ravel(cell_mask))[0]
    inds = sp.in1d(self._field._cell_interfaces, cell_mask)
    inds = sp.reshape(inds, (len(self._field._cell_interfaces), 2))
    inds = inds[:, 0].astype(int) + inds[:, 1].astype(int)
    inds = (inds == 1)
    links = self._field._cell_interfaces[inds]
    #
    # adjusting order so masked cells are all on links[:, 1]
    swap = sp.in1d(links[:, 0], cell_mask)
    links[swap] = links[swap, ::-1]
    #
    # setting side based on index difference
    sides = sp.ndarray(len(links), dtype='<U6')
    sides[sp.where(links[:, 1] == links[:, 0] - self.nx)[0]] = 'bottom'
    sides[sp.where(links[:, 1] == links[:, 0] + self.nx)[0]] = 'top'
    sides[sp.where(links[:, 1] == links[:, 0] - 1)[0]] = 'left'
    sides[sp.where(links[:, 1] == links[:, 0] + 1)[0]] = 'right'
    #
    # adding each block to the internal face dictionary
    inds = sp.ravel(mapper)[links[:, 0]]
    for side, block_id in zip(sides, inds):
        boundary_dict['internal'][side].append(block_id)
    self.set_boundary_patches(boundary_dict, reset=True)
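# Illustration (hypothetical, standalone): how a masked neighbour's side is
# classified from the flat-index difference on a grid nx cells wide,
# mirroring the logic in _generate_masked_mesh above.
def _example_side_from_index_offset(nx=4):
    links = sp.array([[5, 1], [5, 9], [5, 4], [5, 6]])
    sides = sp.ndarray(len(links), dtype='<U6')
    sides[sp.where(links[:, 1] == links[:, 0] - nx)[0]] = 'bottom'
    sides[sp.where(links[:, 1] == links[:, 0] + nx)[0]] = 'top'
    sides[sp.where(links[:, 1] == links[:, 0] - 1)[0]] = 'left'
    sides[sp.where(links[:, 1] == links[:, 0] + 1)[0]] = 'right'
    # expected: ['bottom', 'top', 'left', 'right']
    return sides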
def set_boundary_patches(self, boundary_blocks, reset=False):
    r"""
    Sets up boundary patches based on the dictionary passed in. Overlapping
    declarations are overwritten by the last patch to use that face.
    The boundary blocks dictionary contains a dictionary entry for each
    patch name.

    - boundary_blocks dictionary has the format of:
          {patch_name: {
              <side>: [block-list],
              <side>: [block-list],
              ...
          },
          ...
          }
      where <side> is left, right, bottom, top, front or back and the
      block list contains the blocks to add that patch to on the given side.

    - reset - boolean : if True then the face labels dictionary and
      _faces array are re-initialized to default values
    """
    #
    offsets = {
        'bottom': (0, (0, 1, 2, 3)),
        'back': (1, (0, 1, 5, 4)),
        'right': (2, (1, 2, 6, 5)),
        'front': (3, (3, 2, 6, 7)),
        'left': (4, (0, 3, 7, 4)),
        'top': (5, (4, 5, 6, 7)),
    }
    #
    # re-initializing all face labels
    num_faces = 6 * len(self._blocks)
    if reset:
        self._faces = sp.ones((num_faces, 4), dtype=int) * -sp.iinfo(int).max
        self.face_labels = {}
    #
    # adding any new face labels to the dictionary
    for patch_name in boundary_blocks.keys():
        key = 'boundary.' + patch_name
        if key not in self.face_labels.keys():
            self.face_labels[key] = sp.zeros(num_faces, dtype=bool)
    #
    # setting new face labels
    for patch_name, side_dict in boundary_blocks.items():
        for side, blocks in side_dict.items():
            indices = sp.array(blocks, dtype=int) * 6 + offsets[side][0]
            face_verts = self._blocks[blocks][:, offsets[side][1]]
            self._faces[indices] = face_verts
            self.face_labels['boundary.' + patch_name][indices] = True
    #
    # preventing overlapping face labels by unsetting the current patch's
    # faces from every other label
    for patch_name in boundary_blocks.keys():
        indices = self.face_labels['boundary.' + patch_name]
        for key in self.face_labels.keys():
            if key == 'boundary.' + patch_name:
                continue
            self.face_labels[key][indices] = False
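# Illustration (hypothetical, standalone): how face indices are derived in
# set_boundary_patches. Each block owns six consecutive rows of _faces, so a
# patch on a given side selects block * 6 + side_offset.
def _example_face_indices():
    blocks = sp.array([0, 3, 7], dtype=int)
    top_offset = 5  # 'top' is the sixth face of every block
    face_indices = blocks * 6 + top_offset
    # expected: [5, 23, 47]
    return face_indices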
def epochs_from_spiketrain(st, cut, end=None, with_corrected_st=False):
    """yields the epoch set, given a spiketrain and cut parameter

    :type st: ndarray
    :param st: spiketrain as 1d array
    :type cut: tuple
    :param cut: 2-tuple of cutting parameters, (cut_left, cut_right).
        Spike epochs will be generated by applying cut_left and cut_right
        to the spike time. If an int is given, a symmetric cut tuple is
        assumed.
    :type end: int
    :param end: to determine potential problems with epochs overlapping
        data boundaries. If an event in the spiketrain is closer to 0 than
        :cut[0]: or closer to :end: than :cut[1]:, the corresponding epoch
        will be omitted. If None, :end: will be set to max(INDEX_DTYPE)

        Default=None
    :type with_corrected_st: bool
    :param with_corrected_st: if True, also return the corrected spiketrain
        obtained by omitting spike events that cannot generate valid spike
        epochs given the passed cut settings.

        Default=False
    :returns: ndarray - epoch set of valid spike epochs, and if
        :with_corrected_st: is True additionally the corrected spiketrain
    """

    # checks
    st = sp.asarray(st)
    cut = get_cut(cut)
    if end is None:
        end = sp.iinfo(INDEX_DTYPE).max
    else:
        end = INDEX_DTYPE.type(end)

    # return the epochs for the spiketrain
    st_ok = (st >= cut[0]) * (st < end - cut[1])
    rval = sp.vstack((
        st[st_ok] - cut[0],
        st[st_ok] + cut[1])).T.astype(INDEX_DTYPE)
    ## FIX: astype sometimes handles float entries inconsistently! Take care
    ## to pass spiketrains as integer arrays!
    ## We correct spike epochs to be of length sum(cut) by pruning the start
    ## of the epoch.
    tf = sum(cut)
    for i in range(rval.shape[0]):
        if rval[i, 1] - rval[i, 0] != tf:
            rval[i, 0] = rval[i, 1] - tf
    ## XIF
    if with_corrected_st is True:
        return rval, st[st_ok]
    else:
        return rval
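# Usage sketch: a hypothetical call with made-up spike times; assumes sp,
# INDEX_DTYPE and get_cut are in scope alongside epochs_from_spiketrain.
def _example_epochs_from_spiketrain():
    st = sp.array([5, 100, 995])
    epochs, st_ok = epochs_from_spiketrain(
        st, (10, 10), end=1000, with_corrected_st=True)
    # the events at 5 and 995 are dropped because their epochs would cross
    # the data boundaries; expected: epochs == [[90, 110]], st_ok == [100]
    return epochs, st_ok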
def generate_offset_map(nonzero_locs, shape):
    r"""
    Creates a map storing the index of the lowest y-axis pixel in an
    X-Z column.
    """
    #
    logger.info('creating initial offset map')
    #
    x_coords, y_coords, z_coords = sp.unravel_index(nonzero_locs, shape)
    data = sp.ones(shape, dtype=sp.uint16) * sp.iinfo(sp.int16).max
    data[x_coords, y_coords, z_coords] = y_coords
    #
    offset_map = sp.zeros((shape[0], shape[2]), dtype=sp.int16)
    for z_index in range(shape[2]):
        offset_map[:, z_index] = sp.amin(data[:, :, z_index], axis=1)
        offset_map[:, z_index][offset_map[:, z_index] > shape[1]] = -1
    #
    return offset_map
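# Usage sketch: a hypothetical call on a tiny made-up volume with two
# nonzero voxels; assumes sp and logger are in scope.
def _example_generate_offset_map():
    shape = (2, 3, 2)
    image = sp.zeros(shape, dtype=bool)
    image[0, 1, 0] = True  # lowest y in column (x=0, z=0) is 1
    image[1, 2, 1] = True  # lowest y in column (x=1, z=1) is 2
    nonzero_locs = sp.where(image.ravel())[0]
    offsets = generate_offset_map(nonzero_locs, shape)
    # expected: offsets[0, 0] == 1, offsets[1, 1] == 2 and -1 for the two
    # empty columns
    return offsets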
def create_offset_map(self, no_data_fill=0):
    r"""
    Creates an offset map by storing the lowest voxel in each X-Z column.

    Parameters:
        no_data_fill (numeric) - a value to use as the offset when a column
            has no fracture voxels; sp.nan or sp.inf can be used.
    """
    # getting coordinates of all fracture voxels
    x_c, y_c, z_c = self.get_fracture_voxels(coordinates=True)
    #
    # recreating 3-D array with y coordinate as data values
    data = sp.ones(self.shape, dtype=sp.uint16) * sp.iinfo(sp.int16).max
    data[x_c, y_c, z_c] = y_c
    del x_c, y_c, z_c
    #
    # generating offset map from data
    offset_map = sp.zeros((self.nx, self.nz), dtype=float)
    for z_ind in range(self.nz):
        offset_map[:, z_ind] = sp.amin(data[:, :, z_ind], axis=1)
        offset_map[:, z_ind][offset_map[:, z_ind] > self.ny] = no_data_fill
    #
    return offset_map.T
def generate_adjacency_matrix(conns, nonzero_locs):
    r"""
    generates an adjacency matrix based on the connectivity array
    """
    msg = 're-indexing connections array from absolute to relative indices'
    logger.info(msg)
    mapper = sp.ones(nonzero_locs[-1] + 1, dtype=sp.uint32)
    mapper *= sp.iinfo(sp.uint32).max
    mapper[nonzero_locs] = sp.arange(nonzero_locs.size, dtype=sp.uint32)
    conns[:, 0] = mapper[conns[:, 0]]
    conns[:, 1] = mapper[conns[:, 1]]
    del mapper
    #
    logger.info('creating adjacency matrix...')
    num_blks = nonzero_locs.size
    row = sp.append(conns[:, 0], conns[:, 1])
    col = sp.append(conns[:, 1], conns[:, 0])
    weights = sp.ones(conns.size)  # using size automatically multiplies by 2
    #
    # generate sparse adjacency matrix in COO format and convert to CSR
    adj_mat = sprs.coo_matrix((weights, (row, col)), (num_blks, num_blks))
    adj_mat = adj_mat.tocsr()
    #
    return adj_mat
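# Usage sketch: a hypothetical chain of three voxels with made-up absolute
# indices; assumes sp, sprs (scipy.sparse) and logger are in scope. Note
# that generate_adjacency_matrix re-indexes conns in place, hence the copy.
def _example_generate_adjacency_matrix():
    conns = sp.array([[10, 12], [12, 15]], dtype=sp.uint32)
    nonzero_locs = sp.array([10, 12, 15], dtype=sp.uint32)
    adj_mat = generate_adjacency_matrix(conns.copy(), nonzero_locs)
    # expected: a symmetric 3x3 CSR matrix with ones linking relative
    # indices 0-1 and 1-2
    return adj_mat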
def _setup_region(self, region_id, z_slice, x_slice, **kwargs):
    r"""
    sets up an individual mesh region
    """
    #
    sides = ['left', 'right', 'bottom', 'top']
    patches = ['mergeLR{}', 'mergeRL{}', 'mergeBT{}', 'mergeTB{}']
    labels = ['boundary.' + patch for patch in patches]
    external_patches = {side: side for side in sides}
    #
    # setting offset values and region
    x_offset = x_slice.start
    z_offset = z_slice.start
    region = DataFieldRegion(self.data_map[z_slice, x_slice],
                             self.point_data[z_slice, x_slice, :])
    offset_reg = DataFieldRegion(self.offset_map[z_slice, x_slice],
                                 self.offset_points[z_slice, x_slice, :])
    #
    # creating regional mesh
    args = [region, self.avg_fact, x_offset, z_offset,
            self.mesh_params, offset_reg]
    region_mesh = BlockMeshRegion(*args)
    region_mesh._generate_masked_mesh(cell_mask=self._mask[z_slice, x_slice])
    #
    # need to test for holes on merge boundaries and change patch to internal
    # creating map indexed 1:_blocks.size but with shape of (nz, nx)
    mesh_map = sp.ones(region_mesh.data_vector.size, dtype=int)
    mesh_map *= -sp.iinfo(int).max
    inds = sp.where(region_mesh.data_vector > 0)[0]
    mesh_map[inds] = sp.arange(inds.size)
    mesh_map = sp.reshape(mesh_map, region_mesh.data_map.shape)
    boundary_dict = {
        'internal': {'bottom': [], 'top': [], 'left': [], 'right': []}
    }
    #
    # updating patches
    sides = {}
    if x_slice.start != 0:
        sides['left'] = 0
        for iz in range(region_mesh.nz):
            IZ = iz + z_offset
            IX = x_offset
            if self._mask[IZ, IX] and not self._mask[IZ, IX - 1]:
                boundary_dict['internal']['left'].append(mesh_map[iz, 0])
    #
    if x_slice.stop != self.nx:
        sides['right'] = 1
        for iz in range(region_mesh.nz):
            IZ = iz + z_offset
            IX = x_offset + region_mesh.nx - 1
            if self._mask[IZ, IX] and not self._mask[IZ, IX + 1]:
                boundary_dict['internal']['right'].append(mesh_map[iz, -1])
    #
    if z_slice.start != 0:
        sides['bottom'] = 2
        for ix in range(region_mesh.nx):
            IZ = z_offset
            IX = ix + x_offset
            if self._mask[IZ, IX] and not self._mask[IZ - 1, IX]:
                boundary_dict['internal']['bottom'].append(mesh_map[0, ix])
    #
    if z_slice.stop != self.nz:
        sides['top'] = 3
        for ix in range(region_mesh.nx):
            IZ = z_offset + region_mesh.nz - 1
            IX = ix + x_offset
            if self._mask[IZ, IX] and not self._mask[IZ + 1, IX]:
                boundary_dict['internal']['top'].append(mesh_map[-1, ix])
    #
    face_labels = region_mesh.face_labels
    for side, index in sides.items():
        label = labels[index].format(region_id)
        external_patches[side] = patches[index].format(region_id)
        region_mesh.mesh_params[label + '.type'] = 'empty'
        region_mesh.face_labels[label] = face_labels.pop('boundary.' + side)
    region_mesh.set_boundary_patches(boundary_dict)
    #
    # setting up initial MergeGroup as an individual region
    group = MergeGroup(region_id, external_patches, kwargs.get('path', '.'))
    self.merge_groups.append(group)
    #
    return region_mesh
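# Illustration (hypothetical, standalone): how the mesh_map in _setup_region
# assigns consecutive block ids to active cells while filling masked cells
# with a large negative sentinel.
def _example_mesh_map():
    data_vector = sp.array([0.0, 1.2, 0.0, 3.4, 0.7])
    mesh_map = sp.ones(data_vector.size, dtype=int) * -sp.iinfo(int).max
    inds = sp.where(data_vector > 0)[0]
    mesh_map[inds] = sp.arange(inds.size)
    # expected: [sentinel, 0, sentinel, 1, 2]
    return mesh_map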
import logging

import scipy as sp

##---PACKAGE-LOGGING

logging.basicConfig(level=logging.DEBUG, format='')
log = logging.getLogger('BOTMpy')

##---CONSTANTS

## index type

INDEX_DTYPE = sp.dtype(sp.int64)

## integer max values

SI8MAX = sp.iinfo(sp.int8).max
SI16MAX = sp.iinfo(sp.int16).max
SI32MAX = sp.iinfo(sp.int32).max
SI64MAX = sp.iinfo(sp.int64).max
UI8MAX = sp.iinfo(sp.uint8).max
UI16MAX = sp.iinfo(sp.uint16).max
UI32MAX = sp.iinfo(sp.uint32).max
UI64MAX = sp.iinfo(sp.uint64).max

## CLASSES

class VERBOSE(object):
    """verbosity manager"""

    # default modes
def __array_finalize__(self, obj):
    #
    # setting the type of integer that fits the flattened array index
    itype = sp.uint32 if (self.size < sp.iinfo(sp.uint32).max) else sp.uint
    self.index_int_type = itype
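# Illustration (hypothetical, standalone): the same size check applied to a
# plain integer; sizes beyond the uint32 maximum (~4.29e9 elements) fall
# back to the platform-sized sp.uint.
def _example_index_int_type(size=5 * 10 ** 9):
    return sp.uint32 if size < sp.iinfo(sp.uint32).max else sp.uint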