def create_unique_global_array(arr):
    """
    Create a distributed NumPy array containing unique elements. If the rank has no unique items, an array with
    zero elements will be returned. This call is collective across the current VM.

    :param arr: Input array for the unique operation.
    :type arr: :class:`numpy.ndarray`
    :rtype: :class:`numpy.ndarray`
    :raises: ValueError
    """
    from ocgis import vm

    if arr is None:
        raise ValueError('Input must be a NumPy array.')

    from ocgis.vmachine.mpi import rank_print

    rank_print('starting np.unique')
    unique_local = np.unique(arr)
    rank_print('finished np.unique')

    rank_print('waiting at barrier 1')
    vm.barrier()

    tag_unique_count = MPITag.UNIQUE_GLOBAL_COUNT
    tag_unique_check = MPITag.UNIQUE_GLOBAL_CHECK

    # Each rank takes a turn as the root. Other ranks drop any local values the root already owns, so each unique
    # value ends up owned by a single rank.
    for root_rank in vm.ranks:
        rank_print('root_rank=', root_rank)
        if vm.rank == root_rank:
            has_unique_local = len(unique_local) != 0
        else:
            has_unique_local = None
        has_unique_local = vm.bcast(has_unique_local, root=root_rank)

        if has_unique_local:
            if vm.rank == root_rank:
                # Send the count of unique values, then each value, to every other rank.
                for rank in vm.ranks:
                    if rank != vm.rank:
                        vm.comm.send(len(unique_local), dest=rank, tag=tag_unique_count)
                for u in unique_local:
                    for rank in vm.ranks:
                        if rank != vm.rank:
                            vm.comm.send(u, dest=rank, tag=tag_unique_check)
            else:
                # Remove any local values that the current root rank already holds.
                recv_count = vm.comm.recv(source=root_rank, tag=tag_unique_count)
                for _ in range(recv_count):
                    u = vm.comm.recv(source=root_rank, tag=tag_unique_check)
                    if u in unique_local:
                        select = np.invert(unique_local == u)
                        unique_local = unique_local[select]

    rank_print('waiting at barrier 2')
    vm.barrier()

    return unique_local
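
# A minimal usage sketch for create_unique_global_array, assuming the script is launched collectively (e.g.
# ``mpirun -n 4 python <this script>``) and that ``np``, ``vm``, and ``rank_print`` are available at module level as
# elsewhere in this file. The helper name ``demo_unique_global`` and the per-rank test data are hypothetical and only
# illustrate the expected collective behavior: after the call, each value is held by at most one rank.
def demo_unique_global():
    # Overlapping per-rank data; with 2 ranks, rank 0 holds [0, 1, 2, 3] and rank 1 holds [2, 3, 4, 5].
    local_data = np.arange(vm.rank * 2, vm.rank * 2 + 4)
    unique_on_rank = create_unique_global_array(local_data)
    rank_print('globally unique values owned by this rank:', unique_on_rank)

    # Gathering the per-rank results on rank 0 should reproduce np.unique over the concatenated global data.
    gathered = vm.gather(unique_on_rank)
    if vm.rank == 0:
        rank_print('all unique values:', np.sort(np.concatenate(gathered)))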
def main():
    rd = RequestDataset(IN_PATH, driver=DriverNetcdfUGRID, grid_abstraction=GridAbstraction.POINT)
    field = rd.get()
    foo = '/tmp/foo.nc'
    # assert field.grid.cindex is not None
    # print field.grid.archetype
    # tkk
    print(field.shapes)

    sub = field.grid.get_intersects(box(*BBOX), optimized_bbox_subset=True).parent
    with vm.scoped_by_emptyable('reduce global', sub):
        if not vm.is_null:
            sub.grid_abstraction = GridAbstraction.POLYGON
            # rank_print('sub.grid.abstraction', sub.grid.abstraction)
            # rank_print('sub.grid._abstraction', sub.grid._abstraction)
            # rank_print('archetype', sub.grid.archetype)
            # rank_print(sub.grid.extent)
            rank_print('sub', sub.grid.cindex.get_value())
            subr = sub.grid.reduce_global().parent
            rank_print('sub', subr.grid.cindex.get_value())
            # rank_print(subr.x.name)
            # rank_print(subr.x.get_value().min())
            rank_print(subr.grid.extent)
            # rank_print(subr.grid.cindex.get_value())
            # rank_print(subr.shapes)
            # subr.write(foo)
            # if vm.rank == 0:
            #     RequestDataset(foo).inspect()
    vm.barrier()
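
# The scoped_by_emptyable pattern used in main() is worth isolating: ranks whose target object is empty are excluded
# from the scoped VM, so collective calls inside the block only involve ranks that actually hold data. This is a
# minimal sketch, assuming ``vm`` is the module-level ocgis virtual machine imported elsewhere in this file;
# ``maybe_empty`` is expected to be an ocgis object (such as a subset field) and the helper name is hypothetical.
def demo_scoped_by_emptyable(maybe_empty):
    with vm.scoped_by_emptyable('demo scope', maybe_empty):
        if vm.is_null:
            # This rank contributed nothing and skips the collective work inside the scope.
            rank_print('rank excluded from the scoped VM')
        else:
            rank_print('ranks participating in the scoped VM:', vm.ranks)
    # All ranks, empty or not, synchronize once the scope exits.
    vm.barrier()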
def rank_print(*args, **kwargs):
    # Thin module-level wrapper: defer the import to call time and forward all arguments to the MPI-aware printer.
    from ocgis.vmachine.mpi import rank_print
    rank_print(*args, **kwargs)
def get_subset(bbox, subset_filename, buffer_width, rhs_tol=10.):
    rd = ocgis.RequestDataset(uri=IN_PATH)
    rd.metadata['dimensions']['nlandmesh_face']['dist'] = True
    vc = rd.get_raw_field()

    # ------------------------------------------------------------------------------------------------------------------
    # Subset the face centers and accumulate the indices of face centers occurring inside the bounding box selection.

    start_index = vc[MESHVAR].attrs.get('start_index', 0)

    # Stores indices of faces contained in the bounding box.
    px = vc[FACE_CENTER_X].extract().get_value()
    py = vc[FACE_CENTER_Y].extract().get_value()

    # Handle bounding box wrapping. This requires creating two bounding boxes to capture the left and right sides of
    # the sphere.
    buffered_bbox = box(*bbox).buffer(buffer_width).envelope.bounds
    if buffered_bbox[0] < 0:
        bbox_rhs = list(deepcopy(buffered_bbox))
        bbox_rhs[0] = buffered_bbox[0] + 360.
        bbox_rhs[2] = 360. + rhs_tol
        bboxes = [buffered_bbox, bbox_rhs]
    else:
        bboxes = [buffered_bbox]

    initial = None
    for ctr, curr_bbox in enumerate(bboxes):
        select = create_boolean_select_array(curr_bbox, px, py, initial=initial)
        initial = select

    # ------------------------------------------------------------------------------------------------------------------
    # Use the selection criteria to extract associated nodes and reindex the new coordinate arrays.

    from ocgis.vmachine.mpi import rank_print

    # Retrieve the live ranks following the subset.
    has_select = ocgis.vm.gather(select.any())
    if ocgis.vm.rank == 0:
        live_ranks = np.array(ocgis.vm.ranks)[has_select]
    else:
        live_ranks = None
    live_ranks = ocgis.vm.bcast(live_ranks)

    with ocgis.vm.scoped('live ranks', live_ranks):
        if not ocgis.vm.is_null:
            has_subset = True
            rank_print('live ranks:', ocgis.vm.ranks)

            sub = vc[FACE_NODE].get_distributed_slice([select, slice(None)]).parent
            cindex = sub[FACE_NODE]
            cindex_original_shape = cindex.shape
            cindex_value = cindex.get_value().flatten()
            if start_index > 0:
                cindex_value -= start_index
            vc_coords = vc[XVAR][cindex_value].parent
            archetype_dim = vc_coords[XVAR].dimensions[0]

            arange_dimension = create_distributed_dimension(cindex_value.shape[0], name='arange_dim')
            new_cindex_value = arange_from_dimension(arange_dimension, start=start_index)
            cindex.set_value(new_cindex_value.reshape(*cindex_original_shape))

            new_vc_coords_dimension = create_distributed_dimension(vc_coords[XVAR].shape[0],
                                                                   name=archetype_dim.name,
                                                                   src_idx=archetype_dim._src_idx)
            vc_coords.dimensions[archetype_dim.name] = new_vc_coords_dimension

            # ------------------------------------------------------------------------------------------------------------
            # Format the new variable collection and write out the new data file.

            # Remove old coordinate variables.
            for to_modify in [XVAR, YVAR]:
                sub[to_modify].extract(clean_break=True)
            for to_add in [XVAR, YVAR]:
                var_to_add = vc_coords[to_add].extract()
                sub.add_variable(var_to_add)

            rank_print('start sub.write')
            sub.write(subset_filename)
            rank_print('finished sub.write')

            if ocgis.vm.rank == 0:
                print('subset x extent:', sub[FACE_CENTER_X].extent)
                print('subset y extent:', sub[FACE_CENTER_Y].extent)
                ocgis.RequestDataset(subset_filename).inspect()
        else:
            has_subset = False

    return has_subset
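
# A hedged driver sketch for running this script collectively, e.g. ``mpirun -n 8 python <this script>``. It assumes
# the module-level constants referenced above (IN_PATH, BBOX, and the variable-name constants) are defined near the
# top of the file. The output path and buffer width are illustrative placeholders, and calling main() after the
# subset is only one plausible ordering, not necessarily the original workflow.
if __name__ == '__main__':
    subset_path = '/tmp/ugrid_subset.nc'  # hypothetical output location
    participated = get_subset(BBOX, subset_path, buffer_width=1.)  # 1-degree buffer is an assumed value
    rank_print('rank participated in the subset write:', participated)
    ocgis.vm.barrier()
    main()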