def area_exceeding_by_threshold_stats_reduce(result, thresholds, file_names, scalar):
    if not isinstance(result, numpy.ndarray):
        raise PyStochOperatorError(
            'Invalid result argument in "area exceeding by threshold stats reduce". Received type: %s'
            % type(result))
    if not hasattr(thresholds, '__iter__'):
        raise PyStochOperatorError(
            'Invalid thresholds argument in "area exceeding by threshold stats reduce". Received type: %s'
            % type(thresholds))
    # Decode the fixed-width, newline-terminated file name array into a list
    flist = []
    for fnum in xrange(file_names.shape[0]):
        fname = ''.join(file_names[fnum, :].tolist())
        fname = fname[:fname.index('\n')]
        flist.append(fname)
    while True:
        # Take the map output and record the area exceeding each threshold
        # in the column for this file
        (map_array, reduction_metadata) = (yield)
        fnum = flist.index(reduction_metadata[FILE_NAME])
        #@todo replace with nditer
        for i in xrange(thresholds.shape[0]):
            area_oiled = scalar * numpy.sum(map_array >= thresholds[i])
            result[i, fnum] = area_oiled
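
# Usage sketch (illustrative only, not part of the operator API): file_names
# is assumed to be a 2-D single-character array, one newline-terminated file
# name per row, matching the FILE_NAME entry of each block's reduction
# metadata. Like all coroutines in this module it must be primed with
# .next() before the first send().
def _example_area_reduce_usage():
    names = ['run_a.dat\n', 'run_b.dat\n']
    width = max(len(n) for n in names)
    file_names = numpy.array([list(n.ljust(width, '\n')) for n in names])
    thresholds = numpy.array([0.1, 1.0])
    result = numpy.zeros((thresholds.shape[0], len(names)))
    reducer = area_exceeding_by_threshold_stats_reduce(
        result, thresholds, file_names, scalar=1.0)
    reducer.next()  # prime the coroutine so it pauses at the first yield
    reducer.send((numpy.array([0.5, 2.0]), {FILE_NAME: 'run_a.dat'}))
    # result[:, 0] is now [2., 1.]: two cells >= 0.1, one cell >= 1.0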
def thickest_spillet_op(result):
    if not isinstance(result, numpy.ndarray):
        raise PyStochOperatorError(
            'Invalid result argument in thickest_spillet_op operation. Received type: %s'
            % type(result))
    try:
        if grid_max is not None:
            # Cython implementation: grid-wise max of the spillet thickness
            while True:
                block, index_position, weight, metadata = (yield)
                radius = block['radius']
                spillet_thickness = block['mass'] / block['density'] / (
                    numpy.pi * radius**2) * numpy.ones(weight.shape)
                grid_max.c_grid_max(result, index_position,
                                    spillet_thickness.astype('float32'))
        else:
            # Pure python fallback
            while True:
                block, index_position, weight, metadata = (yield)
                try:
                    spillet_thickness = block['thickness']
                except ValueError:
                    # Spillet type without a thickness field - derive it from
                    # mass, density and radius
                    radius = block['radius']
                    spillet_thickness = block['mass'] / block['density'] / (
                        numpy.pi * radius**2) * numpy.ones(weight.shape)
                blen = index_position.shape[0]
                for i in xrange(blen):
                    # Can't sum - need to take the max value over all time steps
                    result[tuple(index_position[i, :])] = max(
                        result[tuple(index_position[i, :])],
                        spillet_thickness[i])
    except IndexError:
        logger.info('block:\n%s' % block)
        logger.info('index_position:\n%s' % index_position)
        logger.info('weight:\n%s' % weight)
        logger.info('Spillet Thickness: %s' % type(spillet_thickness))
        logger.info('Result Array shape: %s' % list(result.shape))
        logger.error('Hit an index exception in thickest_spillet_op coroutine')
        raise
    except ValueError:
        logger.info('block:\n%s' % block)
        logger.info(
            'Hit a value error during thickest_spillet_op coroutine - probably due to a spillet type that does not have a radius'
        )
        raise
def distribution_reduce(result, bin_coefficient, nsims):
    if not isinstance(result, numpy.ndarray):
        raise PyStochOperatorError(
            'Invalid result argument in distribution_reduce. Received type: %s'
            % type(result))
    dims = result.shape
    if not len(dims) == 3:
        raise PyStochOperatorError(
            'Invalid result argument in distribution_reduce operation. Expected array of rank 3, got rank %s'
            % len(dims))
    grid_dims = dims[1:3]
    nbins = dims[0]
    bin_index = numpy.zeros(grid_dims, dtype=DT.INT32)
    xi, yi = numpy.indices(grid_dims)
    try:
        while True:
            # Take temp as input from an operation and bin it into the result array
            (temp, reduction_metadata) = (yield)
            numpy.floor_divide(numpy.sqrt(temp), bin_coefficient, out=bin_index)
            # Make sure the bin index value never causes an index error
            bin_index[bin_index >= nbins] = nbins - 1
            result[bin_index, xi, yi] += 1
    except GeneratorExit:
        # Offset by one because we want >= the bin value
        result[1:, ...] = result[:-1, ...].cumsum(axis=0)
        # Flip the direction of the cumsum in place - the ufunc interface
        # lets us specify out
        numpy.subtract(nsims, result, out=result)
        result[0, ...] = nsims
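
# Usage sketch (illustrative only): distribution_reduce bins sqrt(temp) by
# bin_coefficient per grid cell; on close() the GeneratorExit handler above
# turns the per-bin counts into "number of simulations >= bin value". result
# is assumed to be (nbins, ny, nx) with temp matching the (ny, nx) grid.
def _example_distribution_reduce_usage():
    nbins, grid_dims = 5, (3, 3)
    result = numpy.zeros((nbins,) + grid_dims, dtype=DT.INT32)
    reducer = distribution_reduce(result, bin_coefficient=1.0, nsims=2)
    reducer.next()
    reducer.send((4.0 * numpy.ones(grid_dims), {}))  # sqrt(4) = 2 -> bin 2
    reducer.send((9.0 * numpy.ones(grid_dims), {}))  # sqrt(9) = 3 -> bin 3
    reducer.close()  # runs the cumsum/flip logic above
    # result[:, 0, 0] is now [2, 2, 2, 1, 0]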
def sum_reduce(result):
    if not isinstance(result, numpy.ndarray):
        raise PyStochOperatorError(
            'Invalid result argument in sum_reduce operation. Received type: %s'
            % type(result))
    while True:
        # Take temp as input from an operation and sum it into the aggregated result array
        (temp, reduction_metadata) = (yield)
        numpy.add(temp, result, out=result)
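
# Usage sketch (illustrative only): the generic lifecycle shared by all of
# the reduce coroutines in this module - construct with the output array,
# prime with .next(), feed (map output, reduction metadata) tuples with
# .send(), and .close() when the simulation set is exhausted.
def _example_sum_reduce_usage():
    result = numpy.zeros((4, 4))
    reducer = sum_reduce(result)
    reducer.next()  # prime: run up to the first yield
    for _ in xrange(3):
        reducer.send((numpy.ones((4, 4)), {}))
    reducer.close()
    # every cell of result is now 3.0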
def probability_reduce(result, exceeds_val):
    if not isinstance(result, numpy.ndarray):
        raise PyStochOperatorError(
            'Invalid result argument in probability_reduce. Received type: %s'
            % type(result))
    # Log the deprecation once, at priming, rather than on every send
    logger.info('probability_reduce is deprecated - use exceed_reduce')
    while True:
        (temp, reduction_metadata) = (yield)
        result[temp > exceeds_val] += 1
def max_shore_grid_thickness_op(result, shore_length, min_time=None):
    if not isinstance(result, numpy.ndarray):
        raise PyStochOperatorError(
            'Invalid result argument in max_shore_grid_thickness_op operation. Received type: %s'
            % type(result))
    aggregator = result.copy()
    aggregator[:] = 0.0
    try:
        while True:
            # Standard arguments passed from the grid_mapper
            block, index_position, weight, metadata = (yield)
            weighted_oil_thickness = block['mass'] / block['density'] / \
                weight / (shore_length * block['shoreline_width'])
            blen = index_position.shape[0]
            for i in xrange(blen):
                aggregator[tuple(index_position[i, :])] += weighted_oil_thickness[i]
            n_of_m_tsteps = metadata[BLOCK][N_OF_M_IN_TS]
            if n_of_m_tsteps[0] == n_of_m_tsteps[1]:
                # This is the last block in the timestep - take the max over
                # all timesteps and reset the aggregator
                if min_time is not None:
                    raise NotImplementedError('Need to implement this function!')
                numpy.maximum(aggregator, result, out=result)
                # Previous method - incorrect!
                #result[aggregator > 0.0] = aggregator[aggregator > 0.0]
                aggregator[:] = 0.0
    except IndexError:
        logger.info('block:\n%s' % block)
        logger.info('index_position:\n%s' % index_position)
        logger.info('weight:\n%s' % weight)
        logger.info('Result Array shape: %s' % list(result.shape))
        logger.error('Hit an index exception in max_shore_grid_thickness_op coroutine')
        raise
def exceed_reduce(result, exceedance_value):
    if not isinstance(result, numpy.ndarray):
        raise PyStochOperatorError(
            'Invalid result argument in exceed_reduce. Received type: %s'
            % type(result))
    try:
        while True:
            # Count each map result that exceeds the exceedance value
            (temp, reduction_metadata) = (yield)
            result[temp > exceedance_value] += 1
    except GeneratorExit:
        pass
def max_reduce(result):
    if not isinstance(result, numpy.ndarray):
        raise PyStochOperatorError(
            'Invalid result argument in max_reduce. Received type: %s'
            % type(result))
    # Figure out what value to use to reset the temp array passed from the map function
    #try:
    #    reset_val = numpy.iinfo(result.dtype).min
    #except ValueError:
    #    reset_val = result.dtype.type('-inf')
    while True:
        # Take temp as input from an operation and take the element-wise
        # max with the aggregated result array
        (temp, reduction_metadata) = (yield)
        numpy.maximum(temp, result, out=result)
def min_time_op(result):
    if not isinstance(result, numpy.ndarray):
        raise PyStochOperatorError(
            'Invalid result argument in min time operation. Received type: %s'
            % type(result))
    try:
        while True:
            # Standard arguments passed from the grid_mapper
            block, index_position, weight, metadata = (yield)
            # Fancy vectorized way of taking the minimum in the cells where
            # particles are indexed
            idx = [index_position[:, i] for i in xrange(index_position.shape[1])]
            result[idx] = numpy.minimum(result[idx], metadata[TIME][ETIME])
    except IndexError:
        logger.info('block:\n%s' % block)
        logger.info('index_position:\n%s' % index_position)
        logger.info('weight:\n%s' % weight)
        logger.info('Result Array shape: %s' % list(result.shape))
        logger.error('Hit an index exception in min_time_op coroutine')
        raise
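
# Usage sketch (illustrative only): min_time_op records the earliest elapsed
# time (metadata[TIME][ETIME]) at which any spillet touched each cell, so
# result should be initialized to a large sentinel such as numpy.inf. TIME
# and ETIME are the metadata keys assumed to be defined at module level.
def _example_min_time_usage():
    result = numpy.zeros((4, 4)) + numpy.inf
    op = min_time_op(result)
    op.next()
    index_position = numpy.array([[1, 2], [3, 0]])
    # block and weight are unused by min_time_op in the happy path
    op.send((None, index_position, None, {TIME: {ETIME: 6.0}}))
    op.send((None, index_position, None, {TIME: {ETIME: 3.0}}))
    # result[1, 2] == result[3, 0] == 3.0; untouched cells remain inf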
def clean_array():
    try:
        while True:
            # Take an array (or metadata dict) and reset it in place
            (temp, reset_value) = (yield)
            if isinstance(temp, numpy.ndarray):
                temp[:] = reset_value
            elif isinstance(temp, dict) and isinstance(reset_value, dict):
                temp.update(reset_value)
            else:
                raise PyStochOperatorError(
                    'Received bad arguments to clean_array: temp - %s; reset_value - %s'
                    % (temp, reset_value))
    except GeneratorExit:
        pass
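
# Usage sketch (illustrative only): a single clean_array coroutine can reset
# any number of result arrays (or metadata dicts) in place between
# simulations.
def _example_clean_array_usage():
    cleaner = clean_array()
    cleaner.next()
    temp = numpy.ones((3, 3))
    cleaner.send((temp, 0.0))           # temp is zeroed in place
    meta = {'count': 7}
    cleaner.send((meta, {'count': 0}))  # dict values reset via update()
    cleaner.close()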
def max_concentration_op(result, cell_depth_range, cell_area):
    """
    Cell depth range is specified positive downward (upper/surface, lower/bottom)
    Cell area is in square meters
    """
    if not isinstance(result, numpy.ndarray):
        raise PyStochOperatorError(
            'Invalid result argument in max_concentration_op. Received type: %s'
            % type(result))
    aggregator = result.copy()
    aggregator[:] = 0.0
    cell_height = cell_depth_range[1] - cell_depth_range[0]
    cell_volume = cell_height * cell_area
    try:
        while True:
            # Standard arguments passed from the grid_mapper
            block, index_position, weighting, metadata = (yield)
            weighted_oil_concentration = block['mass'] / weighting / cell_volume
            if block.shape == tuple():
                # Scalar block: a single spillet weighted across many cells
                if block['zm'] >= cell_depth_range[0] and block['zm'] < cell_depth_range[1]:
                    for i in xrange(index_position.shape[0]):
                        aggregator[tuple(index_position[i, :])] += weighted_oil_concentration[i]
            elif block.shape == (index_position.shape[0], ):
                depth = block['zm']  # assuming positive downward
                in_range = numpy.logical_and(depth >= cell_depth_range[0],
                                             depth < cell_depth_range[1])
                # Tried to do this with nditer but failed. Need to come back
                # to optimize later...
                for i in xrange(len(in_range)):
                    if in_range[i]:
                        index_tuple = tuple(index_position[i, :])
                        aggregator[index_tuple] += weighted_oil_concentration[i]
            else:
                raise IndexError(
                    'Incompatible block shape and index_position shape: %s, %s'
                    % (block.shape, index_position.shape))
            n_of_m_tsteps = metadata[BLOCK][N_OF_M_IN_TS]
            if n_of_m_tsteps[0] == n_of_m_tsteps[1]:
                # This is the last block in the timestep - take the max over
                # all timesteps and reset the aggregator
                numpy.maximum(aggregator, result, out=result)
                aggregator[:] = 0.0
    except IndexError:
        logger.info('block:\n%s' % block)
        logger.info('index_position:\n%s' % index_position)
        logger.info('weighting:\n%s' % weighting)
        logger.info('Result Array shape: %s' % list(result.shape))
        logger.error('Hit an index exception in max_concentration_op coroutine')
        raise
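
# Usage sketch (illustrative only): operators that aggregate within a
# timestep key off metadata[BLOCK][N_OF_M_IN_TS] == (n, m); when n == m the
# block is the last one in its timestep and the aggregator is folded into
# result. BLOCK and N_OF_M_IN_TS are metadata keys assumed to be defined at
# module level; the block dtype below is a minimal stand-in.
def _example_max_concentration_usage():
    result = numpy.zeros((4, 4), dtype='float32')
    op = max_concentration_op(result, cell_depth_range=(0.0, 10.0),
                              cell_area=100.0)
    op.next()
    block = numpy.zeros(2, dtype=[('mass', 'f8'), ('zm', 'f8')])
    block['mass'] = (5.0, 7.0)
    block['zm'] = (2.0, 12.0)  # second spillet is below the cell bottom
    index_position = numpy.array([[1, 1], [2, 2]])
    weighting = numpy.ones(2)
    metadata = {BLOCK: {N_OF_M_IN_TS: (1, 1)}}  # last (only) block in timestep
    op.send((block, index_position, weighting, metadata))
    # result[1, 1] == 5.0 / (10.0 * 100.0); result[2, 2] stays 0.0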
def counter_op(result):
    if not isinstance(result, numpy.ndarray):
        raise PyStochOperatorError(
            'Invalid result argument in count operation. Received type: %s'
            % type(result))
    try:
        if counter is not None:
            logger.info('In cython while loop for counting hits')
            while True:
                # Standard arguments passed from the grid_mapper
                block, index_position, weight, metadata = (yield)
                # Call cython implementation
                counter.c_counter(result, index_position)
        elif sparse is not None:
            logger.info('In scipy while loop for counting hits')
            while True:
                block, index_position, weight, metadata = (yield)
                rank = index_position.shape[1]
                blen = index_position.shape[0]
                da = sparse.coo_matrix(
                    (numpy.ones(blen),
                     [index_position[:, i] for i in xrange(rank)]),
                    result.shape)
                result += da.A
        else:
            logger.info('In pure python while loop for counting hits :-(')
            while True:
                block, index_position, weight, metadata = (yield)
                # Works but slow...
                for i in xrange(index_position.shape[0]):
                    result[tuple(index_position[i, :])] += 1
    except IndexError:
        logger.info('block:\n%s' % block)
        logger.info('index_position:\n%s' % index_position)
        logger.info('weight:\n%s' % weight)
        logger.info('Result Array shape: %s' % list(result.shape))
        logger.error('Hit an index exception in counter_op coroutine')
        raise
    except ValueError:
        logger.info('Result dtype & shape: %s; %s' % (result.dtype, result.shape))
        logger.info('index_position: %s, %s' % (index_position.dtype, index_position.shape))
        logger.info(
            'Value error raised during call to counter.c_counter - usually means one of the two arrays was of the wrong type or dimension'
        )
        raise
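
# Usage sketch (illustrative only): counter_op consumes the standard
# (block, index_position, weight, metadata) tuples from the grid mapper and
# counts hits per cell. Which backend runs (cython counter, scipy sparse, or
# pure python) is decided by the module-level imports; the cython backend
# may expect particular dtypes (see the ValueError handler above), so the
# default float64 result here is only known to suit the scipy and pure
# python paths.
def _example_counter_op_usage():
    result = numpy.zeros((4, 4))
    op = counter_op(result)
    op.next()
    index_position = numpy.array([[0, 0], [0, 0], [2, 3]])
    op.send((None, index_position, None, {}))
    # result[0, 0] == 2.0 and result[2, 3] == 1.0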