def boundary_interaction(self, **kwargs):
    """
    Resolve a particle's proposed movement against the enabled boundaries.

    Mutates ``ending`` in place (shoreline reflection, bottom reflection,
    sea-surface clamp) and finally assigns it as the particle's location.
    Expects ``particle``, ``starting`` and ``ending`` in kwargs, plus the
    movement metrics (distance/angle/azimuth/reverse_azimuth) used by the
    shoreline reaction.
    """
    particle = kwargs.pop('particle')
    starting = kwargs.pop('starting')
    ending = kwargs.pop('ending')

    # Shoreline: if the step crossed the coast, bounce the particle back
    # into the water using the shoreline's react() implementation.
    if self.useshore:
        shore_hit = self._shoreline.intersect(start_point=starting.point,
                                              end_point=ending.point)
        if shore_hit is not None:
            # Record where the particle struck the coast.
            hit_loc = Location4D(point=shore_hit['point'],
                                 time=starting.time + (ending.time - starting.time))
            particle.location = hit_loc

            # This relies on the shoreline to put the particle in water
            # and not on shore.
            reacted = self._shoreline.react(start_point=starting,
                                            end_point=ending,
                                            hit_point=hit_loc,
                                            reverse_distance=self.reverse_distance,
                                            feature=shore_hit['feature'],
                                            distance=kwargs.get('distance'),
                                            angle=kwargs.get('angle'),
                                            azimuth=kwargs.get('azimuth'),
                                            reverse_azimuth=kwargs.get('reverse_azimuth'))
            ending.latitude = reacted.latitude
            ending.longitude = reacted.longitude
            ending.depth = reacted.depth

            if logger.isEnabledFor(logging.DEBUG):
                logger.debug("%s - hit the shoreline at %s. Setting location to %s." % (particle.logstring(), hit_loc.logstring(), ending.logstring()))

    # Bathymetry: reflect off the bottom unless the particle has settled.
    if self.usebathy and not particle.settled:
        if self._bathymetry.intersect(start_point=starting, end_point=ending):
            bounced = self._bathymetry.react(type='reverse',
                                             start_point=starting,
                                             end_point=ending)
            if logger.isEnabledFor(logging.DEBUG):
                logger.debug("%s - hit the bottom at %s. Setting location to %s." % (particle.logstring(), ending.logstring(), bounced.logstring()))
            ending.latitude = bounced.latitude
            ending.longitude = bounced.longitude
            ending.depth = bounced.depth

    # Sea surface: a particle may not rise above depth 0.
    if self.usesurface and ending.depth > 0:
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug("%s - rose out of the water. Setting depth to 0." % particle.logstring())
        ending.depth = 0

    particle.location = ending
def need_data(self, i):
    """
    Return True if the local cache lacks data for time index ``i`` at the
    particle's current location, False if the cache can serve it.

    The lookup is performed while holding a slot in the shared read
    counter so the DataController knows a reader has the cache file open.
    """
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug("Checking cache for data availability at %s." % self.particle.location.logstring())

    try:
        # Tell the DataController that we are going to be reading from the file
        with self.read_lock:
            self.read_count.value += 1
            self.has_read_lock.append(os.getpid())

        self.dataset.opennc()
        # Test if the cache has the data we need.
        # If the point we request contains fill values, we need data.
        cached_lookup = self.dataset.get_values('domain', timeinds=[np.asarray([i])], point=self.particle.location)

        # Reduce once and reuse; the original recomputed this double mean
        # up to four times (twice for debug logging, once for the check).
        lookup_mean = np.mean(np.mean(cached_lookup))

        if logger.isEnabledFor(logging.DEBUG):
            logger.debug("Type of result: %s" % type(cached_lookup))
            logger.debug("Double mean of result: %s" % lookup_mean)
            logger.debug("Type of Double mean of result: %s" % type(lookup_mean))

        # A masked mean means the cached region holds fill values there.
        # isinstance() replaces the fragile `type(...) ==` comparison.
        if isinstance(lookup_mean, np.ma.core.MaskedConstant):
            need = True
            if logger.isEnabledFor(logging.DEBUG):
                logger.debug("I NEED data. Got back: %s" % cached_lookup)
        else:
            need = False
            logger.debug("I DO NOT NEED data")
    except Exception:
        # If the time index doesnt even exist, we need
        need = True
        logger.debug("I NEED data (no time index exists in cache)")
    finally:
        # Always close the cache and release our reader slot.
        self.dataset.closenc()
        with self.read_lock:
            self.read_count.value -= 1
            self.has_read_lock.remove(os.getpid())

    return need  # Returns True if we need data or False if we dont
def need_data(self, i):
    """
    Return True if the local cache lacks data for time index ``i`` at the
    particle's current location, False if the cache can serve it.

    The lookup is performed while holding a slot in the shared read
    counter so the DataController knows a reader has the cache file open.
    """
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug("Checking cache for data availability at %s." % self.particle.location.logstring())

    try:
        # Tell the DataController that we are going to be reading from the file
        with self.read_lock:
            self.read_count.value += 1
            self.has_read_lock.append(os.getpid())

        self.dataset.opennc()
        # Test if the cache has the data we need.
        # If the point we request contains fill values, we need data.
        cached_lookup = self.dataset.get_values('domain', timeinds=[np.asarray([i])], point=self.particle.location)

        # Reduce once and reuse; the original recomputed this double mean
        # up to four times (twice for debug logging, once for the check).
        lookup_mean = np.mean(np.mean(cached_lookup))

        if logger.isEnabledFor(logging.DEBUG):
            logger.debug("Type of result: %s" % type(cached_lookup))
            logger.debug("Double mean of result: %s" % lookup_mean)
            logger.debug("Type of Double mean of result: %s" % type(lookup_mean))

        # A masked mean means the cached region holds fill values there.
        # isinstance() replaces the fragile `type(...) ==` comparison.
        if isinstance(lookup_mean, np.ma.core.MaskedConstant):
            need = True
            if logger.isEnabledFor(logging.DEBUG):
                logger.debug("I NEED data. Got back: %s" % cached_lookup)
        else:
            need = False
            logger.debug("I DO NOT NEED data")
    except Exception:
        # If the time index doesnt even exist, we need
        need = True
        logger.debug("I NEED data (no time index exists in cache)")
    finally:
        # Always close the cache and release our reader slot.
        self.dataset.closenc()
        with self.read_lock:
            self.read_count.value -= 1
            self.has_read_lock.remove(os.getpid())

    return need  # Returns True if we need data or False if we dont
def get_remote_data(self, localvars, remotevars, inds, shape):
    """
    Pull a window of remote data into the local netCDF cache.

    Marks the corresponding region of the cache's 'domain' variable as
    filled, then copies each remote variable into its local counterpart
    over the same time/lat/lon (and depth, for 4D data) window.
    """
    # 'all' grabs the entire xy domain; otherwise a square window of
    # half-width self.horiz_size (default 4, set in the model controller)
    # around the requested grid point.
    if self.horiz_size == 'all':
        y, y_1 = 0, shape[-2]
        x, x_1 = 0, shape[-1]
    else:
        r = self.horiz_size
        x, x_1 = self.point_get.value[2] - r, self.point_get.value[2] + r + 1
        y, y_1 = self.point_get.value[1] - r, self.point_get.value[1] + r + 1
        x, x_1 = x[0], x_1[0]
        y, y_1 = y[0], y_1[0]

    # Clamp the window to the grid extents.
    y = max(y, 0)
    x = max(x, 0)
    y_1 = min(y_1, shape[-2])
    x_1 = min(x_1, shape[-1])

    # Hoist the time bounds; they are used in every slice below.
    t0 = inds[0]
    t1 = inds[-1] + 1

    # Flag the region we are about to fill in the 'domain' variable.
    domain = self.local.variables['domain']
    if len(shape) == 4:
        domain[t0:t1, 0:shape[1], y:y_1, x:x_1] = np.ones((t1 - t0, shape[1], y_1 - y, x_1 - x))
    elif len(shape) == 3:
        domain[t0:t1, y:y_1, x:x_1] = np.ones((t1 - t0, y_1 - y, x_1 - x))

    if logger.isEnabledFor(logging.DEBUG):
        logger.debug("Filling cache with: Time - %s:%s, Lat - %s:%s, Lon - %s:%s" % (str(t0), str(t1), str(y), str(y_1), str(x), str(x_1)))

    # Copy the remote window into the local cache, variable by variable.
    for cached, source in zip(localvars, remotevars):
        if len(shape) == 4:
            cached[t0:t1, 0:shape[1], y:y_1, x:x_1] = source[t0:t1, 0:shape[1], y:y_1, x:x_1]
        else:
            cached[t0:t1, y:y_1, x:x_1] = source[t0:t1, y:y_1, x:x_1]
def get_remote_data(self, localvars, remotevars, inds, shape):
    """
    Copy a windowed chunk of remote data into the local netCDF cache and
    record that window as filled in the cache's 'domain' variable.
    """
    # A horiz_size of 'all' means the whole xy grid; any other value is a
    # half-width (default 4, set in the model controller) around the
    # requested point.
    if self.horiz_size == 'all':
        lat0, lat1 = 0, shape[-2]
        lon0, lon1 = 0, shape[-1]
    else:
        half = self.horiz_size
        lon0, lon1 = self.point_get.value[2] - half, self.point_get.value[2] + half + 1
        lat0, lat1 = self.point_get.value[1] - half, self.point_get.value[1] + half + 1
        lon0, lon1 = lon0[0], lon1[0]
        lat0, lat1 = lat0[0], lat1[0]

    # Keep the window inside the grid.
    lat0 = max(lat0, 0)
    lon0 = max(lon0, 0)
    lat1 = min(lat1, shape[-2])
    lon1 = min(lon1, shape[-1])

    t0, t1 = inds[0], inds[-1] + 1

    # Mark the region as populated before writing the data itself.
    domain = self.local.variables['domain']
    if len(shape) == 4:
        domain[t0:t1, 0:shape[1], lat0:lat1, lon0:lon1] = np.ones((t1 - t0, shape[1], lat1 - lat0, lon1 - lon0))
    elif len(shape) == 3:
        domain[t0:t1, lat0:lat1, lon0:lon1] = np.ones((t1 - t0, lat1 - lat0, lon1 - lon0))

    if logger.isEnabledFor(logging.DEBUG):
        logger.debug("Filling cache with: Time - %s:%s, Lat - %s:%s, Lon - %s:%s" % (str(t0), str(t1), str(lat0), str(lat1), str(lon0), str(lon1)))

    # Transfer each remote variable's window into the local cache.
    for dest, src in zip(localvars, remotevars):
        if len(shape) == 4:
            dest[t0:t1, 0:shape[1], lat0:lat1, lon0:lon1] = src[t0:t1, 0:shape[1], lat0:lat1, lon0:lon1]
        else:
            dest[t0:t1, lat0:lat1, lon0:lon1] = src[t0:t1, lat0:lat1, lon0:lon1]
def run(self):
    """
    Force this particle through every model timestep and return it.

    Sets up the optional shoreline and bathymetry boundaries, computes the
    datetimes/time indexes for each step, then for every timestep reads the
    forcing data, applies each model in order, resolves boundary
    interactions, and saves the particle state. Per-phase wall-clock timing
    is accumulated and logged at the end.

    Returns:
        The particle, with all timesteps forced and gaps filled.

    Raises:
        ValueError: if the shared 'active' flag is cleared mid-run.
        AssertionError: if datetimes and time indexes disagree in length.
    """
    self.load_initial_dataset()

    redis_connection = None
    if self.redis_url is not None and self.redis_results_channel is not None:
        import redis
        redis_connection = redis.from_url(self.redis_url)

    # Setup shoreline
    self._shoreline = None
    if self.useshore is True:
        self._shoreline = Shoreline(path=self.shoreline_path,
                                    feature_name=self.shoreline_feature,
                                    point=self.release_location_centroid,
                                    spatialbuffer=self.shoreline_index_buffer)
        # Make sure we are not starting on land. Raises exception if we are.
        self._shoreline.intersect(start_point=self.release_location_centroid,
                                  end_point=self.release_location_centroid)

    # Setup Bathymetry (best-effort: a load failure disables bathymetry).
    if self.usebathy is True:
        try:
            self._bathymetry = Bathymetry(file=self.bathy_path)
        except Exception:
            logger.exception("Could not load Bathymetry file: %s, using no Bathymetry for this run!" % self.bathy_path)
            self.usebathy = False

    # Calculate datetime at every timestep
    modelTimestep, newtimes = AsaTransport.get_time_objects_from_model_timesteps(self.times, start=self.start_time)

    if self.time_method == 'interp':
        time_indexs = self.timevar.nearest_index(newtimes, select='before')
    elif self.time_method == 'nearest':
        time_indexs = self.timevar.nearest_index(newtimes)
    else:
        logger.warn("Method for computing u,v,w,temp,salt not supported!")

    try:
        assert len(newtimes) == len(time_indexs)
    except AssertionError:
        logger.exception("Time indexes are messed up. Need to have equal datetime and time indexes")
        raise

    # Keep track of how much time we spend in each area.
    tot_boundary_time = 0.
    tot_model_time = {}
    tot_read_data = 0.
    for m in self.models:
        tot_model_time[m.name] = 0.

    # Set the base conditions.
    # If using Redis, send the results
    if redis_connection is not None:
        redis_connection.publish(self.redis_results_channel, json.dumps(self.particle.timestep_dump()))

    # Loop over timesteps. We don't loop over the last time_index because
    # we need to query in the time_index and set the particle's location
    # as the 'newtime' object.
    for loop_i, i in enumerate(time_indexs[0:-1]):

        if self.active and self.active.value is False:
            raise ValueError("Particle exiting due to Failure.")

        newloc = None

        # NOTE(review): time.clock() was removed in Python 3.8; these
        # timers should become time.perf_counter() when the runtime allows.
        st = time.clock()
        # Get the variable data required by the models
        if self.time_method == 'nearest':
            u, v, w, temp, salt = self.get_nearest_data(i)
        elif self.time_method == 'interp':
            u, v, w, temp, salt = self.get_linterp_data(i, newtimes[loop_i])
        else:
            logger.warn("Method for computing u,v,w,temp,salt is unknown. Only 'nearest' and 'interp' are supported.")
        tot_read_data += (time.clock() - st)

        # Get the bathy value at the particles location
        if self.usebathy is True:
            bathymetry_value = self._bathymetry.get_depth(self.particle.location)
        else:
            # Sentinel depth used when bathymetry is disabled.
            bathymetry_value = -999999999999999

        # Age the particle by the modelTimestep (seconds).
        # 'Age' meaning the amount of time it has been forced.
        self.particle.age(seconds=modelTimestep[loop_i])

        # loop over models - sort these in the order you want them to run
        for model in self.models:
            st = time.clock()
            movement = model.move(self.particle, u, v, w, modelTimestep[loop_i], temperature=temp, salinity=salt, bathymetry_value=bathymetry_value)
            newloc = Location4D(latitude=movement['latitude'],
                                longitude=movement['longitude'],
                                depth=movement['depth'],
                                time=newtimes[loop_i + 1])
            # BUGFIX: charge the elapsed time to the model that just ran.
            # The original used 'm.name' - the stale loop variable from the
            # initialization loop above - so every model's time was
            # attributed to the last model in self.models.
            tot_model_time[model.name] += (time.clock() - st)

            if logger.isEnabledFor(logging.DEBUG):
                logger.debug("%s - moved %.3f meters (horizontally) and %.3f meters (vertically) by %s with data from %s" % (self.particle.logstring(), movement['distance'], movement['vertical_distance'], model.__class__.__name__, newtimes[loop_i].isoformat()))

            if newloc:
                st = time.clock()
                self.boundary_interaction(particle=self.particle,
                                          starting=self.particle.location,
                                          ending=newloc,
                                          distance=movement['distance'],
                                          angle=movement['angle'],
                                          azimuth=movement['azimuth'],
                                          reverse_azimuth=movement['reverse_azimuth'],
                                          vertical_distance=movement['vertical_distance'],
                                          vertical_angle=movement['vertical_angle'])
                tot_boundary_time += (time.clock() - st)

            if logger.isEnabledFor(logging.DEBUG):
                logger.debug("%s - was forced by %s and is now at %s" % (self.particle.logstring(), model.__class__.__name__, self.particle.location.logstring()))

        self.particle.note = self.particle.outputstring()
        # Each timestep, save the particles status and environmental variables.
        # This keep fields such as temp, salt, halted, settled, and dead matched up with the number of timesteps
        self.particle.save()

        # If using Redis, send the results
        if redis_connection is not None:
            redis_connection.publish(self.redis_results_channel, json.dumps(self.particle.timestep_dump()))

    self.dataset.closenc()

    # We won't pull data for the last entry in locations, but we need to populate it with fill data.
    self.particle.fill_gap()

    if self.usebathy is True:
        self._bathymetry.close()
    if self.useshore is True:
        self._shoreline.close()

    logger.info(textwrap.dedent('''Particle %i Stats:
                  Data read: %f seconds
                  Model forcing: %s seconds
                  Boundary intersection: %f seconds''' % (self.particle.uid, tot_read_data, {s: '{:g} seconds'.format(f) for s, f in list(tot_model_time.items())}, tot_boundary_time)))

    return self.particle
def boundary_interaction(self, **kwargs):
    """
    Check a proposed particle step against shoreline, bathymetry and sea
    surface, adjusting the ending Location4D in place before committing it
    as the particle's new location.

    kwargs must supply ``particle``, ``starting`` and ``ending``; the
    optional movement metrics (distance, angle, azimuth, reverse_azimuth)
    are forwarded to the shoreline reaction.
    """
    particle = kwargs.pop('particle')
    starting = kwargs.pop('starting')
    ending = kwargs.pop('ending')

    # --- shoreline -------------------------------------------------------
    if self.useshore:
        hit = self._shoreline.intersect(start_point=starting.point,
                                        end_point=ending.point)
        if hit is not None:
            # Pin the particle at the point of coastline impact first.
            landing = Location4D(point=hit['point'],
                                 time=starting.time + (ending.time - starting.time))
            particle.location = landing

            # This relies on the shoreline to put the particle in water and
            # not on shore.
            pushed_back = self._shoreline.react(start_point=starting,
                                                end_point=ending,
                                                hit_point=landing,
                                                reverse_distance=self.reverse_distance,
                                                feature=hit['feature'],
                                                distance=kwargs.get('distance'),
                                                angle=kwargs.get('angle'),
                                                azimuth=kwargs.get('azimuth'),
                                                reverse_azimuth=kwargs.get('reverse_azimuth'))
            ending.latitude = pushed_back.latitude
            ending.longitude = pushed_back.longitude
            ending.depth = pushed_back.depth

            if logger.isEnabledFor(logging.DEBUG):
                logger.debug("%s - hit the shoreline at %s. Setting location to %s." % (particle.logstring(), landing.logstring(), ending.logstring()))

    # --- bathymetry ------------------------------------------------------
    if self.usebathy:
        # A settled particle stays on the bottom; no reflection for it.
        if not particle.settled:
            if self._bathymetry.intersect(start_point=starting, end_point=ending):
                rebound = self._bathymetry.react(type='reverse',
                                                 start_point=starting,
                                                 end_point=ending)
                if logger.isEnabledFor(logging.DEBUG):
                    logger.debug("%s - hit the bottom at %s. Setting location to %s." % (particle.logstring(), ending.logstring(), rebound.logstring()))
                ending.latitude = rebound.latitude
                ending.longitude = rebound.longitude
                ending.depth = rebound.depth

    # --- sea surface -----------------------------------------------------
    if self.usesurface:
        if ending.depth > 0:
            if logger.isEnabledFor(logging.DEBUG):
                logger.debug("%s - rose out of the water. Setting depth to 0." % particle.logstring())
            ending.depth = 0

    particle.location = ending
def run(self):
    """
    Force this particle through every model timestep and return it.

    Sets up the optional shoreline and bathymetry boundaries, computes the
    datetimes/time indexes for each step, then for every timestep reads the
    forcing data, applies each model in order, resolves boundary
    interactions, and saves the particle state. Per-phase wall-clock timing
    is accumulated and logged at the end.

    Returns:
        The particle, with all timesteps forced and gaps filled.

    Raises:
        ValueError: if the shared 'active' flag is cleared mid-run.
        AssertionError: if datetimes and time indexes disagree in length.
    """
    self.load_initial_dataset()

    redis_connection = None
    if self.redis_url is not None and self.redis_results_channel is not None:
        import redis
        redis_connection = redis.from_url(self.redis_url)

    # Setup shoreline
    self._shoreline = None
    if self.useshore is True:
        self._shoreline = Shoreline(path=self.shoreline_path,
                                    feature_name=self.shoreline_feature,
                                    point=self.release_location_centroid,
                                    spatialbuffer=self.shoreline_index_buffer)
        # Make sure we are not starting on land. Raises exception if we are.
        self._shoreline.intersect(start_point=self.release_location_centroid,
                                  end_point=self.release_location_centroid)

    # Setup Bathymetry (best-effort: a load failure disables bathymetry).
    if self.usebathy is True:
        try:
            self._bathymetry = Bathymetry(file=self.bathy_path)
        except Exception:
            logger.exception("Could not load Bathymetry file: %s, using no Bathymetry for this run!" % self.bathy_path)
            self.usebathy = False

    # Calculate datetime at every timestep
    modelTimestep, newtimes = AsaTransport.get_time_objects_from_model_timesteps(self.times, start=self.start_time)

    if self.time_method == 'interp':
        time_indexs = self.timevar.nearest_index(newtimes, select='before')
    elif self.time_method == 'nearest':
        time_indexs = self.timevar.nearest_index(newtimes)
    else:
        logger.warn("Method for computing u,v,w,temp,salt not supported!")

    try:
        assert len(newtimes) == len(time_indexs)
    except AssertionError:
        logger.exception("Time indexes are messed up. Need to have equal datetime and time indexes")
        raise

    # Keep track of how much time we spend in each area.
    tot_boundary_time = 0.
    tot_model_time = {}
    tot_read_data = 0.
    for m in self.models:
        tot_model_time[m.name] = 0.

    # Set the base conditions.
    # If using Redis, send the results
    if redis_connection is not None:
        redis_connection.publish(self.redis_results_channel, json.dumps(self.particle.timestep_dump()))

    # Loop over timesteps. We don't loop over the last time_index because
    # we need to query in the time_index and set the particle's location
    # as the 'newtime' object.
    for loop_i, i in enumerate(time_indexs[0:-1]):

        if self.active and self.active.value is False:
            raise ValueError("Particle exiting due to Failure.")

        newloc = None

        # NOTE(review): time.clock() was removed in Python 3.8; these
        # timers should become time.perf_counter() when the runtime allows.
        st = time.clock()
        # Get the variable data required by the models
        if self.time_method == 'nearest':
            u, v, w, temp, salt = self.get_nearest_data(i)
        elif self.time_method == 'interp':
            u, v, w, temp, salt = self.get_linterp_data(i, newtimes[loop_i])
        else:
            logger.warn("Method for computing u,v,w,temp,salt is unknown. Only 'nearest' and 'interp' are supported.")
        tot_read_data += (time.clock() - st)

        # Get the bathy value at the particles location
        if self.usebathy is True:
            bathymetry_value = self._bathymetry.get_depth(self.particle.location)
        else:
            # Sentinel depth used when bathymetry is disabled.
            bathymetry_value = -999999999999999

        # Age the particle by the modelTimestep (seconds).
        # 'Age' meaning the amount of time it has been forced.
        self.particle.age(seconds=modelTimestep[loop_i])

        # loop over models - sort these in the order you want them to run
        for model in self.models:
            st = time.clock()
            movement = model.move(self.particle, u, v, w, modelTimestep[loop_i], temperature=temp, salinity=salt, bathymetry_value=bathymetry_value)
            newloc = Location4D(latitude=movement['latitude'],
                                longitude=movement['longitude'],
                                depth=movement['depth'],
                                time=newtimes[loop_i + 1])
            # BUGFIX: charge the elapsed time to the model that just ran.
            # The original used 'm.name' - the stale loop variable from the
            # initialization loop above - so every model's time was
            # attributed to the last model in self.models.
            tot_model_time[model.name] += (time.clock() - st)

            if logger.isEnabledFor(logging.DEBUG):
                logger.debug("%s - moved %.3f meters (horizontally) and %.3f meters (vertically) by %s with data from %s" % (self.particle.logstring(), movement['distance'], movement['vertical_distance'], model.__class__.__name__, newtimes[loop_i].isoformat()))

            if newloc:
                st = time.clock()
                self.boundary_interaction(particle=self.particle,
                                          starting=self.particle.location,
                                          ending=newloc,
                                          distance=movement['distance'],
                                          angle=movement['angle'],
                                          azimuth=movement['azimuth'],
                                          reverse_azimuth=movement['reverse_azimuth'],
                                          vertical_distance=movement['vertical_distance'],
                                          vertical_angle=movement['vertical_angle'])
                tot_boundary_time += (time.clock() - st)

            if logger.isEnabledFor(logging.DEBUG):
                logger.debug("%s - was forced by %s and is now at %s" % (self.particle.logstring(), model.__class__.__name__, self.particle.location.logstring()))

        self.particle.note = self.particle.outputstring()
        # Each timestep, save the particles status and environmental variables.
        # This keep fields such as temp, salt, halted, settled, and dead matched up with the number of timesteps
        self.particle.save()

        # If using Redis, send the results
        if redis_connection is not None:
            redis_connection.publish(self.redis_results_channel, json.dumps(self.particle.timestep_dump()))

    self.dataset.closenc()

    # We won't pull data for the last entry in locations, but we need to populate it with fill data.
    self.particle.fill_gap()

    if self.usebathy is True:
        self._bathymetry.close()
    if self.useshore is True:
        self._shoreline.close()

    logger.info(textwrap.dedent('''Particle %i Stats:
                  Data read: %f seconds
                  Model forcing: %s seconds
                  Boundary intersection: %f seconds''' % (self.particle.uid, tot_read_data, {s: '{:g} seconds'.format(f) for s, f in list(tot_model_time.items())}, tot_boundary_time)))

    return self.particle