def test_reverse_half_distance_until_in_water(self):
    """A 'reverse' shoreline reaction with a 40 km reverse distance
    should leave the particle in open water."""
    shoreline = Shoreline(type='reverse')
    start = Location4D(latitude=39.05, longitude=-75.34, depth=0)
    end = Location4D(latitude=38.96, longitude=-75.315, depth=0)

    gc = AsaGreatCircle.great_distance(start_point=start, end_point=end)
    math_angle = AsaMath.azimuth_to_math_angle(azimuth=gc['azimuth'])

    hit = shoreline.intersect(start_point=start.point, end_point=end.point)
    hit4d = Location4D(point=hit['point'])

    final = shoreline.react(start_point=start,
                            hit_point=hit4d,
                            end_point=end,
                            feature=hit['feature'],
                            distance=gc['distance'],
                            angle=math_angle,
                            azimuth=gc['azimuth'],
                            reverse_azimuth=gc['reverse_azimuth'],
                            reverse_distance=40000)

    # A point-on-point intersection returning None means the point is in water.
    assert shoreline.intersect(start_point=final.point, end_point=final.point) is None
def test_reverse_half_distance_until_in_water(self):
    """Reacting off the shoreline with reverse_distance=40000 must put the
    particle back in water.

    NOTE(review): this method name duplicates an earlier definition in the
    same class, so only one of the two actually runs — confirm intent.
    """
    shoreline = Shoreline(type='reverse')
    origin = Location4D(latitude=39.05, longitude=-75.34, depth=0)
    target = Location4D(latitude=38.96, longitude=-75.315, depth=0)

    travel = AsaGreatCircle.great_distance(start_point=origin, end_point=target)
    angle = AsaMath.azimuth_to_math_angle(azimuth=travel['azimuth'])

    crossing = shoreline.intersect(start_point=origin.point, end_point=target.point)
    crossing4d = Location4D(point=crossing['point'])

    reacted = shoreline.react(start_point=origin,
                              hit_point=crossing4d,
                              end_point=target,
                              feature=crossing['feature'],
                              distance=travel['distance'],
                              angle=angle,
                              azimuth=travel['azimuth'],
                              reverse_azimuth=travel['reverse_azimuth'],
                              reverse_distance=40000)

    # Should be in water: no self-intersection with the shoreline.
    assert shoreline.intersect(start_point=reacted.point, end_point=reacted.point) is None
def test_reverse_up_left(self):
    """On a NE-to-SW track the reversed point should land strictly between
    the start location and the shoreline hit point on both axes."""
    shoreline = Shoreline(type='reverse')
    start = Location4D(latitude=39.05, longitude=-75.34, depth=0)
    end = Location4D(latitude=38.96, longitude=-75.315, depth=0)

    gc = AsaGreatCircle.great_distance(start_point=start, end_point=end)
    math_angle = AsaMath.azimuth_to_math_angle(azimuth=gc['azimuth'])

    hit = shoreline.intersect(start_point=start.point, end_point=end.point)
    hit4d = Location4D(point=hit['point'])

    final = shoreline.react(start_point=start,
                            hit_point=hit4d,
                            end_point=end,
                            feature=hit['feature'],
                            distance=gc['distance'],
                            angle=math_angle,
                            azimuth=gc['azimuth'],
                            reverse_azimuth=gc['reverse_azimuth'])

    # Latitude must fall between the hit point and the start point.
    assert hit4d.latitude < final.latitude < start.latitude
    # Longitude must fall between the start point and the hit point.
    assert start.longitude < final.longitude < hit4d.longitude
def test_reverse_10_times_then_start_point(self):
    """An absurdly large reverse distance should force the reaction all the
    way back to the original start location."""
    shoreline = Shoreline(type='reverse')
    start = Location4D(latitude=39.05, longitude=-75.34, depth=0)
    end = Location4D(latitude=38.96, longitude=-75.315, depth=0)

    gc = AsaGreatCircle.great_distance(start_point=start, end_point=end)
    math_angle = AsaMath.azimuth_to_math_angle(azimuth=gc['azimuth'])

    hit = shoreline.intersect(start_point=start.point, end_point=end.point)
    hit4d = Location4D(point=hit['point'])

    final = shoreline.react(start_point=start,
                            hit_point=hit4d,
                            end_point=end,
                            feature=hit['feature'],
                            distance=gc['distance'],
                            angle=math_angle,
                            azimuth=gc['azimuth'],
                            reverse_azimuth=gc['reverse_azimuth'],
                            reverse_distance=9999999999999999999999999999)

    # The reaction should have backed the particle up to the start location.
    assert final.longitude == start.longitude
    assert final.latitude == start.latitude
    assert final.depth == start.depth
def test_reverse_distance_traveled(self):
    """A microscopic reverse distance should leave the reacted point
    essentially at the shoreline hit point."""
    shoreline = Shoreline(type='reverse')
    start = Location4D(latitude=39.05, longitude=-75.34, depth=0)
    end = Location4D(latitude=38.96, longitude=-75.315, depth=0)

    gc = AsaGreatCircle.great_distance(start_point=start, end_point=end)
    math_angle = AsaMath.azimuth_to_math_angle(azimuth=gc['azimuth'])

    hit = shoreline.intersect(start_point=start.point, end_point=end.point)
    hit4d = Location4D(point=hit['point'])

    final = shoreline.react(start_point=start,
                            hit_point=hit4d,
                            end_point=end,
                            feature=hit['feature'],
                            distance=gc['distance'],
                            angle=math_angle,
                            azimuth=gc['azimuth'],
                            reverse_azimuth=gc['reverse_azimuth'],
                            reverse_distance=0.000001)

    # The resulting point should be VERY close to the hit point.
    assert abs(hit4d.latitude - final.latitude) < 0.005
    assert abs(hit4d.longitude - final.longitude) < 0.005
def test_reverse_distance_traveled(self):
    """With reverse_distance ~ 0 the reacted location should coincide with
    the shoreline hit point to within a few thousandths of a degree.

    NOTE(review): this method name duplicates an earlier definition in the
    same class, so only one of the two actually runs — confirm intent.
    """
    shoreline = Shoreline(type='reverse')
    origin = Location4D(latitude=39.05, longitude=-75.34, depth=0)
    target = Location4D(latitude=38.96, longitude=-75.315, depth=0)

    travel = AsaGreatCircle.great_distance(start_point=origin, end_point=target)
    angle = AsaMath.azimuth_to_math_angle(azimuth=travel['azimuth'])

    crossing = shoreline.intersect(start_point=origin.point, end_point=target.point)
    crossing4d = Location4D(point=crossing['point'])

    reacted = shoreline.react(start_point=origin,
                              hit_point=crossing4d,
                              end_point=target,
                              feature=crossing['feature'],
                              distance=travel['distance'],
                              angle=angle,
                              azimuth=travel['azimuth'],
                              reverse_azimuth=travel['reverse_azimuth'],
                              reverse_distance=0.000001)

    # The resulting point should be VERY close to the hit point.
    assert abs(crossing4d.latitude - reacted.latitude) < 0.005
    assert abs(crossing4d.longitude - reacted.longitude) < 0.005
def test_reverse_12_times_then_start_point(self):
    """A huge reverse distance must walk the particle all the way back to
    its release location."""
    shoreline = Shoreline(type='reverse')
    origin = Location4D(latitude=39.05, longitude=-75.34, depth=0)
    target = Location4D(latitude=38.96, longitude=-75.315, depth=0)

    travel = AsaGreatCircle.great_distance(start_point=origin, end_point=target)
    angle = AsaMath.azimuth_to_math_angle(azimuth=travel['azimuth'])

    crossing = shoreline.intersect(start_point=origin.point, end_point=target.point)
    crossing4d = Location4D(point=crossing['point'])

    reacted = shoreline.react(start_point=origin,
                              hit_point=crossing4d,
                              end_point=target,
                              feature=crossing['feature'],
                              distance=travel['distance'],
                              angle=angle,
                              azimuth=travel['azimuth'],
                              reverse_azimuth=travel['reverse_azimuth'],
                              reverse_distance=9999999999999999999999999999)

    # Should be exactly the start location.
    assert reacted.longitude == origin.longitude
    assert reacted.latitude == origin.latitude
    assert reacted.depth == origin.depth
def test_reverse_up_left(self):
    """Reversed point must sit between the start and hit points in both
    latitude and longitude.

    NOTE(review): this method name duplicates an earlier definition in the
    same class, so only one of the two actually runs — confirm intent.
    """
    shoreline = Shoreline(type='reverse')
    origin = Location4D(latitude=39.05, longitude=-75.34, depth=0)
    target = Location4D(latitude=38.96, longitude=-75.315, depth=0)

    travel = AsaGreatCircle.great_distance(start_point=origin, end_point=target)
    angle = AsaMath.azimuth_to_math_angle(azimuth=travel['azimuth'])

    crossing = shoreline.intersect(start_point=origin.point, end_point=target.point)
    crossing4d = Location4D(point=crossing['point'])

    reacted = shoreline.react(start_point=origin,
                              hit_point=crossing4d,
                              end_point=target,
                              feature=crossing['feature'],
                              distance=travel['distance'],
                              angle=angle,
                              azimuth=travel['azimuth'],
                              reverse_azimuth=travel['reverse_azimuth'])

    # Resulting latitude should be between the start point and the hit point.
    assert reacted.latitude > crossing4d.latitude
    assert reacted.latitude < origin.latitude
    # Resulting longitude should be between the start point and the hit point.
    assert reacted.longitude < crossing4d.longitude
    assert reacted.longitude > origin.longitude
def test_reverse_left(self):
    """On a due-east track the reversed point keeps (almost) the same
    latitude and lands between the start and hit longitudes."""
    shoreline = Shoreline(type='reverse')
    start = Location4D(latitude=39.1, longitude=-74.91, depth=0)
    end = Location4D(latitude=39.1, longitude=-74.85, depth=0)

    gc = AsaGreatCircle.great_distance(start_point=start, end_point=end)
    math_angle = AsaMath.azimuth_to_math_angle(azimuth=gc['azimuth'])

    hit = shoreline.intersect(start_point=start.point, end_point=end.point)
    hit4d = Location4D(point=hit['point'])

    final = shoreline.react(start_point=start,
                            hit_point=hit4d,
                            end_point=end,
                            feature=hit['feature'],
                            distance=gc['distance'],
                            angle=math_angle,
                            azimuth=gc['azimuth'],
                            reverse_azimuth=gc['reverse_azimuth'])

    # On a straight horizontal track, latitude changes only slightly.
    assert abs(final.latitude - start.latitude) < 0.005
    # Longitude should fall between the start point and the hit point.
    assert start.longitude < final.longitude < hit4d.longitude
def test_reverse_left(self):
    """Horizontal-track reaction: latitude ~ unchanged, longitude between
    start and hit.

    NOTE(review): this method name duplicates an earlier definition in the
    same class, so only one of the two actually runs — confirm intent.
    """
    shoreline = Shoreline(type='reverse')
    origin = Location4D(latitude=39.1, longitude=-74.91, depth=0)
    target = Location4D(latitude=39.1, longitude=-74.85, depth=0)

    travel = AsaGreatCircle.great_distance(start_point=origin, end_point=target)
    angle = AsaMath.azimuth_to_math_angle(azimuth=travel['azimuth'])

    crossing = shoreline.intersect(start_point=origin.point, end_point=target.point)
    crossing4d = Location4D(point=crossing['point'])

    reacted = shoreline.react(start_point=origin,
                              hit_point=crossing4d,
                              end_point=target,
                              feature=crossing['feature'],
                              distance=travel['distance'],
                              angle=angle,
                              azimuth=travel['azimuth'],
                              reverse_azimuth=travel['reverse_azimuth'])

    # Since we are on a straight horizontal line, latitude barely changes.
    assert abs(reacted.latitude - origin.latitude) < 0.005
    # Resulting longitude should be between the start point and the hit point.
    assert reacted.longitude < crossing4d.longitude
    assert reacted.longitude > origin.longitude
def test_intersection_speed(self):
    """Time a single shoreline intersection query off the west coast of
    Nova Scotia and report the elapsed wall-clock time.

    Fix: the original used the Python 2-only ``print`` statement; the
    parenthesized single-argument form below behaves identically on
    Python 2 and also runs on Python 3.
    """
    # Track intersects the west coast of Nova Scotia.
    starting = Location4D(longitude=-66.1842219282406177, latitude=44.0141581697495852, depth=0).point
    ending = Location4D(longitude=-66.1555195384399326, latitude=44.0387992322117370, depth=0).point
    s = Shoreline(point=starting, spatialbuffer=1)

    st = time.time()
    # The ['point'] access also verifies an intersection was actually found.
    s.intersect(start_point=starting, end_point=ending)['point']
    print("Intersection Time: " + str(time.time() - st))
def test_multipart_shape_intersection_speed(self):
    """Time an intersection query against a multipart shoreline shapefile.

    Fixes: the original comment claimed Nova Scotia, but the coordinates
    (-146.62, 60.755) are in Prince William Sound, Alaska; the Python
    2-only ``print`` statement is replaced with the parenthesized form
    that works on both Python 2 and 3.
    """
    # Track intersects the shoreline in Prince William Sound, Alaska.
    starting = Location4D(longitude=-146.62, latitude=60.755, depth=0).point
    ending = Location4D(longitude=-146.60, latitude=60.74, depth=0).point
    shore_path = os.path.join(self.shoreline_path, "westcoast", "New_Land_Clean.shp")
    s = Shoreline(file=shore_path, point=starting, spatialbuffer=1)

    st = time.time()
    # The ['point'] access also verifies an intersection was actually found.
    s.intersect(start_point=starting, end_point=ending)['point']
    print("Multipart Shoreline Intersection Time: " + str(time.time() - st))
def test_large_shape_intersection_speed(self):
    """Time an intersection query against the large Alaska basemap
    shoreline shapefile.

    Fixes: the original comment claimed Nova Scotia, but the coordinates
    (-146.62, 60.755) are in Prince William Sound, Alaska; the Python
    2-only ``print`` statement is replaced with the parenthesized form
    that works on both Python 2 and 3.
    """
    # Track intersects the shoreline in Prince William Sound, Alaska.
    starting = Location4D(longitude=-146.62, latitude=60.755, depth=0).point
    ending = Location4D(longitude=-146.60, latitude=60.74, depth=0).point
    shore_path = os.path.join(self.shoreline_path, "alaska", "AK_Land_Basemap.shp")
    s = Shoreline(file=shore_path, point=starting, spatialbuffer=0.25)

    st = time.time()
    # The ['point'] access also verifies an intersection was actually found.
    s.intersect(start_point=starting, end_point=ending)['point']
    print("Large Shoreline Intersection Time: " + str(time.time() - st))
def test_water_start_land_end_intersection(self):
    """A track starting in water and ending on land should intersect the
    shoreline at a well-known point."""
    shoreline = Shoreline()

    # (-75, 39) is in the middle of the Delaware Bay; (-75, 39.5) is on land.
    # The intersection should be a Point near (-75, 39.185..39.195).
    start = Location4D(latitude=39, longitude=-75, depth=0).point
    end = Location4D(latitude=39.5, longitude=-75, depth=0).point

    hit = Location4D(point=shoreline.intersect(start_point=start, end_point=end)['point'])

    assert hit.longitude == -75
    assert 39.185 < hit.latitude < 39.195
def test_water_start_land_end_intersection(self):
    """Water-to-land track must hit the shoreline around latitude 39.19
    on the -75 meridian.

    NOTE(review): this method name duplicates an earlier definition in the
    same class, so only one of the two actually runs — confirm intent.
    """
    shoreline = Shoreline()

    # (-75, 39) is in the middle of the Delaware Bay; (-75, 39.5) is on land.
    # Intersection should be a Point starting somewhere around -75, 39.185 -> 39.195.
    water_pt = Location4D(latitude=39, longitude=-75, depth=0).point
    land_pt = Location4D(latitude=39.5, longitude=-75, depth=0).point

    result = shoreline.intersect(start_point=water_pt, end_point=land_pt)
    crossing = Location4D(point=result['point'])

    assert -75 == crossing.longitude
    assert crossing.latitude > 39.185
    assert crossing.latitude < 39.195
def test_water_start_water_end_jump_over_land_intersection(self):
    """A water-to-water track that jumps over a peninsula should still
    report the first shoreline crossing."""
    shoreline = Shoreline()

    # (-75, 39) is in the middle of the Delaware Bay; (-74, 39) is in the
    # Atlantic.  The segment crosses a peninsula, so the intersection
    # should be near (-74.96..-74.94, 39).
    start = Location4D(latitude=39, longitude=-75, depth=0).point
    end = Location4D(latitude=39, longitude=-74, depth=0).point

    hit = Location4D(point=shoreline.intersect(start_point=start, end_point=end)['point'])

    assert hit.latitude == 39
    assert -74.96 < hit.longitude < -74.94
def test_water_start_water_end_jump_over_land_intersection(self):
    """Segment spanning Delaware Bay to the Atlantic crosses a peninsula;
    the reported crossing must be near longitude -74.95 at latitude 39.

    NOTE(review): this method name duplicates an earlier definition in the
    same class, so only one of the two actually runs — confirm intent.
    """
    shoreline = Shoreline()

    # (-75, 39) is in the middle of the Delaware Bay; (-74, 39) is in the
    # Atlantic.  This jumps over a peninsula.
    # Intersection should be the Point -74.96 -> -74.94, 39.
    bay_pt = Location4D(latitude=39, longitude=-75, depth=0).point
    ocean_pt = Location4D(latitude=39, longitude=-74, depth=0).point

    result = shoreline.intersect(start_point=bay_pt, end_point=ocean_pt)
    crossing = Location4D(point=result['point'])

    assert 39 == crossing.latitude
    assert crossing.longitude > -74.96
    assert crossing.longitude < -74.94
class BaseForcer(object):

    def __init__(self, hydrodataset, **kwargs):
        """
        Force an individual particle through time against a hydrodynamic
        dataset, reacting it off the shoreline, the bathymetry and the
        sea surface as configured.

        Required:
            hydrodataset -- path/URL of the hydrodynamic dataset.

        Keyword arguments (defaults shown in the kwargs.get calls below):
            particle, common_variables, timevar, times, start_time, models,
            release_location_centroid, usebathy, useshore, usesurface,
            reverse_distance, bathy_path, shoreline_path, shoreline_feature,
            shoreline_index_buffer, time_method, redis_url,
            redis_results_channel
        """
        assert hydrodataset is not None

        # Common parameters
        self.hydrodataset = hydrodataset
        self.bathy_path = kwargs.get("bathy_path")
        self.release_location_centroid = kwargs.get("release_location_centroid")
        self.particle = kwargs.get("particle")
        self.times = kwargs.get("times")
        self.timevar = kwargs.get("timevar", None)
        self.start_time = kwargs.get("start_time")
        self.models = kwargs.get("models", [])
        self.usebathy = kwargs.get("usebathy", False)
        self.useshore = kwargs.get("useshore", False)
        self.usesurface = kwargs.get("usesurface", True)
        self.shoreline_path = kwargs.get("shoreline_path")
        self.shoreline_feature = kwargs.get("shoreline_feature", None)
        self.shoreline_index_buffer = kwargs.get("shoreline_index_buffer", 0.1)
        self.time_method = kwargs.get("time_method", "nearest")
        self.reverse_distance = kwargs.get("reverse_distance", 500)

        # Redis connection info for publishing per-timestep results
        self.redis_url = kwargs.get("redis_url", None)
        self.redis_results_channel = kwargs.get("redis_results_channel", None)

        # Map the dataset's variable names onto the common names we use
        self.common_variables = kwargs.get("common_variables")
        self.uname = self.common_variables.get("u", None)
        self.vname = self.common_variables.get("v", None)
        self.wname = self.common_variables.get("w", None)
        self.temp_name = self.common_variables.get("temp", None)
        self.salt_name = self.common_variables.get("salt", None)
        self.xname = self.common_variables.get("x", None)
        self.yname = self.common_variables.get("y", None)
        self.zname = self.common_variables.get("z", None)
        self.tname = self.common_variables.get("time", None)

        # Set by __call__; shared "keep running" flag for the particle pool.
        self.active = None

    def load_initial_dataset(self):
        """
        Initialize self.dataset (and self.timevar if not already supplied).

        A cacher will have to wrap this in locks, while a straight runner
        will not.  Raises if the source dataset cannot be opened.
        """
        try:
            self.dataset = CommonDataset.open(self.hydrodataset)
            if self.timevar is None:
                self.timevar = self.dataset.gettimevar(self.common_variables.get("u"))
        except Exception:
            logger.warn("No source dataset: %s.  Particle exiting" % self.hydrodataset)
            raise

    def boundary_interaction(self, **kwargs):
        """
        React the particle off the shoreline, the bathymetry and the sea
        surface, mutating `ending` (and the particle's location) in place.

        Required kwargs: particle, starting, ending.  Also consumed when
        a shoreline hit occurs: distance, angle, azimuth, reverse_azimuth.
        Returns None (the original docstring's claim of a returned list
        did not match the implementation).
        """
        particle = kwargs.pop('particle')
        starting = kwargs.pop('starting')
        ending = kwargs.pop('ending')

        # Shoreline
        if self.useshore:
            intersection_point = self._shoreline.intersect(start_point=starting.point, end_point=ending.point)
            if intersection_point is not None:
                # Set the intersection point.
                # NOTE(review): starting.time + (ending.time - starting.time)
                # simplifies to ending.time — if the midpoint was intended,
                # the delta should be halved.  Preserved as-is.
                hitpoint = Location4D(point=intersection_point['point'], time=starting.time + (ending.time - starting.time))
                particle.location = hitpoint

                # This relies on the shoreline to put the particle in water and not on shore.
                resulting_point = self._shoreline.react(start_point=starting,
                                                        end_point=ending,
                                                        hit_point=hitpoint,
                                                        reverse_distance=self.reverse_distance,
                                                        feature=intersection_point['feature'],
                                                        distance=kwargs.get('distance'),
                                                        angle=kwargs.get('angle'),
                                                        azimuth=kwargs.get('azimuth'),
                                                        reverse_azimuth=kwargs.get('reverse_azimuth'))
                ending.latitude = resulting_point.latitude
                ending.longitude = resulting_point.longitude
                ending.depth = resulting_point.depth
                if logger.isEnabledFor(logging.DEBUG):
                    logger.debug("%s - hit the shoreline at %s.  Setting location to %s."
                                 % (particle.logstring(), hitpoint.logstring(), ending.logstring()))

        # Bathymetry
        if self.usebathy:
            if not particle.settled:
                bintersect = self._bathymetry.intersect(start_point=starting, end_point=ending)
                if bintersect:
                    pt = self._bathymetry.react(type='reverse', start_point=starting, end_point=ending)
                    if logger.isEnabledFor(logging.DEBUG):
                        logger.debug("%s - hit the bottom at %s.  Setting location to %s."
                                     % (particle.logstring(), ending.logstring(), pt.logstring()))
                    ending.latitude = pt.latitude
                    ending.longitude = pt.longitude
                    ending.depth = pt.depth

        # Sea-surface: clamp positive depths back to the surface.
        if self.usesurface:
            if ending.depth > 0:
                if logger.isEnabledFor(logging.DEBUG):
                    logger.debug("%s - rose out of the water.  Setting depth to 0." % particle.logstring())
                ending.depth = 0

        particle.location = ending

    def get_nearest_data(self, i):
        """
        Return (u, v, w, temp, salt) at time index `i`, sampled at the
        particle's current location.

        Note: self.dataset.opennc() must be called before calling this
        function.  This is because the caching forcer must close it every
        time, while a non caching forcer can leave the dataset open.
        """
        try:
            # Default temp/salt to NaN so the return never references an
            # unbound local when only one of temp_name/salt_name is set
            # (the original raised UnboundLocalError in that case).
            temp = np.nan
            salt = np.nan

            # Grab data at time index closest to particle location
            u = np.mean(np.mean(self.dataset.get_values('u', timeinds=[np.asarray([i])], point=self.particle.location)))
            v = np.mean(np.mean(self.dataset.get_values('v', timeinds=[np.asarray([i])], point=self.particle.location)))

            # If there is vertical velocity in the dataset, get it
            if 'w' in self.dataset.nc.variables:
                # Fixed: the original passed the typo keyword 'timeindsf'.
                w = np.mean(np.mean(self.dataset.get_values('w', timeinds=[np.asarray([i])], point=self.particle.location)))
            else:
                w = 0.0

            # If there is salt and temp in the dataset, get it
            if self.temp_name is not None and self.salt_name is not None:
                temp = np.mean(np.mean(self.dataset.get_values('temp', timeinds=[np.asarray([i])], point=self.particle.location)))
                salt = np.mean(np.mean(self.dataset.get_values('salt', timeinds=[np.asarray([i])], point=self.particle.location)))

            # Check for nans that occur in the ocean (happens because
            # of model and coastline resolution mismatches)
            if np.isnan(u).any() or np.isnan(v).any() or np.isnan(w).any():
                # Take the mean of the closest 4 points
                # If this includes nan which it will, result is nan
                uarray1 = self.dataset.get_values('u', timeinds=[np.asarray([i])], point=self.particle.location, num=2)
                varray1 = self.dataset.get_values('v', timeinds=[np.asarray([i])], point=self.particle.location, num=2)
                if 'w' in self.dataset.nc.variables:
                    warray1 = self.dataset.get_values('w', timeinds=[np.asarray([i])], point=self.particle.location, num=2)
                    w = warray1.mean()
                else:
                    w = 0.0

                if self.temp_name is not None and self.salt_name is not None:
                    temparray1 = self.dataset.get_values('temp', timeinds=[np.asarray([i])], point=self.particle.location, num=2)
                    saltarray1 = self.dataset.get_values('salt', timeinds=[np.asarray([i])], point=self.particle.location, num=2)
                    temp = temparray1.mean()
                    salt = saltarray1.mean()
                u = uarray1.mean()
                v = varray1.mean()

            if self.temp_name is None:
                temp = np.nan
            if self.salt_name is None:
                salt = np.nan

        except Exception:
            logger.exception("Could not retrieve data.")
            raise

        return u, v, w, temp, salt

    def get_linterp_data(self, i, currenttime):
        """
        Return (u, v, w, temp, salt) linearly interpolated in time between
        indices `i` and `i+1`, sampled at the particle's current location.

        Note: self.dataset.opennc() must be called before calling this
        function.  This is because the caching forcer must close it every
        time, while a non caching forcer can leave the dataset open.
        """
        try:
            # Default temp/salt to NaN so the return never references an
            # unbound local when only one of temp_name/salt_name is set.
            temp = np.nan
            salt = np.nan

            # Grab data at the two time indices bracketing currenttime
            u = [np.mean(np.mean(self.dataset.get_values('u', timeinds=[np.asarray([i])], point=self.particle.location))),
                 np.mean(np.mean(self.dataset.get_values('u', timeinds=[np.asarray([i + 1])], point=self.particle.location)))]
            v = [np.mean(np.mean(self.dataset.get_values('v', timeinds=[np.asarray([i])], point=self.particle.location))),
                 np.mean(np.mean(self.dataset.get_values('v', timeinds=[np.asarray([i + 1])], point=self.particle.location)))]

            # If there is vertical velocity in the dataset, get it
            if 'w' in self.dataset.nc.variables:
                w = [np.mean(np.mean(self.dataset.get_values('w', timeinds=[np.asarray([i])], point=self.particle.location))),
                     np.mean(np.mean(self.dataset.get_values('w', timeinds=[np.asarray([i + 1])], point=self.particle.location)))]
            else:
                w = [0.0, 0.0]

            # If there is salt and temp in the dataset, get it
            if self.temp_name is not None and self.salt_name is not None:
                temp = [np.mean(np.mean(self.dataset.get_values('temp', timeinds=[np.asarray([i])], point=self.particle.location))),
                        np.mean(np.mean(self.dataset.get_values('temp', timeinds=[np.asarray([i + 1])], point=self.particle.location)))]
                salt = [np.mean(np.mean(self.dataset.get_values('salt', timeinds=[np.asarray([i])], point=self.particle.location))),
                        np.mean(np.mean(self.dataset.get_values('salt', timeinds=[np.asarray([i + 1])], point=self.particle.location)))]

            # Check for nans that occur in the ocean (happens because
            # of model and coastline resolution mismatches)
            if np.isnan(u).any() or np.isnan(v).any() or np.isnan(w).any():
                # Take the mean of the closest 4 points
                # If this includes nan which it will, result is nan
                uarray1 = self.dataset.get_values('u', timeinds=[np.asarray([i])], point=self.particle.location, num=2)
                varray1 = self.dataset.get_values('v', timeinds=[np.asarray([i])], point=self.particle.location, num=2)
                uarray2 = self.dataset.get_values('u', timeinds=[np.asarray([i + 1])], point=self.particle.location, num=2)
                varray2 = self.dataset.get_values('v', timeinds=[np.asarray([i + 1])], point=self.particle.location, num=2)
                if 'w' in self.dataset.nc.variables:
                    warray1 = self.dataset.get_values('w', timeinds=[np.asarray([i])], point=self.particle.location, num=2)
                    warray2 = self.dataset.get_values('w', timeinds=[np.asarray([i + 1])], point=self.particle.location, num=2)
                    w = [warray1.mean(), warray2.mean()]
                else:
                    w = [0.0, 0.0]

                if self.temp_name is not None and self.salt_name is not None:
                    temparray1 = self.dataset.get_values('temp', timeinds=[np.asarray([i])], point=self.particle.location, num=2)
                    saltarray1 = self.dataset.get_values('salt', timeinds=[np.asarray([i])], point=self.particle.location, num=2)
                    temparray2 = self.dataset.get_values('temp', timeinds=[np.asarray([i + 1])], point=self.particle.location, num=2)
                    saltarray2 = self.dataset.get_values('salt', timeinds=[np.asarray([i + 1])], point=self.particle.location, num=2)
                    temp = [temparray1.mean(), temparray2.mean()]
                    salt = [saltarray1.mean(), saltarray2.mean()]
                u = [uarray1.mean(), uarray2.mean()]
                v = [varray1.mean(), varray2.mean()]

            # Linear interp of data between timesteps
            currenttime = date2num(currenttime)
            timevar = self.timevar.datenum
            u = self.linterp(timevar[i:i + 2], u, currenttime)
            v = self.linterp(timevar[i:i + 2], v, currenttime)
            w = self.linterp(timevar[i:i + 2], w, currenttime)
            if self.temp_name is not None and self.salt_name is not None:
                temp = self.linterp(timevar[i:i + 2], temp, currenttime)
                salt = self.linterp(timevar[i:i + 2], salt, currenttime)

            if self.temp_name is None:
                temp = np.nan
            if self.salt_name is None:
                salt = np.nan

        except Exception:
            logger.exception("Could not retrieve data.")
            raise

        return u, v, w, temp, salt

    def linterp(self, setx, sety, x):
        """
        Linear interpolation of model data values between time steps.
        Returns NaN when either the first y or first x value is NaN.
        """
        if math.isnan(sety[0]) or math.isnan(setx[0]):
            return np.nan
        return sety[0] + (x - setx[0]) * ((sety[1] - sety[0]) / (setx[1] - setx[0]))

    def run(self):
        """
        Force the particle through every timestep and return it.

        Opens the dataset, builds the shoreline/bathymetry helpers, loops
        over time indices applying each model in self.models and reacting
        boundaries, saving (and optionally publishing to Redis) the
        particle state at each step.
        """
        self.load_initial_dataset()

        redis_connection = None
        if self.redis_url is not None and self.redis_results_channel is not None:
            import redis
            redis_connection = redis.from_url(self.redis_url)

        # Setup shoreline
        self._shoreline = None
        if self.useshore is True:
            self._shoreline = Shoreline(path=self.shoreline_path, feature_name=self.shoreline_feature, point=self.release_location_centroid, spatialbuffer=self.shoreline_index_buffer)
            # Make sure we are not starting on land.  Raises exception if we are.
            self._shoreline.intersect(start_point=self.release_location_centroid, end_point=self.release_location_centroid)

        # Setup Bathymetry
        if self.usebathy is True:
            try:
                self._bathymetry = Bathymetry(file=self.bathy_path)
            except Exception:
                logger.exception("Could not load Bathymetry file: %s, using no Bathymetry for this run!" % self.bathy_path)
                self.usebathy = False

        # Calculate datetime at every timestep
        modelTimestep, newtimes = AsaTransport.get_time_objects_from_model_timesteps(self.times, start=self.start_time)

        if self.time_method == 'interp':
            time_indexs = self.timevar.nearest_index(newtimes, select='before')
        elif self.time_method == 'nearest':
            time_indexs = self.timevar.nearest_index(newtimes)
        else:
            # NOTE(review): falling through here leaves time_indexs unbound
            # and the assert below will raise NameError.  Preserved as-is.
            logger.warn("Method for computing u,v,w,temp,salt not supported!")
        try:
            assert len(newtimes) == len(time_indexs)
        except AssertionError:
            logger.exception("Time indexes are messed up. Need to have equal datetime and time indexes")
            raise

        # Keep track of how much time we spend in each area.
        tot_boundary_time = 0.
        tot_model_time = {}
        tot_read_data = 0.
        for m in self.models:
            tot_model_time[m.name] = 0.

        # Set the base conditions
        # If using Redis, send the results
        if redis_connection is not None:
            redis_connection.publish(self.redis_results_channel, json.dumps(self.particle.timestep_dump()))

        # Loop over timesteps.  We don't loop over the last time_index
        # because we need to query in the time_index and set the particle's
        # location as the 'newtime' object.
        for loop_i, i in enumerate(time_indexs[0:-1]):

            if self.active and self.active.value is False:
                raise ValueError("Particle exiting due to Failure.")

            newloc = None

            st = time.clock()
            # Get the variable data required by the models
            if self.time_method == 'nearest':
                u, v, w, temp, salt = self.get_nearest_data(i)
            elif self.time_method == 'interp':
                u, v, w, temp, salt = self.get_linterp_data(i, newtimes[loop_i])
            else:
                logger.warn("Method for computing u,v,w,temp,salt is unknown.  Only 'nearest' and 'interp' are supported.")
            tot_read_data += (time.clock() - st)

            # Get the bathy value at the particle's location
            if self.usebathy is True:
                bathymetry_value = self._bathymetry.get_depth(self.particle.location)
            else:
                bathymetry_value = -999999999999999

            # Age the particle by the modelTimestep (seconds)
            # 'Age' meaning the amount of time it has been forced.
            self.particle.age(seconds=modelTimestep[loop_i])

            # Loop over models - sort these in the order you want them to run
            for model in self.models:
                st = time.clock()
                movement = model.move(self.particle, u, v, w, modelTimestep[loop_i], temperature=temp, salinity=salt, bathymetry_value=bathymetry_value)
                newloc = Location4D(latitude=movement['latitude'], longitude=movement['longitude'], depth=movement['depth'], time=newtimes[loop_i + 1])
                # Fixed: originally charged time to 'm.name', a leftover
                # loop variable that always pointed at the last model.
                tot_model_time[model.name] += (time.clock() - st)
                if logger.isEnabledFor(logging.DEBUG):
                    logger.debug("%s - moved %.3f meters (horizontally) and %.3f meters (vertically) by %s with data from %s"
                                 % (self.particle.logstring(), movement['distance'], movement['vertical_distance'], model.__class__.__name__, newtimes[loop_i].isoformat()))
                if newloc:
                    st = time.clock()
                    self.boundary_interaction(particle=self.particle,
                                              starting=self.particle.location,
                                              ending=newloc,
                                              distance=movement['distance'],
                                              angle=movement['angle'],
                                              azimuth=movement['azimuth'],
                                              reverse_azimuth=movement['reverse_azimuth'],
                                              vertical_distance=movement['vertical_distance'],
                                              vertical_angle=movement['vertical_angle'])
                    tot_boundary_time += (time.clock() - st)
                if logger.isEnabledFor(logging.DEBUG):
                    logger.debug("%s - was forced by %s and is now at %s" % (self.particle.logstring(), model.__class__.__name__, self.particle.location.logstring()))

            self.particle.note = self.particle.outputstring()
            # Each timestep, save the particle's status and environmental variables.
            # This keeps fields such as temp, salt, halted, settled, and dead matched up with the number of timesteps
            self.particle.save()

            # If using Redis, send the results
            if redis_connection is not None:
                redis_connection.publish(self.redis_results_channel, json.dumps(self.particle.timestep_dump()))

        self.dataset.closenc()

        # We won't pull data for the last entry in locations, but we need to populate it with fill data.
        self.particle.fill_gap()

        if self.usebathy is True:
            self._bathymetry.close()

        if self.useshore is True:
            self._shoreline.close()

        logger.info(textwrap.dedent('''Particle %i Stats:
                          Data read: %f seconds
                          Model forcing: %s seconds
                          Boundary intersection: %f seconds''' % (self.particle.uid, tot_read_data, { s: '{:g} seconds'.format(f) for s, f in list(tot_model_time.items()) }, tot_boundary_time)))

        return self.particle

    def __call__(self, active):
        # `active` is the pool-wide "keep running" flag checked each timestep.
        self.active = active
        return self.run()
class ForceParticle(object):
    """
    Forces a single particle through every model timestep, coordinating with
    the DataController process (via shared multiprocessing values/locks) to
    keep a local NetCDF cache populated with the hydrodynamic data it needs.
    """
    from paegan.transport.shoreline import Shoreline
    from paegan.transport.bathymetry import Bathymetry

    def __str__(self):
        return self.part.__str__()

    def __init__(self, part, remotehydro, common_variables, timevar_pickle_path,
                 times, start_time, models, release_location_centroid, usebathy,
                 useshore, usesurface, get_data, n_run, read_lock, has_read_lock,
                 read_count, point_get, data_request_lock, has_data_request_lock,
                 reverse_distance=None, bathy=None, shoreline_path=None,
                 cache=None, time_method=None):
        """
        This is the task/class/object/job that forces an individual particle
        and communicates with the other particles and data controller for
        local cache updates.

        The get_data / read_count / point_get / *_lock arguments are shared
        multiprocessing primitives used to coordinate cache reads and writes
        with the DataController process.
        """
        assert cache is not None
        self.cache_path = cache
        self.bathy = bathy
        self.common_variables = common_variables
        self.localpath = self.cache_path
        self.release_location_centroid = release_location_centroid
        self.part = part
        self.times = times
        self.start_time = start_time
        self.models = models
        self.usebathy = usebathy
        self.useshore = useshore
        self.usesurface = usesurface
        self.get_data = get_data
        self.n_run = n_run
        self.read_lock = read_lock
        self.has_read_lock = has_read_lock
        self.read_count = read_count
        self.point_get = point_get
        self.data_request_lock = data_request_lock
        self.has_data_request_lock = has_data_request_lock
        self.shoreline_path = shoreline_path
        self.timevar_pickle_path = timevar_pickle_path

        # Set common variable names
        self.uname = common_variables.get("u", None)
        self.vname = common_variables.get("v", None)
        self.wname = common_variables.get("w", None)
        self.temp_name = common_variables.get("temp", None)
        self.salt_name = common_variables.get("salt", None)
        self.xname = common_variables.get("x", None)
        self.yname = common_variables.get("y", None)
        self.zname = common_variables.get("z", None)
        self.tname = common_variables.get("time", None)

        self.reverse_distance = reverse_distance

        if time_method is None:
            time_method = 'interp'
        self.time_method = time_method

    def need_data(self, i):
        """
        Method to test if cache contains the data that the particle needs.

        Returns True if the cache must be refreshed for time index ``i``,
        False if the cached values at the particle's location are usable.
        """
        logger.debug("Checking cache for data availability at %s." % self.part.location.logstring())

        try:
            # Tell the DataController that we are going to be reading from the file
            with self.read_lock:
                self.read_count.value += 1
                self.has_read_lock.append(os.getpid())

            self.dataset.opennc()

            # Test if the cache has the data we need.
            # If the point we request contains fill values, we need data.
            cached_lookup = self.dataset.get_values('domain', timeinds=[np.asarray([i])], point=self.part.location)
            logger.debug("Type of result: %s" % type(cached_lookup))
            logger.debug("Double mean of result: %s" % np.mean(np.mean(cached_lookup)))
            logger.debug("Type of Double mean of result: %s" % type(np.mean(np.mean(cached_lookup))))
            # A masked (fill) value at the particle's location means the cache
            # does not yet hold real data there.
            if type(np.mean(np.mean(cached_lookup))) == np.ma.core.MaskedConstant:
                need = True
                logger.debug("I NEED data. Got back: %s" % cached_lookup)
            else:
                need = False
                logger.debug("I DO NOT NEED data")
        except StandardError:
            # If the time index doesnt even exist, we need data.
            # NOTE(review): StandardError is Python 2 only; this module appears
            # to target Python 2.
            need = True
            logger.debug("I NEED data (no time index exists in cache)")
        finally:
            self.dataset.closenc()
            with self.read_lock:
                self.read_count.value -= 1
                self.has_read_lock.remove(os.getpid())

        return need  # return true if need data or false if dont

    def linterp(self, setx, sety, x):
        """
        Linear interp of model data values between time steps.
        Returns NaN when either endpoint is NaN.
        """
        if math.isnan(sety[0]) or math.isnan(setx[0]):
            return np.nan
        return sety[0] + (x - setx[0]) * ((sety[1] - sety[0]) / (setx[1] - setx[0]))

    def data_interp(self, i, timevar, currenttime):
        """
        Method to streamline request for data from cache.
        Uses linear interpolation between timesteps ``i`` and ``i+1`` to
        get u, v, w, temp, salt at ``currenttime``.
        """
        if self.active.value == True:
            while self.get_data.value == True:
                logger.debug("Waiting for DataController to release cache file so I can read from it...")
                timer.sleep(4)
                pass

        if self.need_data(i+1):
            # Acquire lock for asking for data
            self.data_request_lock.acquire()
            self.has_data_request_lock.value = os.getpid()
            try:
                # Do I still need data?
                if self.need_data(i+1):

                    # Tell the DataController that we are going to be reading from the file
                    with self.read_lock:
                        self.read_count.value += 1
                        self.has_read_lock.append(os.getpid())

                    # Open netcdf file on disk from commondataset
                    self.dataset.opennc()
                    # Get the indices for the current particle location
                    indices = self.dataset.get_indices('u', timeinds=[np.asarray([i-1])], point=self.part.location)
                    self.dataset.closenc()

                    with self.read_lock:
                        self.read_count.value -= 1
                        self.has_read_lock.remove(os.getpid())

                    # Override the time
                    # get the current time index data
                    self.point_get.value = [indices[0] + 1, indices[-2], indices[-1]]
                    # Request that the data controller update the cache
                    self.get_data.value = True
                    # Wait until the data controller is done
                    if self.active.value == True:
                        while self.get_data.value == True:
                            logger.debug("Waiting for DataController to update cache with the CURRENT time index")
                            timer.sleep(4)
                            pass

                    # get the next time index data
                    self.point_get.value = [indices[0] + 2, indices[-2], indices[-1]]
                    # Request that the data controller update the cache
                    self.get_data.value = True
                    # Wait until the data controller is done
                    if self.active.value == True:
                        while self.get_data.value == True:
                            logger.debug("Waiting for DataController to update cache with the NEXT time index")
                            timer.sleep(4)
                            pass
            except StandardError:
                logger.warn("Particle failed to request data correctly")
                raise
            finally:
                # Release lock for asking for data
                self.has_data_request_lock.value = -1
                self.data_request_lock.release()

        # Tell the DataController that we are going to be reading from the file
        with self.read_lock:
            self.read_count.value += 1
            self.has_read_lock.append(os.getpid())

        try:
            # Open netcdf file on disk from commondataset
            self.dataset.opennc()

            # FIX: pre-assign so temp/salt are always bound.  Previously, if
            # exactly one of temp_name/salt_name was configured, neither value
            # was fetched and the unconfigured fallback below did not cover the
            # other variable, causing an UnboundLocalError at return.
            temp = np.nan
            salt = np.nan

            # Grab data at time index closest to particle location
            u = [np.mean(np.mean(self.dataset.get_values('u', timeinds=[np.asarray([i])], point=self.part.location))),
                 np.mean(np.mean(self.dataset.get_values('u', timeinds=[np.asarray([i+1])], point=self.part.location)))]
            v = [np.mean(np.mean(self.dataset.get_values('v', timeinds=[np.asarray([i])], point=self.part.location))),
                 np.mean(np.mean(self.dataset.get_values('v', timeinds=[np.asarray([i+1])], point=self.part.location)))]

            # if there is vertical velocity in the dataset, get it
            if 'w' in self.dataset.nc.variables:
                w = [np.mean(np.mean(self.dataset.get_values('w', timeinds=[np.asarray([i])], point=self.part.location))),
                     np.mean(np.mean(self.dataset.get_values('w', timeinds=[np.asarray([i+1])], point=self.part.location)))]
            else:
                w = [0.0, 0.0]

            # If there is salt and temp in the dataset, get it
            if self.temp_name != None and self.salt_name != None:
                temp = [np.mean(np.mean(self.dataset.get_values('temp', timeinds=[np.asarray([i])], point=self.part.location))),
                        np.mean(np.mean(self.dataset.get_values('temp', timeinds=[np.asarray([i+1])], point=self.part.location)))]
                salt = [np.mean(np.mean(self.dataset.get_values('salt', timeinds=[np.asarray([i])], point=self.part.location))),
                        np.mean(np.mean(self.dataset.get_values('salt', timeinds=[np.asarray([i+1])], point=self.part.location)))]

            # Check for nans that occur in the ocean (happens because
            # of model and coastline resolution mismatches)
            if np.isnan(u).any() or np.isnan(v).any() or np.isnan(w).any():
                # Take the mean of the closest 4 points
                # If this includes nan which it will, result is nan
                uarray1 = self.dataset.get_values('u', timeinds=[np.asarray([i])], point=self.part.location, num=2)
                varray1 = self.dataset.get_values('v', timeinds=[np.asarray([i])], point=self.part.location, num=2)
                uarray2 = self.dataset.get_values('u', timeinds=[np.asarray([i+1])], point=self.part.location, num=2)
                varray2 = self.dataset.get_values('v', timeinds=[np.asarray([i+1])], point=self.part.location, num=2)

                if 'w' in self.dataset.nc.variables:
                    warray1 = self.dataset.get_values('w', timeinds=[np.asarray([i])], point=self.part.location, num=2)
                    warray2 = self.dataset.get_values('w', timeinds=[np.asarray([i+1])], point=self.part.location, num=2)
                    w = [warray1.mean(), warray2.mean()]
                else:
                    w = [0.0, 0.0]

                if self.temp_name != None and self.salt_name != None:
                    temparray1 = self.dataset.get_values('temp', timeinds=[np.asarray([i])], point=self.part.location, num=2)
                    saltarray1 = self.dataset.get_values('salt', timeinds=[np.asarray([i])], point=self.part.location, num=2)
                    temparray2 = self.dataset.get_values('temp', timeinds=[np.asarray([i+1])], point=self.part.location, num=2)
                    saltarray2 = self.dataset.get_values('salt', timeinds=[np.asarray([i+1])], point=self.part.location, num=2)
                    temp = [temparray1.mean(), temparray2.mean()]
                    salt = [saltarray1.mean(), saltarray2.mean()]

                u = [uarray1.mean(), uarray2.mean()]
                v = [varray1.mean(), varray2.mean()]

            # Linear interp of data between timesteps
            currenttime = date2num(currenttime)
            timevar = timevar.datenum
            u = self.linterp(timevar[i:i+2], u, currenttime)
            v = self.linterp(timevar[i:i+2], v, currenttime)
            w = self.linterp(timevar[i:i+2], w, currenttime)
            if self.temp_name != None and self.salt_name != None:
                temp = self.linterp(timevar[i:i+2], temp, currenttime)
                salt = self.linterp(timevar[i:i+2], salt, currenttime)

            if self.temp_name is None:
                temp = np.nan
            if self.salt_name is None:
                salt = np.nan

        except StandardError:
            logger.error("Error in data_interp method on ForceParticle")
            raise
        finally:
            self.dataset.closenc()
            with self.read_lock:
                self.read_count.value -= 1
                self.has_read_lock.remove(os.getpid())

        return u, v, w, temp, salt

    def data_nearest(self, i, currenttime):
        """
        Method to streamline request for data from cache.
        Uses the nearest time index ``i`` to get u, v, w, temp, salt.
        """
        if self.active.value == True:
            while self.get_data.value == True:
                logger.debug("Waiting for DataController to release cache file so I can read from it...")
                timer.sleep(4)
                pass

        if self.need_data(i):
            # Acquire lock for asking for data
            self.data_request_lock.acquire()
            self.has_data_request_lock.value = os.getpid()
            try:
                if self.need_data(i):

                    with self.read_lock:
                        self.read_count.value += 1
                        self.has_read_lock.append(os.getpid())

                    # Open netcdf file on disk from commondataset
                    self.dataset.opennc()
                    # Get the indices for the current particle location
                    indices = self.dataset.get_indices('u', timeinds=[np.asarray([i-1])], point=self.part.location)
                    self.dataset.closenc()

                    with self.read_lock:
                        self.read_count.value -= 1
                        self.has_read_lock.remove(os.getpid())

                    # Override the time
                    self.point_get.value = [indices[0]+1, indices[-2], indices[-1]]

                    # Request that the data controller update the cache
                    # DATA CONTOLLER STARTS
                    self.get_data.value = True
                    # Wait until the data controller is done
                    if self.active.value == True:
                        while self.get_data.value == True:
                            logger.debug("Waiting for DataController to update cache...")
                            timer.sleep(4)
                            pass
            except StandardError:
                raise
            finally:
                self.has_data_request_lock.value = -1
                self.data_request_lock.release()

        # Tell the DataController that we are going to be reading from the file
        with self.read_lock:
            self.read_count.value += 1
            self.has_read_lock.append(os.getpid())

        try:
            # Open netcdf file on disk from commondataset
            self.dataset.opennc()

            # FIX: pre-assign so temp/salt are always bound (see data_interp).
            temp = np.nan
            salt = np.nan

            # Grab data at time index closest to particle location
            u = np.mean(np.mean(self.dataset.get_values('u', timeinds=[np.asarray([i])], point=self.part.location)))
            v = np.mean(np.mean(self.dataset.get_values('v', timeinds=[np.asarray([i])], point=self.part.location)))

            # if there is vertical velocity in the dataset, get it
            if 'w' in self.dataset.nc.variables:
                # FIX: keyword was typo'd as 'timeindsf'
                w = np.mean(np.mean(self.dataset.get_values('w', timeinds=[np.asarray([i])], point=self.part.location)))
            else:
                w = 0.0

            # If there is salt and temp in the dataset, get it
            if self.temp_name != None and self.salt_name != None:
                temp = np.mean(np.mean(self.dataset.get_values('temp', timeinds=[np.asarray([i])], point=self.part.location)))
                salt = np.mean(np.mean(self.dataset.get_values('salt', timeinds=[np.asarray([i])], point=self.part.location)))

            # Check for nans that occur in the ocean (happens because
            # of model and coastline resolution mismatches)
            if np.isnan(u).any() or np.isnan(v).any() or np.isnan(w).any():
                # Take the mean of the closest 4 points
                # If this includes nan which it will, result is nan
                uarray1 = self.dataset.get_values('u', timeinds=[np.asarray([i])], point=self.part.location, num=2)
                varray1 = self.dataset.get_values('v', timeinds=[np.asarray([i])], point=self.part.location, num=2)

                if 'w' in self.dataset.nc.variables:
                    warray1 = self.dataset.get_values('w', timeinds=[np.asarray([i])], point=self.part.location, num=2)
                    w = warray1.mean()
                else:
                    w = 0.0

                if self.temp_name != None and self.salt_name != None:
                    temparray1 = self.dataset.get_values('temp', timeinds=[np.asarray([i])], point=self.part.location, num=2)
                    saltarray1 = self.dataset.get_values('salt', timeinds=[np.asarray([i])], point=self.part.location, num=2)
                    temp = temparray1.mean()
                    salt = saltarray1.mean()

                u = uarray1.mean()
                v = varray1.mean()

            if self.temp_name is None:
                temp = np.nan
            if self.salt_name is None:
                salt = np.nan

        except StandardError:
            logger.error("Error in data_nearest on ForceParticle")
            raise
        finally:
            self.dataset.closenc()
            with self.read_lock:
                self.read_count.value -= 1
                self.has_read_lock.remove(os.getpid())

        return u, v, w, temp, salt

    def __call__(self, proc, active):
        """
        Run the particle: wait for the DataController, open the cache,
        then force the particle through every timestep, returning the
        finished particle.
        """
        self.active = active

        if self.usebathy == True:
            self._bathymetry = Bathymetry(file=self.bathy)

        self._shoreline = None
        if self.useshore == True:
            self._shoreline = Shoreline(file=self.shoreline_path, point=self.release_location_centroid, spatialbuffer=0.25)
            # Make sure we are not starting on land.  Raises exception if we are.
            self._shoreline.intersect(start_point=self.release_location_centroid, end_point=self.release_location_centroid)

        self.proc = proc
        part = self.part

        if self.active.value == True:
            while self.get_data.value == True:
                logger.debug("Waiting for DataController to start...")
                timer.sleep(10)
                pass

        # Initialize commondataset of local cache, then
        # close the related netcdf file
        try:
            with self.read_lock:
                self.read_count.value += 1
                self.has_read_lock.append(os.getpid())
            self.dataset = CommonDataset.open(self.localpath)
            self.dataset.closenc()
        except StandardError:
            logger.warn("No cache file: %s. Particle exiting" % self.localpath)
            raise
        finally:
            with self.read_lock:
                self.read_count.value -= 1
                self.has_read_lock.remove(os.getpid())

        # Calculate datetime at every timestep
        modelTimestep, newtimes = AsaTransport.get_time_objects_from_model_timesteps(self.times, start=self.start_time)

        # Load Timevar from pickle serialization
        with open(self.timevar_pickle_path, "rb") as f:
            timevar = pickle.load(f)

        if self.time_method == 'interp':
            time_indexs = timevar.nearest_index(newtimes, select='before')
        elif self.time_method == 'nearest':
            time_indexs = timevar.nearest_index(newtimes)
        else:
            logger.warn("Method for computing u,v,w,temp,salt not supported!")
        try:
            assert len(newtimes) == len(time_indexs)
        except AssertionError:
            logger.error("Time indexes are messed up. Need to have equal datetime and time indexes")
            raise

        # loop over timesteps
        # We don't loop over the last time_index because
        # we need to query in the time_index and set the particle's
        # location as the 'newtime' object.
        for loop_i, i in enumerate(time_indexs[0:-1]):

            if self.active.value == False:
                raise ValueError("Particle exiting due to Failure.")

            newloc = None

            # Get the variable data required by the models
            if self.time_method == 'nearest':
                u, v, w, temp, salt = self.data_nearest(i, newtimes[loop_i])
            elif self.time_method == 'interp':
                u, v, w, temp, salt = self.data_interp(i, timevar, newtimes[loop_i])
            else:
                logger.warn("Method for computing u,v,w,temp,salt not supported!")

            # Get the bathy value at the particles location
            if self.usebathy == True:
                bathymetry_value = self._bathymetry.get_depth(part.location)
            else:
                bathymetry_value = -999999999999999

            # Age the particle by the modelTimestep (seconds)
            # 'Age' meaning the amount of time it has been forced.
            part.age(seconds=modelTimestep[loop_i])

            # loop over models - sort these in the order you want them to run
            for model in self.models:
                movement = model.move(part, u, v, w, modelTimestep[loop_i], temperature=temp, salinity=salt, bathymetry_value=bathymetry_value)
                newloc = Location4D(latitude=movement['latitude'], longitude=movement['longitude'], depth=movement['depth'], time=newtimes[loop_i+1])
                logger.debug("%s - moved %.3f meters (horizontally) and %.3f meters (vertically) by %s with data from %s" % (part.logstring(), movement['distance'], movement['vertical_distance'], model.__class__.__name__, newtimes[loop_i].isoformat()))
                if newloc:
                    self.boundary_interaction(particle=part, starting=part.location, ending=newloc,
                                              distance=movement['distance'], angle=movement['angle'],
                                              azimuth=movement['azimuth'], reverse_azimuth=movement['reverse_azimuth'],
                                              vertical_distance=movement['vertical_distance'], vertical_angle=movement['vertical_angle'])
                logger.debug("%s - was forced by %s and is now at %s" % (part.logstring(), model.__class__.__name__, part.location.logstring()))

            part.note = part.outputstring()
            # Each timestep, save the particles status and environmental variables.
            # This keep fields such as temp, salt, halted, settled, and dead matched up with the number of timesteps
            part.save()

        # We won't pull data for the last entry in locations, but we need to populate it with fill data.
        part.fill_environment_gap()

        if self.usebathy == True:
            self._bathymetry.close()
        if self.useshore == True:
            self._shoreline.close()

        return part

    def boundary_interaction(self, **kwargs):
        """
        Apply shoreline, bathymetry, and sea-surface constraints to a
        particle's proposed move, then set the particle's location to the
        (possibly adjusted) ending point.

        Required kwargs: particle, starting, ending.
        Optional kwargs (forwarded to Shoreline.react): distance, angle,
        azimuth, reverse_azimuth.
        """
        particle = kwargs.pop('particle')
        starting = kwargs.pop('starting')
        ending = kwargs.pop('ending')

        # shoreline
        if self.useshore:
            intersection_point = self._shoreline.intersect(start_point=starting.point, end_point=ending.point)
            if intersection_point:
                # Set the intersection point.
                # NOTE(review): starting.time + (ending.time - starting.time)
                # simplifies to ending.time — possibly a fraction of the
                # timestep was intended here; confirm before changing.
                hitpoint = Location4D(point=intersection_point['point'], time=starting.time + (ending.time - starting.time))
                particle.location = hitpoint

                # This relies on the shoreline to put the particle in water and not on shore.
                resulting_point = self._shoreline.react(start_point=starting,
                                                        end_point=ending,
                                                        hit_point=hitpoint,
                                                        reverse_distance=self.reverse_distance,
                                                        feature=intersection_point['feature'],
                                                        distance=kwargs.get('distance'),
                                                        angle=kwargs.get('angle'),
                                                        azimuth=kwargs.get('azimuth'),
                                                        reverse_azimuth=kwargs.get('reverse_azimuth'))
                ending.latitude = resulting_point.latitude
                ending.longitude = resulting_point.longitude
                ending.depth = resulting_point.depth
                logger.debug("%s - hit the shoreline at %s. Setting location to %s." % (particle.logstring(), hitpoint.logstring(), ending.logstring()))

        # bathymetry
        if self.usebathy:
            if not particle.settled:
                bintersect = self._bathymetry.intersect(start_point=starting, end_point=ending)
                if bintersect:
                    pt = self._bathymetry.react(type='reverse', start_point=starting, end_point=ending)
                    logger.debug("%s - hit the bottom at %s. Setting location to %s." % (particle.logstring(), ending.logstring(), pt.logstring()))
                    ending.latitude = pt.latitude
                    ending.longitude = pt.longitude
                    ending.depth = pt.depth

        # sea-surface: clamp the particle at the surface
        if self.usesurface:
            if ending.depth > 0:
                #logger.debug("%s - rose out of the water. Setting depth to 0." % particle.logstring())
                ending.depth = 0

        particle.location = ending
        return
class BaseForcer(object): def __init__(self, hydrodataset, **kwargs): """ part, common_variables, timevar, times, start_time, models, release_location_centroid, usebathy, useshore, usesurface, get_data, n_run, read_lock, has_read_lock, read_count, point_get, data_request_lock, has_data_request_lock, reverse_distance=None, bathy=None, shoreline_path=None, shoreline_feature=None, time_method=None, caching=None, redis_url=None, redis_results_channel=None, shoreline_index_buffer=None): This is the task/class/object/job that forces an individual particle and communicates with the other particles and data controller for local cache updates """ assert hydrodataset is not None # Common parameters self.hydrodataset = hydrodataset self.bathy_path = kwargs.get("bathy_path") self.release_location_centroid = kwargs.get( "release_location_centroid") self.particle = kwargs.get("particle") self.times = kwargs.get("times") self.timevar = kwargs.get("timevar", None) self.start_time = kwargs.get("start_time") self.models = kwargs.get("models", []) self.usebathy = kwargs.get("usebathy", False) self.useshore = kwargs.get("useshore", False) self.usesurface = kwargs.get("usesurface", True) self.shoreline_path = kwargs.get("shoreline_path") self.shoreline_feature = kwargs.get("shoreline_feature", None) self.shoreline_index_buffer = kwargs.get("shoreline_index_buffer", 0.1) self.time_method = kwargs.get("time_method", "nearest") self.reverse_distance = kwargs.get("reverse_distance", 500) # Redis for results self.redis_url = kwargs.get("redis_url", None) self.redis_results_channel = kwargs.get("redis_results_channel", None) # Set common variable names self.common_variables = kwargs.get("common_variables") self.uname = self.common_variables.get("u", None) self.vname = self.common_variables.get("v", None) self.wname = self.common_variables.get("w", None) self.temp_name = self.common_variables.get("temp", None) self.salt_name = self.common_variables.get("salt", None) self.xname = 
self.common_variables.get("x", None) self.yname = self.common_variables.get("y", None) self.zname = self.common_variables.get("z", None) self.tname = self.common_variables.get("time", None) self.active = None def load_initial_dataset(self): """ Initialize self.dataset, then close it A cacher will have to wrap this in locks, while a straight runner will not. """ try: self.dataset = CommonDataset.open(self.hydrodataset) if self.timevar is None: self.timevar = self.dataset.gettimevar( self.common_variables.get("u")) except Exception: logger.warn("No source dataset: %s. Particle exiting" % self.hydrodataset) raise def boundary_interaction(self, **kwargs): """ Returns a list of Location4D objects """ particle = kwargs.pop('particle') starting = kwargs.pop('starting') ending = kwargs.pop('ending') # shoreline if self.useshore: intersection_point = self._shoreline.intersect( start_point=starting.point, end_point=ending.point) if intersection_point is not None: # Set the intersection point. hitpoint = Location4D(point=intersection_point['point'], time=starting.time + (ending.time - starting.time)) particle.location = hitpoint # This relies on the shoreline to put the particle in water and not on shore. resulting_point = self._shoreline.react( start_point=starting, end_point=ending, hit_point=hitpoint, reverse_distance=self.reverse_distance, feature=intersection_point['feature'], distance=kwargs.get('distance'), angle=kwargs.get('angle'), azimuth=kwargs.get('azimuth'), reverse_azimuth=kwargs.get('reverse_azimuth')) ending.latitude = resulting_point.latitude ending.longitude = resulting_point.longitude ending.depth = resulting_point.depth if logger.isEnabledFor(logging.DEBUG): logger.debug( "%s - hit the shoreline at %s. Setting location to %s." 
% (particle.logstring(), hitpoint.logstring(), ending.logstring())) # bathymetry if self.usebathy: if not particle.settled: bintersect = self._bathymetry.intersect(start_point=starting, end_point=ending) if bintersect: pt = self._bathymetry.react(type='reverse', start_point=starting, end_point=ending) if logger.isEnabledFor(logging.DEBUG): logger.debug( "%s - hit the bottom at %s. Setting location to %s." % (particle.logstring(), ending.logstring(), pt.logstring())) ending.latitude = pt.latitude ending.longitude = pt.longitude ending.depth = pt.depth # sea-surface if self.usesurface: if ending.depth > 0: if logger.isEnabledFor(logging.DEBUG): logger.debug( "%s - rose out of the water. Setting depth to 0." % particle.logstring()) ending.depth = 0 particle.location = ending def get_nearest_data(self, i): """ Note: self.dataset.opennc() must be called before calling this function. This is because the caching forcer must close it everytime, while a non caching forcer can leave the dataset open. 
""" try: # Grab data at time index closest to particle location u = np.mean( np.mean( self.dataset.get_values('u', timeinds=[np.asarray([i])], point=self.particle.location))) v = np.mean( np.mean( self.dataset.get_values('v', timeinds=[np.asarray([i])], point=self.particle.location))) # if there is vertical velocity inthe dataset, get it if 'w' in self.dataset.nc.variables: w = np.mean( np.mean( self.dataset.get_values('w', timeindsf=[np.asarray([i])], point=self.particle.location))) else: w = 0.0 # If there is salt and temp in the dataset, get it if self.temp_name is not None and self.salt_name is not None: temp = np.mean( np.mean( self.dataset.get_values('temp', timeinds=[np.asarray([i])], point=self.particle.location))) salt = np.mean( np.mean( self.dataset.get_values('salt', timeinds=[np.asarray([i])], point=self.particle.location))) # Check for nans that occur in the ocean (happens because # of model and coastline resolution mismatches) if np.isnan(u).any() or np.isnan(v).any() or np.isnan(w).any(): # Take the mean of the closest 4 points # If this includes nan which it will, result is nan uarray1 = self.dataset.get_values('u', timeinds=[np.asarray([i])], point=self.particle.location, num=2) varray1 = self.dataset.get_values('v', timeinds=[np.asarray([i])], point=self.particle.location, num=2) if 'w' in self.dataset.nc.variables: warray1 = self.dataset.get_values( 'w', timeinds=[np.asarray([i])], point=self.particle.location, num=2) w = warray1.mean() else: w = 0.0 if self.temp_name is not None and self.salt_name is not None: temparray1 = self.dataset.get_values( 'temp', timeinds=[np.asarray([i])], point=self.particle.location, num=2) saltarray1 = self.dataset.get_values( 'salt', timeinds=[np.asarray([i])], point=self.particle.location, num=2) temp = temparray1.mean() salt = saltarray1.mean() u = uarray1.mean() v = varray1.mean() if self.temp_name is None: temp = np.nan if self.salt_name is None: salt = np.nan except Exception: logger.exception("Could not 
retrieve data.") raise return u, v, w, temp, salt def get_linterp_data(self, i, currenttime): """ Note: self.dataset.opennc() must be called before calling this function. This is because the caching forcer must close it everytime, while a non caching forcer can leave the dataset open. """ try: # Grab data at time index closest to particle location u = [ np.mean( np.mean( self.dataset.get_values( 'u', timeinds=[np.asarray([i])], point=self.particle.location))), np.mean( np.mean( self.dataset.get_values('u', timeinds=[np.asarray([i + 1])], point=self.particle.location))) ] v = [ np.mean( np.mean( self.dataset.get_values( 'v', timeinds=[np.asarray([i])], point=self.particle.location))), np.mean( np.mean( self.dataset.get_values('v', timeinds=[np.asarray([i + 1])], point=self.particle.location))) ] # if there is vertical velocity inthe dataset, get it if 'w' in self.dataset.nc.variables: w = [ np.mean( np.mean( self.dataset.get_values( 'w', timeinds=[np.asarray([i])], point=self.particle.location))), np.mean( np.mean( self.dataset.get_values( 'w', timeinds=[np.asarray([i + 1])], point=self.particle.location))) ] else: w = [0.0, 0.0] # If there is salt and temp in the dataset, get it if self.temp_name is not None and self.salt_name is not None: temp = [ np.mean( np.mean( self.dataset.get_values( 'temp', timeinds=[np.asarray([i])], point=self.particle.location))), np.mean( np.mean( self.dataset.get_values( 'temp', timeinds=[np.asarray([i + 1])], point=self.particle.location))) ] salt = [ np.mean( np.mean( self.dataset.get_values( 'salt', timeinds=[np.asarray([i])], point=self.particle.location))), np.mean( np.mean( self.dataset.get_values( 'salt', timeinds=[np.asarray([i + 1])], point=self.particle.location))) ] # Check for nans that occur in the ocean (happens because # of model and coastline resolution mismatches) if np.isnan(u).any() or np.isnan(v).any() or np.isnan(w).any(): # Take the mean of the closest 4 points # If this includes nan which it will, result is nan 
uarray1 = self.dataset.get_values('u', timeinds=[np.asarray([i])], point=self.particle.location, num=2) varray1 = self.dataset.get_values('v', timeinds=[np.asarray([i])], point=self.particle.location, num=2) uarray2 = self.dataset.get_values( 'u', timeinds=[np.asarray([i + 1])], point=self.particle.location, num=2) varray2 = self.dataset.get_values( 'v', timeinds=[np.asarray([i + 1])], point=self.particle.location, num=2) if 'w' in self.dataset.nc.variables: warray1 = self.dataset.get_values( 'w', timeinds=[np.asarray([i])], point=self.particle.location, num=2) warray2 = self.dataset.get_values( 'w', timeinds=[np.asarray([i + 1])], point=self.particle.location, num=2) w = [warray1.mean(), warray2.mean()] else: w = [0.0, 0.0] if self.temp_name is not None and self.salt_name is not None: temparray1 = self.dataset.get_values( 'temp', timeinds=[np.asarray([i])], point=self.particle.location, num=2) saltarray1 = self.dataset.get_values( 'salt', timeinds=[np.asarray([i])], point=self.particle.location, num=2) temparray2 = self.dataset.get_values( 'temp', timeinds=[np.asarray([i + 1])], point=self.particle.location, num=2) saltarray2 = self.dataset.get_values( 'salt', timeinds=[np.asarray([i + 1])], point=self.particle.location, num=2) temp = [temparray1.mean(), temparray2.mean()] salt = [saltarray1.mean(), saltarray2.mean()] u = [uarray1.mean(), uarray2.mean()] v = [varray1.mean(), varray2.mean()] # Linear interp of data between timesteps currenttime = date2num(currenttime) timevar = self.timevar.datenum u = self.linterp(timevar[i:i + 2], u, currenttime) v = self.linterp(timevar[i:i + 2], v, currenttime) w = self.linterp(timevar[i:i + 2], w, currenttime) if self.temp_name is not None and self.salt_name is not None: temp = self.linterp(timevar[i:i + 2], temp, currenttime) salt = self.linterp(timevar[i:i + 2], salt, currenttime) if self.temp_name is None: temp = np.nan if self.salt_name is None: salt = np.nan except Exception: logger.exception("Could not retrieve data.") 
raise return u, v, w, temp, salt def linterp(self, setx, sety, x): """ Linear interp of model data values between time steps """ if math.isnan(sety[0]) or math.isnan(setx[0]): return np.nan return sety[0] + (x - setx[0]) * ((sety[1] - sety[0]) / (setx[1] - setx[0])) def run(self): self.load_initial_dataset() redis_connection = None if self.redis_url is not None and self.redis_results_channel is not None: import redis redis_connection = redis.from_url(self.redis_url) # Setup shoreline self._shoreline = None if self.useshore is True: self._shoreline = Shoreline( path=self.shoreline_path, feature_name=self.shoreline_feature, point=self.release_location_centroid, spatialbuffer=self.shoreline_index_buffer) # Make sure we are not starting on land. Raises exception if we are. self._shoreline.intersect( start_point=self.release_location_centroid, end_point=self.release_location_centroid) # Setup Bathymetry if self.usebathy is True: try: self._bathymetry = Bathymetry(file=self.bathy_path) except Exception: logger.exception( "Could not load Bathymetry file: %s, using no Bathymetry for this run!" % self.bathy_path) self.usebathy = False # Calculate datetime at every timestep modelTimestep, newtimes = AsaTransport.get_time_objects_from_model_timesteps( self.times, start=self.start_time) if self.time_method == 'interp': time_indexs = self.timevar.nearest_index(newtimes, select='before') elif self.time_method == 'nearest': time_indexs = self.timevar.nearest_index(newtimes) else: logger.warn("Method for computing u,v,w,temp,salt not supported!") try: assert len(newtimes) == len(time_indexs) except AssertionError: logger.exception( "Time indexes are messed up. Need to have equal datetime and time indexes" ) raise # Keep track of how much time we spend in each area. tot_boundary_time = 0. tot_model_time = {} tot_read_data = 0. for m in self.models: tot_model_time[m.name] = 0. 
# Set the base conditions # If using Redis, send the results if redis_connection is not None: redis_connection.publish(self.redis_results_channel, json.dumps(self.particle.timestep_dump())) # loop over timesteps # We don't loop over the last time_index because # we need to query in the time_index and set the particle's # location as the 'newtime' object. for loop_i, i in enumerate(time_indexs[0:-1]): if self.active and self.active.value is False: raise ValueError("Particle exiting due to Failure.") newloc = None st = time.clock() # Get the variable data required by the models if self.time_method == 'nearest': u, v, w, temp, salt = self.get_nearest_data(i) elif self.time_method == 'interp': u, v, w, temp, salt = self.get_linterp_data( i, newtimes[loop_i]) else: logger.warn( "Method for computing u,v,w,temp,salt is unknown. Only 'nearest' and 'interp' are supported." ) tot_read_data += (time.clock() - st) # Get the bathy value at the particles location if self.usebathy is True: bathymetry_value = self._bathymetry.get_depth( self.particle.location) else: bathymetry_value = -999999999999999 # Age the particle by the modelTimestep (seconds) # 'Age' meaning the amount of time it has been forced. 
self.particle.age(seconds=modelTimestep[loop_i]) # loop over models - sort these in the order you want them to run for model in self.models: st = time.clock() movement = model.move(self.particle, u, v, w, modelTimestep[loop_i], temperature=temp, salinity=salt, bathymetry_value=bathymetry_value) newloc = Location4D(latitude=movement['latitude'], longitude=movement['longitude'], depth=movement['depth'], time=newtimes[loop_i + 1]) tot_model_time[m.name] += (time.clock() - st) if logger.isEnabledFor(logging.DEBUG): logger.debug( "%s - moved %.3f meters (horizontally) and %.3f meters (vertically) by %s with data from %s" % (self.particle.logstring(), movement['distance'], movement['vertical_distance'], model.__class__.__name__, newtimes[loop_i].isoformat())) if newloc: st = time.clock() self.boundary_interaction( particle=self.particle, starting=self.particle.location, ending=newloc, distance=movement['distance'], angle=movement['angle'], azimuth=movement['azimuth'], reverse_azimuth=movement['reverse_azimuth'], vertical_distance=movement['vertical_distance'], vertical_angle=movement['vertical_angle']) tot_boundary_time += (time.clock() - st) if logger.isEnabledFor(logging.DEBUG): logger.debug( "%s - was forced by %s and is now at %s" % (self.particle.logstring(), model.__class__.__name__, self.particle.location.logstring())) self.particle.note = self.particle.outputstring() # Each timestep, save the particles status and environmental variables. # This keep fields such as temp, salt, halted, settled, and dead matched up with the number of timesteps self.particle.save() # If using Redis, send the results if redis_connection is not None: redis_connection.publish( self.redis_results_channel, json.dumps(self.particle.timestep_dump())) self.dataset.closenc() # We won't pull data for the last entry in locations, but we need to populate it with fill data. 
self.particle.fill_gap() if self.usebathy is True: self._bathymetry.close() if self.useshore is True: self._shoreline.close() logger.info( textwrap.dedent('''Particle %i Stats: Data read: %f seconds Model forcing: %s seconds Boundary intersection: %f seconds''' % (self.particle.uid, tot_read_data, { s: '{:g} seconds'.format(f) for s, f in list(tot_model_time.items()) }, tot_boundary_time))) return self.particle def __call__(self, active): self.active = active return self.run()