def test_diel_migration(self):
    """Run a week-long diel (sun-cycle) migration behavior and write all output formats."""
    self.log.logger.info("**************************************")
    self.log.logger.info("Running: test_diel_migration")
    num_steps = 168
    num_particles = 4
    start_time = datetime(2013, 4, 1, 0, tzinfo=pytz.utc)

    # Behavior: load the diel sun-cycle configuration.
    # Use a context manager so the file handle is closed (the original leaked it).
    behavior_path = os.path.normpath(
        os.path.join(os.path.dirname(__file__), "./resources/files/diel_suncycles.json"))
    with open(behavior_path) as behavior_file:
        behavior_config = behavior_file.read()
    lb = LarvaBehavior(json=behavior_config)

    models = [self.transport]
    models.append(lb)

    model = ModelController(latitude=60.68, longitude=-146.42, depth=self.start_depth,
                            start=start_time, step=self.time_step, nstep=num_steps,
                            npart=num_particles, models=models, use_bathymetry=True,
                            use_shoreline=True, time_chunk=24, horiz_chunk=2,
                            time_method='nearest')

    # Fresh output directory per test run.
    output_path = os.path.join(self.output_path, "test_diel_migration")
    shutil.rmtree(output_path, ignore_errors=True)
    os.makedirs(output_path)
    output_formats = ['Shapefile', 'NetCDF', 'Trackline']

    cache_path = os.path.join(self.cache_path, "test_diel_migration.nc")
    model.run("http://thredds.axiomalaska.com/thredds/dodsC/PWS_DAS.nc",
              bathy=self.bathy_file, cache=cache_path,
              output_path=output_path, output_formats=output_formats)
def test_interp(self):
    """Interpolated-in-time transport run against the PWS forecast, all output formats."""
    self.log.logger.info("**************************************")
    self.log.logger.info("Running: test_interp")

    forcing = [self.transport]
    step_count = 100

    # Start from a clean output directory for this test.
    out_dir = os.path.join(self.output_path, "test_interp")
    shutil.rmtree(out_dir, ignore_errors=True)
    os.makedirs(out_dir)
    formats = ['Shapefile', 'NetCDF', 'Trackline']

    controller = ModelController(latitude=self.start_lat,
                                 longitude=self.start_lon,
                                 depth=self.start_depth,
                                 start=self.start_time,
                                 step=self.time_step,
                                 nstep=step_count,
                                 npart=self.num_particles,
                                 models=forcing,
                                 use_bathymetry=True,
                                 use_shoreline=True,
                                 time_chunk=10,
                                 horiz_chunk=4)

    cache_file = os.path.join(self.cache_path, "test_interp.nc")
    controller.run("http://thredds.axiomalaska.com/thredds/dodsC/PWS_L2_FCST.nc",
                   bathy=self.bathy_file,
                   cache=cache_file,
                   output_path=out_dir,
                   output_formats=formats)
def test_timechunk_greater_than_timestep(self):
    """Exercise a run whose time chunk (24) exceeds the number of steps requested."""
    self.log.logger.info("**************************************")
    self.log.logger.info("Running: test_timechunk_greater_than_timestep")

    # 6 days
    step_count = 10
    particle_count = 2
    forcing = [self.transport]

    controller = ModelController(latitude=self.start_lat,
                                 longitude=self.start_lon,
                                 depth=self.start_depth,
                                 start=self.start_time,
                                 step=self.time_step,
                                 nstep=step_count,
                                 npart=particle_count,
                                 models=forcing,
                                 use_bathymetry=True,
                                 use_shoreline=True,
                                 time_chunk=24,
                                 horiz_chunk=2)

    cache_file = os.path.join(self.cache_path, "test_timechunk_greater_than_timestep.nc")
    controller.run("http://thredds.axiomalaska.com/thredds/dodsC/PWS_L2_FCST.nc",
                   bathy=self.bathy_file,
                   cache=cache_file)
def test_run_from_polygon(self):
    """Seed particles from a small polygon (buffered point) instead of a single point."""
    self.log.logger.info("**************************************")
    self.log.logger.info("Running: test_run_from_polygon")

    forcing = [self.transport]
    # A tiny release polygon around the configured start location.
    release_area = Point(self.start_lon, self.start_lat, self.start_depth).buffer(0.001)

    controller = ModelController(geometry=release_area,
                                 start=self.start_time,
                                 step=self.time_step,
                                 nstep=self.num_steps,
                                 npart=self.num_particles,
                                 models=forcing,
                                 use_bathymetry=False,
                                 use_shoreline=True,
                                 time_chunk=10,
                                 horiz_chunk=4)

    cache_file = os.path.join(os.path.dirname(__file__), "..",
                              "paegan/transport/_cache/test_run_from_polygon.nc")
    controller.run("http://thredds.axiomalaska.com/thredds/dodsC/PWS_L2_FCST.nc",
                   cache=cache_file)
def run(self, hydrodatasets, **kwargs):
    """Run a single-particle virtual drifter from each real drifter's release point.

    Parameters:
        hydrodatasets: iterable of hydrodynamic dataset paths/URLs; each drifter's
            model is run once per dataset.
    Keyword args:
        winddataset: optional wind dataset path/URL forwarded to ModelController.run.

    Side effect: attaches the simulated particle to each drifter via
    drifter.add_virtual_drifter().
    """
    _wind_path = kwargs.get("winddataset", None)
    num_particles = 1
    # Pure advection: no horizontal or vertical dispersion.
    models = [Transport(horizDisp=0., vertDisp=0.)]
    # Identity comparison with None is the correct idiom (was `!= None`).
    if self._wind_model is not None:
        models.append(WindForcing())

    for drifter in self.drifters:
        # Seed the virtual particle at the drifter's first known location.
        start_location4d = drifter.locations[0]
        start_time = start_location4d.time
        start_lat = start_location4d.latitude
        start_lon = start_location4d.longitude
        start_depth = start_location4d.depth
        time_step = self.time_step
        num_steps = self.num_steps

        model = ModelController(latitude=start_lat, longitude=start_lon,
                                depth=start_depth, start=start_time,
                                step=time_step, nstep=num_steps,
                                npart=num_particles, models=models,
                                use_bathymetry=False, use_shoreline=True,
                                time_chunk=10, horiz_chunk=10)

        for hydromodel in hydrodatasets:
            model.run(hydromodel, wind=_wind_path)
            drifter.add_virtual_drifter(model.particles[0])
def test_bad_dataset(self):
    """A nonexistent dataset URL must raise DataControllerError."""
    self.log.logger.info("**************************************")
    self.log.logger.info("Running: test_bad_dataset")

    forcing = [self.transport]
    controller = ModelController(latitude=self.start_lat,
                                 longitude=self.start_lon,
                                 depth=self.start_depth,
                                 start=self.start_time,
                                 step=self.time_step,
                                 nstep=self.num_steps,
                                 npart=self.num_particles,
                                 models=forcing,
                                 use_bathymetry=False,
                                 use_shoreline=True,
                                 time_chunk=10,
                                 horiz_chunk=4,
                                 time_method='nearest')

    cache_file = os.path.join(self.cache_path, "test_bad_dataset.nc")
    with raises(DataControllerError):
        controller.run("http://asascience.com/thisisnotadataset.nc", cache=cache_file)
def test_start_on_land(self):
    """Releasing particles on land must raise ModelError."""
    self.log.logger.info("**************************************")
    self.log.logger.info("Running: test_start_on_land")

    # Set the start position and time for the models: a point known to be on land.
    land_lat = 60.15551950079041
    land_lon = -148.1999130249019
    forcing = [self.transport]

    controller = ModelController(latitude=land_lat,
                                 longitude=land_lon,
                                 depth=self.start_depth,
                                 start=self.start_time,
                                 step=self.time_step,
                                 nstep=self.num_steps,
                                 npart=self.num_particles,
                                 models=forcing,
                                 use_bathymetry=False,
                                 use_shoreline=True,
                                 time_chunk=10,
                                 horiz_chunk=4,
                                 time_method='nearest')

    cache_file = os.path.join(self.cache_path, "test_start_on_land.nc")
    with raises(ModelError):
        controller.run("http://thredds.axiomalaska.com/thredds/dodsC/PWS_L2_FCST.nc",
                       cache=cache_file)
def test_quick_settlement(self):
    """Run a fast-settling larval behavior for one day and write all output formats."""
    self.log.logger.info("**************************************")
    self.log.logger.info("Running: test_quick_settlement")
    num_steps = 24
    num_particles = 4

    # Behavior: load the quick-settle configuration.
    # Use a context manager so the file handle is closed (the original leaked it).
    behavior_path = os.path.normpath(
        os.path.join(os.path.dirname(__file__), "./resources/files/behavior_quick_settle.json"))
    with open(behavior_path) as behavior_file:
        behavior_config = behavior_file.read()
    lb = LarvaBehavior(json=behavior_config)

    models = [self.transport]
    models.append(lb)

    model = ModelController(latitude=self.start_lat, longitude=self.start_lon,
                            depth=self.start_depth, start=self.start_time,
                            step=self.time_step, nstep=num_steps,
                            npart=num_particles, models=models, use_bathymetry=True,
                            use_shoreline=True, time_chunk=12, horiz_chunk=2,
                            time_method='nearest')

    # Fresh output directory per test run.
    output_path = os.path.join(self.output_path, "test_quick_settlement")
    shutil.rmtree(output_path, ignore_errors=True)
    os.makedirs(output_path)
    output_formats = ['Shapefile', 'NetCDF', 'Trackline', 'Pickle']

    cache_path = os.path.join(self.cache_path, "test_quick_settlement.nc")
    model.run("http://thredds.axiomalaska.com/thredds/dodsC/PWS_L2_FCST.nc",
              bathy=self.bathy_file, cache=cache_path,
              output_path=output_path, output_formats=output_formats)
def test_nearest(self):
    """Transport run using nearest-neighbor time interpolation."""
    self.log.logger.info("**************************************")
    self.log.logger.info("Running: test_nearest")

    forcing = [self.transport]
    controller = ModelController(latitude=self.start_lat,
                                 longitude=self.start_lon,
                                 depth=self.start_depth,
                                 start=self.start_time,
                                 step=self.time_step,
                                 nstep=self.num_steps,
                                 npart=self.num_particles,
                                 models=forcing,
                                 use_bathymetry=False,
                                 use_shoreline=True,
                                 time_chunk=10,
                                 horiz_chunk=4,
                                 time_method='nearest')

    cache_file = os.path.join(self.cache_path, "test_nearest.nc")
    controller.run("http://thredds.axiomalaska.com/thredds/dodsC/PWS_L2_FCST.nc",
                   cache=cache_file)
def test_run_from_multiple_files_without_cache(self):
    """Run against a local multi-file glob with caching disabled."""
    self.log.logger.info("**************************************")
    self.log.logger.info("Running: test_run_from_multiple_files_without_cache")

    forcing = [self.transport]
    release_point = Point(self.start_lon, self.start_lat, self.start_depth)

    controller = ModelController(geometry=release_point,
                                 start=datetime(2014, 1, 2, 0),
                                 step=self.time_step,
                                 nstep=self.num_steps,
                                 npart=self.num_particles,
                                 models=forcing,
                                 use_bathymetry=False,
                                 use_shoreline=True,
                                 time_chunk=10,
                                 horiz_chunk=4)

    out_dir = os.path.join(self.output_path, "test_run_from_multiple_files_without_cache")
    controller.run("/data/lm/tests/pws_das_2014*.nc",
                   bathy=self.bathy_file,
                   caching=False,
                   output_formats=['NetCDF'],
                   output_path=out_dir)
def test_run_from_point(self):
    """Seed particles from a single Point geometry."""
    self.log.logger.info("**************************************")
    self.log.logger.info("Running: test_run_from_point")

    forcing = [self.transport]
    release_point = Point(self.start_lon, self.start_lat, self.start_depth)

    controller = ModelController(geometry=release_point,
                                 start=self.start_time,
                                 step=self.time_step,
                                 nstep=self.num_steps,
                                 npart=self.num_particles,
                                 models=forcing,
                                 use_bathymetry=False,
                                 use_shoreline=True,
                                 time_chunk=10,
                                 horiz_chunk=4)

    cache_file = os.path.join(self.cache_path, "test_run_from_point.nc")
    controller.run("http://thredds.axiomalaska.com/thredds/dodsC/PWS_L2_FCST.nc",
                   cache=cache_file)
def test_run_from_polygon(self):
    """Seed particles from a small polygon (buffered point) instead of a single point."""
    self.log.logger.info("**************************************")
    self.log.logger.info("Running: test_run_from_polygon")

    forcing = [self.transport]
    # A tiny release polygon around the configured start location.
    release_area = Point(self.start_lon, self.start_lat, self.start_depth).buffer(0.001)

    controller = ModelController(geometry=release_area,
                                 start=self.start_time,
                                 step=self.time_step,
                                 nstep=self.num_steps,
                                 npart=self.num_particles,
                                 models=forcing,
                                 use_bathymetry=False,
                                 use_shoreline=True,
                                 time_chunk=10,
                                 horiz_chunk=4)

    cache_file = os.path.join(os.path.dirname(__file__), "..",
                              "paegan/transport/_cache/test_run_from_polygon.nc")
    controller.run("http://thredds.axiomalaska.com/thredds/dodsC/PWS_L2_FCST.nc",
                   cache=cache_file)
def test_run_from_point_with_wfs(self):
    """Point release using a remote WFS shoreline service instead of local shoreline data."""
    self.log.logger.info("**************************************")
    self.log.logger.info("Running: test_run_from_point_with_wfs")

    forcing = [self.transport]
    release_point = Point(self.start_lon, self.start_lat, self.start_depth)

    controller = ModelController(geometry=release_point,
                                 start=self.start_time,
                                 step=self.time_step,
                                 nstep=self.num_steps,
                                 npart=self.num_particles,
                                 models=forcing,
                                 use_bathymetry=False,
                                 use_shoreline=True,
                                 time_chunk=10,
                                 horiz_chunk=4,
                                 shoreline_path='http://geo.asascience.com/geoserver/shorelines/ows',
                                 shoreline_feature='shorelines:10m_land_polygons')

    cache_file = os.path.join(self.cache_path, "test_run_from_point.nc")
    controller.run("http://thredds.axiomalaska.com/thredds/dodsC/PWS_L2_FCST.nc",
                   cache=cache_file)
def test_timechunk_greater_than_timestep(self):
    """Exercise a run whose time chunk (24) exceeds the number of steps requested."""
    self.log.logger.info("**************************************")
    self.log.logger.info("Running: test_timechunk_greater_than_timestep")

    # 6 days
    step_count = 10
    particle_count = 2
    forcing = [self.transport]

    controller = ModelController(latitude=self.start_lat,
                                 longitude=self.start_lon,
                                 depth=self.start_depth,
                                 start=self.start_time,
                                 step=self.time_step,
                                 nstep=step_count,
                                 npart=particle_count,
                                 models=forcing,
                                 use_bathymetry=True,
                                 use_shoreline=True,
                                 time_chunk=24,
                                 horiz_chunk=2)

    cache_file = os.path.join(self.cache_path, "test_timechunk_greater_than_timestep.nc")
    controller.run("http://thredds.axiomalaska.com/thredds/dodsC/PWS_L2_FCST.nc",
                   bathy=self.bathy_file,
                   cache=cache_file)
def test_start_on_land(self):
    """Releasing particles on land must raise ModelError."""
    self.log.logger.info("**************************************")
    self.log.logger.info("Running: test_start_on_land")

    # Set the start position and time for the models: a point known to be on land.
    land_lat = 60.15551950079041
    land_lon = -148.1999130249019
    forcing = [self.transport]

    controller = ModelController(latitude=land_lat,
                                 longitude=land_lon,
                                 depth=self.start_depth,
                                 start=self.start_time,
                                 step=self.time_step,
                                 nstep=self.num_steps,
                                 npart=self.num_particles,
                                 models=forcing,
                                 use_bathymetry=False,
                                 use_shoreline=True,
                                 time_chunk=10,
                                 horiz_chunk=4,
                                 time_method='nearest')

    cache_file = os.path.join(self.cache_path, "test_start_on_land.nc")
    with raises(ModelError):
        controller.run("http://thredds.axiomalaska.com/thredds/dodsC/PWS_L2_FCST.nc",
                       cache=cache_file)
def test_diel_migration(self):
    """Run a week-long diel (sun-cycle) migration behavior and write all output formats."""
    self.log.logger.info("**************************************")
    self.log.logger.info("Running: test_diel_migration")
    num_steps = 168
    num_particles = 4
    start_time = datetime(2013, 4, 1, 0, tzinfo=pytz.utc)

    # Behavior: load the diel sun-cycle configuration.
    # Use a context manager so the file handle is closed (the original leaked it).
    behavior_path = os.path.normpath(
        os.path.join(os.path.dirname(__file__), "./resources/files/diel_suncycles.json"))
    with open(behavior_path) as behavior_file:
        behavior_config = behavior_file.read()
    lb = LarvaBehavior(json=behavior_config)

    models = [self.transport]
    models.append(lb)

    model = ModelController(latitude=60.68, longitude=-146.42, depth=self.start_depth,
                            start=start_time, step=self.time_step, nstep=num_steps,
                            npart=num_particles, models=models, use_bathymetry=True,
                            use_shoreline=True, time_chunk=24, horiz_chunk=2,
                            time_method='nearest')

    # Fresh output directory per test run.
    output_path = os.path.join(self.output_path, "test_diel_migration")
    shutil.rmtree(output_path, ignore_errors=True)
    os.makedirs(output_path)
    output_formats = ['Shapefile', 'NetCDF', 'Trackline']

    cache_path = os.path.join(self.cache_path, "test_diel_migration.nc")
    model.run("http://thredds.axiomalaska.com/thredds/dodsC/PWS_DAS.nc",
              bathy=self.bathy_file, cache=cache_path,
              output_path=output_path, output_formats=output_formats)
def test_interp(self):
    """Interpolated-in-time transport run against the PWS forecast, all output formats."""
    self.log.logger.info("**************************************")
    self.log.logger.info("Running: test_interp")

    forcing = [self.transport]
    step_count = 100

    # Start from a clean output directory for this test.
    out_dir = os.path.join(self.output_path, "test_interp")
    shutil.rmtree(out_dir, ignore_errors=True)
    os.makedirs(out_dir)
    formats = ['Shapefile', 'NetCDF', 'Trackline']

    controller = ModelController(latitude=self.start_lat,
                                 longitude=self.start_lon,
                                 depth=self.start_depth,
                                 start=self.start_time,
                                 step=self.time_step,
                                 nstep=step_count,
                                 npart=self.num_particles,
                                 models=forcing,
                                 use_bathymetry=True,
                                 use_shoreline=True,
                                 time_chunk=10,
                                 horiz_chunk=4)

    cache_file = os.path.join(self.cache_path, "test_interp.nc")
    controller.run("http://thredds.axiomalaska.com/thredds/dodsC/PWS_L2_FCST.nc",
                   bathy=self.bathy_file,
                   cache=cache_file,
                   output_path=out_dir,
                   output_formats=formats)
def test_run_from_point(self):
    """Seed particles from a single Point geometry."""
    self.log.logger.info("**************************************")
    self.log.logger.info("Running: test_run_from_point")

    forcing = [self.transport]
    release_point = Point(self.start_lon, self.start_lat, self.start_depth)

    controller = ModelController(geometry=release_point,
                                 start=self.start_time,
                                 step=self.time_step,
                                 nstep=self.num_steps,
                                 npart=self.num_particles,
                                 models=forcing,
                                 use_bathymetry=False,
                                 use_shoreline=True,
                                 time_chunk=10,
                                 horiz_chunk=4)

    cache_file = os.path.join(self.cache_path, "test_run_from_point.nc")
    controller.run("http://thredds.axiomalaska.com/thredds/dodsC/PWS_L2_FCST.nc",
                   cache=cache_file)
def run(run_id):
    """Execute a saved model Run (looked up by id) as a background job.

    Sets up per-run output/cache directories and a multiprocessing-safe logger,
    streams progress into the job's metadata from a watcher thread, runs the
    ModelController, then uploads/arranges result files (S3 or local URL) and
    records the outcome on the Run document.

    Fixes vs. original:
      * the two `(hand.close() for hand in logger.handlers)` generator
        expressions were never iterated, so handlers were never closed —
        replaced with real loops;
      * the `finally` block dereferenced `run` / `cache_file` even when the
        run lookup failed or an early exception fired, masking the real
        outcome with a TypeError/NameError — now guarded;
      * `exception.message` raises AttributeError for exceptions without a
        `.message` attribute — use str(exception).
    """
    # Sleep to give the Run object enough time to save
    time.sleep(10)
    with app.app_context():
        from paegan.logger import logger

        job = get_current_job()

        # Per-run working directories: output, cache, and temp animation frames.
        output_path = os.path.join(current_app.config['OUTPUT_PATH'], run_id)
        shutil.rmtree(output_path, ignore_errors=True)
        os.makedirs(output_path)

        cache_path = os.path.join(current_app.config['CACHE_PATH'], run_id)
        shutil.rmtree(cache_path, ignore_errors=True)
        os.makedirs(cache_path)

        temp_animation_path = os.path.join(current_app.config['OUTPUT_PATH'], "temp_images_" + run_id)
        shutil.rmtree(temp_animation_path, ignore_errors=True)
        os.makedirs(temp_animation_path)

        # Set up Logger
        queue = multiprocessing.Queue(-1)
        f, log_file = tempfile.mkstemp(dir=cache_path, prefix=run_id, suffix=".log")
        os.close(f)

        # Close and remove any existing handlers.
        # NOTE: the original used a generator expression here, which was never
        # consumed and therefore closed nothing.
        for hand in logger.handlers:
            hand.close()
        logger.handlers = []

        logger.setLevel(logging.PROGRESS)
        handler = MultiProcessingLogHandler(log_file, queue)
        handler.setLevel(logging.PROGRESS)
        formatter = logging.Formatter('[%(asctime)s] - %(levelname)s - %(name)s - %(processName)s - %(message)s')
        handler.setFormatter(formatter)
        logger.addHandler(handler)

        # Progress stuff. Hokey!
        progress_deque = collections.deque(maxlen=1)
        progress_handler = ProgressHandler(progress_deque)
        progress_handler.setLevel(logging.PROGRESS)
        logger.addHandler(progress_handler)

        e = threading.Event()

        def save_progress():
            # Poll the latest progress record every 5s and mirror it onto the job.
            while e.wait(5) is not True:
                try:
                    record = progress_deque.pop()
                    if record == StopIteration:
                        break
                    job.meta["updated"] = record[0]
                    if record is not None and record[1] >= 0:
                        job.meta["progress"] = record[1]
                    if isinstance(record[2], unicode) or isinstance(record[2], str):
                        job.meta["message"] = record[2]
                    job.save()
                except IndexError:
                    # Deque empty — nothing new to report.
                    pass
                except Exception:
                    raise
            return

        t = threading.Thread(name="ProgressUpdater", target=save_progress)
        t.daemon = True
        t.start()

        model = None
        # Pre-bind names the finally block relies on, so a failure before they
        # are assigned does not raise NameError/TypeError during cleanup.
        run = None
        cache_file = None
        try:
            logger.progress((0, "Configuring model"))

            run = db.Run.find_one({'_id': ObjectId(run_id)})
            if run is None:
                return "Failed to locate run %s. May have been deleted while task was in the queue?" % run_id

            geometry = loads(run['geometry'])
            start_depth = run['release_depth']
            num_particles = run['particles']
            time_step = run['timestep']
            num_steps = int(math.ceil((run['duration'] * 24 * 60 * 60) / time_step))
            start_time = run['start'].replace(tzinfo=pytz.utc)
            shoreline_path = run['shoreline_path'] or app.config.get("SHORE_PATH")
            shoreline_feat = run['shoreline_feature']

            # Set up output directory/bucket for run
            output_formats = ['Shapefile', 'NetCDF', 'Trackline']

            # Setup Models
            models = []
            if run['cached_behavior'] is not None and run['cached_behavior'].get('results', None) is not None:
                behavior_data = run['cached_behavior']['results'][0]
                l = LarvaBehavior(data=behavior_data)
                models.append(l)
            models.append(Transport(horizDisp=run['horiz_dispersion'], vertDisp=run['vert_dispersion']))

            # Setup ModelController
            model = ModelController(geometry=geometry, depth=start_depth, start=start_time,
                                    step=time_step, nstep=num_steps, npart=num_particles,
                                    models=models, use_bathymetry=True, use_shoreline=True,
                                    time_chunk=run['time_chunk'], horiz_chunk=run['horiz_chunk'],
                                    time_method=run['time_method'], shoreline_path=shoreline_path,
                                    shoreline_feature=shoreline_feat, reverse_distance=1500)

            # Run the model
            cache_file = os.path.join(cache_path, run_id + ".nc.cache")
            bathy_file = current_app.config['BATHY_PATH']
            model.run(run['hydro_path'], output_path=output_path, bathy=bathy_file,
                      output_formats=output_formats, cache=cache_file,
                      remove_cache=False, caching=run['caching'])

            # Skip creating movie output_path
            """
            from paegan.viz.trajectory import CFTrajectory

            logger.info("Creating animation...")
            for filename in os.listdir(output_path):
                if os.path.splitext(filename)[1][1:] == "nc":
                    # Found netCDF file
                    netcdf_file = os.path.join(output_path,filename)
                    traj = CFTrajectory(netcdf_file)
                    success = traj.plot_animate(os.path.join(output_path,'animation.avi'), temp_folder=temp_animation_path, bathy=app.config['BATHY_PATH'])
                    if not success:
                        logger.info("Could not create animation")
                    else:
                        logger.info("Animation saved")
            """

            job.meta["outcome"] = "success"
            job.save()
            return "Successfully ran %s" % run_id
        except Exception as exception:
            logger.warn("Run FAILED, cleaning up and uploading log.")
            # str() is safe for all exception types; `.message` is not.
            logger.warn(str(exception))
            job.meta["outcome"] = "failed"
            job.save()
            raise
        finally:
            logger.progress((99, "Processing output files"))

            # Close the handlers so we can upload the log file without a file
            # lock (was a no-op generator expression in the original).
            for hand in logger.handlers:
                hand.close()

            queue.put(StopIteration)

            # Break out of the progress loop
            e.set()
            t.join()

            # Move logfile to output directory
            shutil.move(log_file, os.path.join(output_path, 'model.log'))

            # Move cachefile to output directory if we made one
            if run is not None and run['caching'] and cache_file is not None:
                shutil.move(cache_file, output_path)

            output_files = []
            for filename in os.listdir(output_path):
                outfile = os.path.join(output_path, filename)
                output_files.append(outfile)

            result_files = []
            base_access_url = current_app.config.get('NON_S3_OUTPUT_URL', None)
            # Handle results and cleanup
            if current_app.config['USE_S3'] is True:
                base_access_url = urljoin("http://%s.s3.amazonaws.com/output/" % current_app.config['S3_BUCKET'], run_id)
                # Upload results to S3 and remove the local copies
                conn = S3Connection()
                bucket = conn.get_bucket(current_app.config['S3_BUCKET'])
                for outfile in output_files:
                    # Don't upload the cache file
                    if cache_file is not None and os.path.basename(outfile) == os.path.basename(cache_file):
                        continue
                    # Upload the outfile with the same as the run name
                    _, ext = os.path.splitext(outfile)
                    new_filename = slugify(unicode(run['name'])) + ext
                    k = Key(bucket)
                    k.key = "output/%s/%s" % (run_id, new_filename)
                    k.set_contents_from_filename(outfile)
                    k.set_acl('public-read')
                    result_files.append(base_access_url + "/" + new_filename)
                    os.remove(outfile)
                shutil.rmtree(output_path, ignore_errors=True)
            else:
                for outfile in output_files:
                    result_files.append(urljoin(base_access_url, run_id) + "/" + os.path.basename(outfile))

            shutil.rmtree(temp_animation_path, ignore_errors=True)

            # Set output fields on the Run document (skip if lookup failed).
            if run is not None:
                run.output = result_files
                run.ended = datetime.utcnow()
                run.compute()
                run.save()

            # Cleanup
            logger.removeHandler(handler)
            del formatter
            del handler
            del logger
            del model
            queue.close()

            job.meta["message"] = "Complete"
            job.save()