Example #1
# Missing imports added so the snippet runs standalone; the BaseForcer
# import path is assumed from the paegan-transport package layout.
import json
import logging

import redis

from paegan.transport.forcers import BaseForcer


def particle(hydrodataset, part, model):

    from paegan.logger import logger
    from paegan.logger.redis_handler import RedisHandler
    rhandler = RedisHandler(model.redis_log_channel, model.redis_url)
    rhandler.setLevel(logging.PROGRESS)
    logger.addHandler(rhandler)

    # Create the connection before the try block so the except/else
    # handlers below can always publish a status message.
    redis_connection = redis.from_url(model.redis_url)

    try:
        forcer = BaseForcer(hydrodataset,
                            particle=part,
                            common_variables=model.common_variables,
                            times=model.times,
                            start_time=model.start,
                            models=model._models,
                            release_location_centroid=model.reference_location.point,
                            usebathy=model._use_bathymetry,
                            useshore=model._use_shoreline,
                            usesurface=model._use_seasurface,
                            reverse_distance=model.reverse_distance,
                            bathy_path=model.bathy_path,
                            shoreline_path=model.shoreline_path,
                            shoreline_feature=model.shoreline_feature,
                            time_method=model.time_method,
                            redis_url=model.redis_url,
                            redis_results_channel=model.redis_results_channel,
                            shoreline_index_buffer=model.shoreline_index_buffer
                           )
        forcer.run()
    except Exception:
        logger.exception("Particle %s failed" % part.uid)
        redis_connection.publish(model.redis_results_channel, json.dumps({"status": "FAILED", "uid": part.uid}))
    else:
        redis_connection.publish(model.redis_results_channel, json.dumps({"status": "COMPLETED", "uid": part.uid}))
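Each run of the function above ends by publishing a one-line JSON status message ({"status": ..., "uid": ...}) on the results channel. A minimal sketch of the consumer side, assuming redis-py and hypothetical URL and channel values:

import json

import redis

# Hypothetical URL and channel; in practice both come from the model object.
redis_connection = redis.from_url("redis://localhost:6379/0")
pubsub = redis_connection.pubsub()
pubsub.subscribe("particles:results")

for message in pubsub.listen():
    if message["type"] != "message":
        continue  # skip subscribe confirmations
    payload = json.loads(message["data"])
    print("Particle %s finished: %s" % (payload["uid"], payload["status"]))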
Example #2
    def setup_run(self, hydrodataset, **kwargs):
        import logging

        from paegan.logger import logger
        from paegan.logger.redis_handler import RedisHandler

        self.redis_url = kwargs.get("redis_url")
        self.redis_log_channel = kwargs.get("redis_log_channel")
        self.redis_results_channel = kwargs.get("redis_results_channel")
        rhandler = RedisHandler(self.redis_log_channel, self.redis_url)
        rhandler.setLevel(logging.PROGRESS)
        logger.addHandler(rhandler)

        super(DistributedModelController,
              self).setup_run(hydrodataset, **kwargs)
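A hedged sketch of the caller's side; the controller construction is elided, and every literal below (dataset URL, Redis URL, channel names) is an assumed placeholder:

# controller = DistributedModelController(...)  # construction omitted
controller.setup_run(
    "http://example.com/thredds/dodsC/hydro.nc",
    redis_url="redis://localhost:6379/0",
    redis_log_channel="particles:log",
    redis_results_channel="particles:results",
)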
Example #3
# Missing imports added so the snippet runs standalone; BaseForcer's import
# path is assumed from the paegan-transport package layout.
import json
import logging

import redis

from paegan.transport.forcers import BaseForcer


def particle_runner(part, model):

    from paegan.logger import logger
    logger.setLevel(logging.PROGRESS)

    from paegan.logger.redis_handler import RedisHandler
    rhandler = RedisHandler(model.redis_log_channel, model.redis_url)
    rhandler.setLevel(logging.PROGRESS)
    logger.addHandler(rhandler)

    # Create the connection before the try block so the except/else
    # handlers below can always publish a status message.
    redis_connection = redis.from_url(model.redis_url)

    try:
        forcer = BaseForcer(
            model.hydrodataset,
            particle=part,
            common_variables=model.common_variables,
            times=model.times,
            start_time=model.start,
            models=model._models,
            release_location_centroid=model.reference_location.point,
            usebathy=model._use_bathymetry,
            useshore=model._use_shoreline,
            usesurface=model._use_seasurface,
            reverse_distance=model.reverse_distance,
            bathy_path=model.bathy_path,
            shoreline_path=model.shoreline_path,
            shoreline_feature=model.shoreline_feature,
            time_method=model.time_method,
            redis_url=model.redis_url,
            redis_results_channel=model.redis_results_channel,
            shoreline_index_buffer=model.shoreline_index_buffer)
        forcer.run()
    except Exception:
        # logger.exception already appends the active traceback, so passing
        # traceback.format_exc() as the message would log it twice.
        logger.exception("Particle %s failed" % part.uid)
        redis_connection.publish(
            model.redis_results_channel,
            json.dumps({
                "status": "FAILED",
                "uid": part.uid
            }))
    else:
        redis_connection.publish(
            model.redis_results_channel,
            json.dumps({
                "status": "COMPLETED",
                "uid": part.uid
            }))
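particle_runner is shaped like a worker entry point: everything it needs arrives through its two arguments, and results travel over Redis rather than return values. A minimal fan-out sketch, assuming a picklable model object and a list of particles already exist:

import multiprocessing
from functools import partial

def run_all(particles, model):
    # One task per particle; the pool size mirrors the cpu_count() - 1
    # heuristic used by the controller in Example #4.
    pool = multiprocessing.Pool(processes=max(multiprocessing.cpu_count() - 1, 1))
    try:
        pool.map(partial(particle_runner, model=model), particles)
    finally:
        pool.close()
        pool.join()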
Example #4
    def setup_run(self, **kwargs):
        # Assumes the controller module already imports logging, time,
        # multiprocessing, the paegan logger, shapely's Point/Polygon/
        # MultiPolygon, and the paegan transport helpers used below.

        logger.setLevel(logging.PROGRESS)

        self.redis_url             = None
        self.redis_log_channel     = None
        self.redis_results_channel = None
        if "redis" in kwargs.get("output_formats", []):
            from paegan.logger.redis_handler import RedisHandler
            self.redis_url             = kwargs.get("redis_url")
            self.redis_log_channel     = kwargs.get("redis_log_channel")
            self.redis_results_channel = kwargs.get("redis_results_channel")
            rhandler = RedisHandler(self.redis_log_channel, self.redis_url)
            rhandler.setLevel(logging.PROGRESS)
            logger.addHandler(rhandler)

        # Relax.
        time.sleep(0.5)

        # Add ModelController description to logfile
        logger.info(unicode(self))

        # Add the model descriptions to logfile
        for m in self._models:
            logger.info(unicode(m))

        # Calculate the model timesteps.
        # We need len(times) == self._nstep + 1, since data is stored one
        # timestep after a particle is forced with the final timestep's data.
        self.times = range(0, (self._step*self._nstep)+1, self._step)
        # Calculate a datetime object for each model timestep.
        # This logic is duplicated in CachingDataController and CachingForcer
        # using the 'times' variable above, and will be useful in those
        # locations for particles released at different times (i.e. over a
        # few days).
        self.modelTimestep, self.datetimes = AsaTransport.get_time_objects_from_model_timesteps(self.times, start=self.start)
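        # Worked example with assumed values: for self._step = 3600 (seconds)
        # and self._nstep = 48, times == range(0, 172801, 3600), i.e.
        # [0, 3600, ..., 172800] -- 49 entries, one per forcing step plus the
        # final stored timestep.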

        logger.progress((1, "Setting up particle start locations"))
        point_locations = []
        if isinstance(self.geometry, Point):
            point_locations = [self.reference_location] * self._npart
        elif isinstance(self.geometry, (Polygon, MultiPolygon)):
            point_locations = [Location4D(latitude=loc.y, longitude=loc.x, depth=self._depth, time=self.start)
                               for loc in AsaTransport.fill_polygon_with_points(goal=self._npart, polygon=self.geometry)]

        # Initialize the particles
        logger.progress((2, "Initializing particles"))
        for x in xrange(0, self._npart):
            p = LarvaParticle(id=x)
            p.location = point_locations[x]
            # We don't need to fill the location gaps here for environment variables
            # because the first data collected actually relates to this original
            # position.
            # We do need to fill in fields such as settled, halted, etc.
            p.fill_status_gap()
            # Set the initial note
            p.note = p.outputstring()
            p.notes.append(p.note)
            self.particles.append(p)

        if kwargs.get("manager", True):
            # Get the number of cores (may take some tuning) and create that
            # many workers then pass particles into the queue for the workers
            self.mgr = multiprocessing.Manager()

            # This tracks whether the system is 'alive'.  Most worker loops
            # check this and break out when it becomes False.  It stays True
            # unless something goes very wrong.
            self.active = self.mgr.Value('bool', True)

            # Each particle is a task, plus the CachingDataController
            self.number_of_tasks = self.get_number_of_tasks()

            # Either spin up the number of cores, or the number of tasks
            self.nproc = min(multiprocessing.cpu_count() - 1, self.number_of_tasks)

            # Number of tasks that we need to run.  This is decremented every time something exits.
            self.n_run = self.mgr.Value('int', self.number_of_tasks)
            # The lock that controls access to the 'n_run' variable
            self.nproc_lock = self.mgr.Lock()

            # Create the task queue for all of the particles and the CachingDataController
            self.tasks = multiprocessing.JoinableQueue(self.number_of_tasks)
            # Create the result queue for all of the particles and the CachingDataController
            self.results = self.mgr.Queue(self.number_of_tasks)

        logger.progress((3, "Initializing and caching hydro model's grid"))
        try:
            ds = CommonDataset.open(self.hydrodataset)
        except Exception:
            logger.exception("Failed to access dataset %s" % self.hydrodataset)
            raise BaseDataControllerError("Inaccessible Dataset: %s" % self.hydrodataset)
        # Query the dataset for common variable names
        # and the time variable.
        logger.debug("Retrieving variable information from dataset")
        self.common_variables = self.get_common_variables_from_dataset(ds)

        self.timevar = None
        try:
            assert self.common_variables.get("u") in ds._current_variables
            assert self.common_variables.get("v") in ds._current_variables
            assert self.common_variables.get("x") in ds._current_variables
            assert self.common_variables.get("y") in ds._current_variables

            self.timevar = ds.gettimevar(self.common_variables.get("u"))
        except AssertionError:
            logger.exception("Could not locate variables needed to run model: %s" % unicode(self.common_variables))
            raise BaseDataControllerError("A required data variable was not found in %s" % self.hydrodataset)

        model_start = self.timevar.get_dates()[0]
        model_end   = self.timevar.get_dates()[-1]

        try:
            assert self.start > model_start
            assert self.start < model_end
        except AssertionError:
            raise BaseDataControllerError("Start time for model (%s) is not available in source dataset (%s/%s)" % (self.start, model_start, model_end))

        try:
            assert self.datetimes[-1] > model_start
            assert self.datetimes[-1] < model_end
        except AssertionError:
            raise BaseDataControllerError("End time for model (%s) is not available in source dataset (%s/%s)" % (self.datetimes[-1], model_start, model_end))

        ds.closenc()
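The Manager section above only builds the plumbing: a joinable task queue, a results queue, a shared 'alive' flag, and a locked countdown of tasks left to run. A hedged sketch of the consumer loop that plumbing implies; the Task objects here are a hypothetical stand-in for a particle forcer or the CachingDataController, and the coordinator is assumed to enqueue one None sentinel per worker:

def worker(tasks, results, active, n_run, nproc_lock):
    # Consume tasks until a None sentinel arrives or the run is flagged dead.
    while active.value:
        task = tasks.get()
        if task is None:              # sentinel: no more work for this worker
            tasks.task_done()
            break
        try:
            results.put(task.run())   # hypothetical task interface
        except Exception:
            active.value = False      # tell every other loop to bail out
            raise
        finally:
            tasks.task_done()
            with nproc_lock:          # guard the shared countdown
                n_run.value -= 1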