Example #1
0
 def _run_on_fire(self, fire):
     """Runs the UBC BSF FEPS emissions model on a single fire.

     Validates required input data (activity, consumption, and exactly
     one fuelbed per location) and records the computed emissions on
     each location's sole fuelbed.
     """
     working_dir = Config().get('emissions', 'ubc-bsf-feps', 'working_dir')
     delete_if_no_error = Config().get('emissions', 'ubc-bsf-feps',
                                       'delete_working_dir_if_no_error')
     with osutils.create_working_dir(
             working_dir=working_dir,
             delete_if_no_error=delete_if_no_error) as wdir:
         fire_working_dir = self._get_fire_working_dir(fire, wdir)
         # Fail fast if required input data is missing
         if 'activity' not in fire:
             raise ValueError(
                 "Missing activity data required for computing Canadian emissions"
             )
         for active_area in fire.active_areas:
             for location in active_area.locations:
                 if "consumption" not in location:
                     raise ValueError(
                         "Missing consumption data required for computing Canadian emissions"
                     )
                 if 'fuelbeds' not in location:
                     raise ValueError(
                         "Fuelbeds should be made in bsf load module before computing Canadian emissions"
                     )
                 if len(location["fuelbeds"]) != 1:
                     raise ValueError(
                         "Each fuelbed array should only have one entry when running Canadian emissions"
                     )
                 location["fuelbeds"][0]["emissions"] = self.emitter.run(
                     location, fire_working_dir)
Example #2
0
 def _run_start_hour(self, start_hour):
     """Runs one hysplit iteration for the given start-hour offset.

     args:
      - start_hour -- integer offset (hours) from the overall run start
     """
     start_time = self._start + datetime.timedelta(hours=start_hour)
     # Per-start-hour subdir under the configured working dir; if no
     # working dir is configured, the falsy value is passed through and
     # create_working_dir falls back to a temp dir
     hour_working_dir = self._working_dir and os.path.join(
         self._working_dir, str(start_hour))
     with osutils.create_working_dir(working_dir=hour_working_dir) as wdir:
         met_files = self._get_met_files(start_time)
         self._control_file_writer.write(start_time, met_files, wdir)
         self._setup_file_writer.write(wdir)
         self._sym_link_met_files(wdir, met_files)
         self._sym_link_static_files(wdir)
         self._run_hysplit(wdir)
         self._output_loader.load(start_time, wdir)
Example #3
0
    def profile(self, lat, lng, local_start, local_end, utc_offset):
        """Returns local met profile for specific location and timewindow

        args:
         - lat -- latitude of location
         - lng -- longitude of location
         - local_start -- local datetime object representing beginning of time window
         - local_end -- local datetime object representing end of time window
         - utc_offset -- hours ahead of or behind UTC
        """
        # TODO: validate utc_offset?
        if local_start > local_end:
            raise ValueError(
                "Invalid localmet time window: start={}, end={}".format(
                    local_start, local_end))

        def floor_to_hour(dt):
            # Truncate to the top of the hour by constructing a new
            # (naive) datetime from the date/hour components
            return datetime(dt.year, dt.month, dt.day, dt.hour)

        utc_start = local_start - timedelta(hours=utc_offset)
        utc_start_hour = floor_to_hour(utc_start)
        utc_end = local_end - timedelta(hours=utc_offset)
        utc_end_hour = floor_to_hour(utc_end)
        # Don't include end hour if it's on the hour
        # TODO: should we indeed exclude it?
        if utc_end == utc_end_hour:
            utc_end_hour -= ONE_HOUR

        local_met_data = {}
        for met_file in self._met_files:
            overlaps = (met_file['first_hour'] <= utc_end_hour
                        and met_file['last_hour'] >= utc_start_hour)
            if not overlaps:
                # met file has no data within given timewindow
                continue

            window_start = max(met_file['first_hour'], utc_start_hour)
            window_end = min(met_file['last_hour'], utc_end_hour)

            met_dir, met_name = os.path.split(met_file["file"])
            # split returns dir without trailing slash, which is required
            # by profile
            met_dir = met_dir + '/'

            with osutils.create_working_dir() as wdir:
                self._call(met_dir, met_name, lat, lng)
                full_path_profile_txt = os.path.join(
                    wdir, self.PROFILE_OUTPUT_FILE)
                lmd = self._load(full_path_profile_txt,
                                 met_file['first_hour'], window_start,
                                 window_end, utc_offset, lat, lng)
            local_met_data.update(lmd)
        return local_met_data
Example #4
0
def _get_profiler(hourly_fractions, fire, active_area):
    """Returns the appropriate time profiler for a fire's active area.

    Use FepsTimeProfiler for Rx fires and StaticTimeProfiler for WF,
    unless custom hourly_fractions are specified, in which case
    StaticTimeProfiler (or, if configured, UbcBsfFEPSTimeProfiler) is
    used for all fires.  If ignition_start and ignition_end aren't
    specified for Rx fires, FepsTimeProfiler will assume 9am-12pm.
    """
    # TODO: add config setting to use FEPS for Rx even if custom
    #   hourly_fractions are specified (or the converse - i.e. alwys use
    #   FEPS for rx and add setting to turn on use of hourly_fractions,
    #   if specified, for Rx)
    tw = parse_datetimes(active_area, 'start', 'end')

    if fire.type == 'rx' and not hourly_fractions:
        # Ignition times are optional; a missing key falls through as a
        # falsy value and FepsTimeProfiler applies its default window
        ig_start = active_area.get('ignition_start') and parse_datetime(
            active_area['ignition_start'], k='ignition_start')
        ig_end = active_area.get('ignition_end') and parse_datetime(
            active_area['ignition_end'], k='ignition_end')
        # TODO: pass in duff_fuel_load, total_above_ground_consumption,
        #    total_below_ground_consumption, moisture_category,
        #    relative_humidity, wind_speed, and duff_moisture_content,
        #    if defined?
        return FepsTimeProfiler(tw['start'],
                                tw['end'],
                                local_ignition_start_time=ig_start,
                                local_ignition_end_time=ig_end,
                                fire_type=FireType.RX)

    model_name = Config().get("timeprofile", "model").lower()
    if model_name != "ubc-bsf-feps":
        return StaticTimeProfiler(tw['start'],
                                  tw['end'],
                                  hourly_fractions=hourly_fractions)

    wfrtConfig = Config().get('timeprofile', 'ubc-bsf-feps')
    working_dir = wfrtConfig.get('working_dir')
    delete_if_no_error = wfrtConfig.get('delete_working_dir_if_no_error')
    with osutils.create_working_dir(
            working_dir=working_dir,
            delete_if_no_error=delete_if_no_error) as wdir:
        fire_working_dir = os.path.join(
            wdir, "feps-timeprofile-{}".format(fire.id))
        os.makedirs(fire_working_dir, exist_ok=True)
        # NOTE(review): the profiler is returned from inside the
        # create_working_dir context, so the context may clean up the
        # working dir on exit — confirm UbcBsfFEPSTimeProfiler does all
        # of its work at construction time
        return ubcbsffeps.UbcBsfFEPSTimeProfiler(
            active_area, fire_working_dir, wfrtConfig)
Example #5
0
    def profile(self, lat, lng, local_start, local_end, utc_offset):
        """Returns local met profile for specific location and timewindow

        args:
         - lat -- latitude of location
         - lng -- longitude of location
         - local_start -- local datetime object representing beginning of time window
         - local_end -- local datetime object representing end of time window
         - utc_offset -- hours ahead of or behind UTC
        """
        # TODO: validate utc_offset?
        if local_start > local_end:
            raise ValueError(
                "Invalid localmet time window: start={}, end={}".format(
                    local_start, local_end))

        # Shift the local window to UTC and floor both ends to the hour
        utc_start = local_start - timedelta(hours=utc_offset)
        utc_start_hour = datetime(
            utc_start.year, utc_start.month, utc_start.day, utc_start.hour)
        utc_end = local_end - timedelta(hours=utc_offset)
        utc_end_hour = datetime(
            utc_end.year, utc_end.month, utc_end.day, utc_end.hour)
        # Don't include end hour if it's on the hour
        # TODO: should we indeed exclude it?
        if utc_end == utc_end_hour:
            utc_end_hour -= ONE_HOUR

        local_met_data = {}
        for met_file in self._met_files:
            first_hour = met_file['first_hour']
            last_hour = met_file['last_hour']
            if first_hour > utc_end_hour or last_hour < utc_start_hour:
                # met file has no data within given timewindow
                continue

            start = max(first_hour, utc_start_hour)
            end = min(last_hour, utc_end_hour)

            met_dir, met_name = os.path.split(met_file["file"])
            # split returns dir without trailing slash, which is required
            # by profile
            met_dir = met_dir + '/'

            with osutils.create_working_dir() as wdir:
                self._call(met_dir, met_name, lat, lng)
                full_path_profile_txt = os.path.join(
                    wdir, self.PROFILE_OUTPUT_FILE)
                lmd = self._load(full_path_profile_txt, first_hour,
                                 start, end, utc_offset, lat, lng)
            local_met_data.update(lmd)
        return local_met_data
Example #6
0
    def run(self, fires, start, num_hours, output_dir, working_dir=None):
        """Runs hysplit

        args:
         - fires - list of fires to run through hysplit
         - start - model run start hour (datetime; must be on the hour)
         - num_hours - number of hours in model run (integer)
         - output_dir - directory to contain output (already created)

        kwargs:
         - working_dir -- working directory to write input files and output
            files (before they're copied over to final output directory);
            if not specified, a temp directory is created

        Returns the run results dict, with output metadata and any
        warnings accumulated during the run.

        Raises ValueError if start isn't on the hour or num_hours isn't
        an integer.
        """
        logging.info("Running %s", self.__class__.__name__)

        self._warnings = []

        if start.minute or start.second or start.microsecond:
            raise ValueError("Dispersion start time must be on the hour.")
        # isinstance is the idiomatic type check (and, unlike a strict
        # type comparison, also accepts int subclasses)
        if not isinstance(num_hours, int):
            raise ValueError("Dispersion num_hours must be an integer.")
        self._model_start = start
        self._num_hours = num_hours

        self._run_output_dir = output_dir  # already created

        self._working_dir = working_dir and os.path.abspath(working_dir)
        # osutils.create_working_dir will create working dir if necessary

        self._set_fire_data(fires)

        with osutils.create_working_dir(working_dir=self._working_dir) as wdir:
            r = self._run(wdir)

        r["output"].update({
            "directory": self._run_output_dir,
            "start_time": self._model_start.isoformat(),
            "num_hours": self._num_hours
        })
        if self._working_dir:
            r["output"]["working_dir"] = self._working_dir
        if self._warnings:
            r["warnings"] = self._warnings

        return r
Example #7
0
def run(fires_manager):
    """Runs plumerise module

    Args:
     - fires_manager -- bluesky.models.fires.FiresManager object
    """
    compute_func = ComputeFunction(fires_manager)

    configured_dir = compute_func.config.get('working_dir')
    with osutils.create_working_dir(working_dir=configured_dir) as working_dir:
        for fire in fires_manager.fires:
            # per-fire failure handling, so each fire is processed
            # under its own handler context
            with fires_manager.fire_failure_handler(fire):
                compute_func(fire, working_dir=working_dir)

    # Make sure to distribute the heat if it was loaded here.
    if compute_func.config.get("load_heat"):
        datautils.summarize_all_levels(fires_manager, 'heat')
Example #8
0
    def _run_start_hour(self, start_hour):
        """Runs a hysplit trajectories iteration for one start hour.

        args:
         - start_hour -- integer offset (hours) from the overall run start

        Skips the run (with a warning) if there are no locations with
        activity at the computed start time.
        """
        start_s = self._start + datetime.timedelta(hours=start_hour)
        # Per-start-hour subdir under the configured working dir, if any;
        # a falsy value lets create_working_dir fall back to a temp dir
        working_dir_s = self._working_dir and os.path.join(
            self._working_dir, str(start_hour))

        locations = self._filter_locations(start_s)
        if not locations:
            # logging.warn is a deprecated alias of logging.warning
            logging.warning("No activity at start hour %s. Skipping hysplit",
                            start_s)
            return

        delete_if_no_error = Config().get('trajectories',
                                          'delete_working_dir_if_no_error')
        with osutils.create_working_dir(
                working_dir=working_dir_s,
                delete_if_no_error=delete_if_no_error) as wdir:
            met_files = self._get_met_files(start_s)
            self._control_file_writer.write(start_s, met_files, wdir,
                                            locations)
            self._setup_file_writer.write(wdir)
            self._sym_link_met_files(wdir, met_files)
            self._sym_link_static_files(wdir)
            self._run_hysplit(wdir)
            self._output_loader.load(start_s, wdir, locations)
Example #9
0
    def run(self,
            fires_manager,
            start,
            num_hours,
            output_dir,
            working_dir=None):
        """Runs hysplit

        args:
         - fires_manager - FiresManager object
         - start - model run start hour (datetime; must be on the hour)
         - num_hours - number of hours in model run (integer)
         - output_dir - directory to contain output (already created)

        kwargs:
         - working_dir -- working directory to write input files and output
            files (before they're copied over to final output directory);
            if not specified, a temp directory is created

        Returns the run results dict, with fire/location/plume counts,
        output metadata, and any warnings accumulated during the run.

        Raises ValueError if start isn't on the hour or num_hours isn't
        an integer.
        """
        logging.info("Running %s", self.__class__.__name__)

        self._warnings = []

        if start.minute or start.second or start.microsecond:
            raise ValueError("Dispersion start time must be on the hour.")
        # isinstance is the idiomatic type check (and, unlike a strict
        # type comparison, also accepts int subclasses)
        if not isinstance(num_hours, int):
            raise ValueError("Dispersion num_hours must be an integer.")
        self._model_start = start
        self._num_hours = num_hours

        self._run_output_dir = output_dir  # already created

        self._working_dir = working_dir and os.path.abspath(working_dir)
        # osutils.create_working_dir will create working dir if necessary

        counts = {'fires': len(fires_manager.fires)}
        self._set_fire_data(fires_manager.fires)
        counts['locations'] = len(self._fires)

        # TODO: only merge fires if hysplit, or make it configurable ???
        self._fires = firemerge.FireMerger().merge(self._fires)
        # TODO: should we pop 'end' from each fire object, since it's
        #   only used in _merge_fires logic?
        counts['distinct_locations'] = len(self._fires)

        pm_config = Config().get('dispersion', 'plume_merge')
        if pm_config:
            # TODO: make sure pm_config boundary includes all of disperion
            #   boundary, and raise BlueSkyConfigurationError if not?
            self._fires = firemerge.PlumeMerger(pm_config).merge(self._fires)

        counts['plumes'] = len(self._fires)
        notes = "Plumes to be modeled by dispersion"
        fires_manager.log_status('Good',
                                 'dispersion',
                                 'Continue',
                                 number_of_locations=counts['plumes'],
                                 notes=notes)

        with osutils.create_working_dir(working_dir=self._working_dir) as wdir:
            r = self._run(wdir)

        r["counts"] = counts
        r["output"].update({
            "directory": self._run_output_dir,
            "start_time": self._model_start.isoformat(),
            "num_hours": self._num_hours
        })
        if self._working_dir:
            r["output"]["working_dir"] = self._working_dir
        if self._warnings:
            r["warnings"] = self._warnings

        return r
Example #10
0
    def run(self):
        """Generates blueskykml visualization output (KMZ files and fire
        CSVs) from hysplit dispersion output.

        Returns a dict describing the output directory and generated
        file names.

        Raises BlueSkyConfigurationError if blueskykml rejects its
        configuration.
        """
        # Prefer a configured output dir; otherwise write alongside the
        # hysplit output (single config lookup instead of two)
        output_directory = (vis_hysplit_config('output_dir')
                            or self._hysplit_output_directory)
        data_dir = os.path.join(output_directory,
                                vis_hysplit_config('data_dir'))
        os.makedirs(data_dir, exist_ok=True)

        files = {
            'fire_locations_csv': self._get_file_name(
                data_dir, 'fire_locations_csv'),
            'fire_events_csv': self._get_file_name(
                data_dir, 'fire_events_csv'),
            'smoke_dispersion_kmz': self._get_file_name(
                output_directory, 'smoke_dispersion_kmz'),
            'fire_kmz': self._get_file_name(
                output_directory, 'fire_kmz')
        }

        self._generate_fire_csv_files(
            files['fire_locations_csv']['pathname'],
            files['fire_events_csv']['pathname'])

        self._generate_summary_json(output_directory)

        config_options = self._get_config_options(output_directory)

        layers = vis_hysplit_config('layers')
        args = BlueskyKmlArgs(
            output_directory=str(output_directory),
            configfile=None,  # TODO: allow this to be configurable?
            prettykml=vis_hysplit_config('prettykml'),
            # in blueskykml, if verbose is True, then logging level will be set
            # DEBUG; otherwise, logging level is left as is.  bsp already takes
            # care of setting log level, so setting verbose to False will let
            # blueskykml inherit logging level
            verbose=False,
            config_options=config_options,
            inputfile=str(self._hysplit_output_file),
            fire_locations_csv=str(files['fire_locations_csv']['pathname']),
            fire_events_csv=str(files['fire_events_csv']['pathname']),
            smoke_dispersion_kmz_file=str(
                files['smoke_dispersion_kmz']['pathname']),
            fire_kmz_file=str(files['fire_kmz']['pathname']),
            # blueskykml now supports layers specified as list of ints
            layers=layers
        )

        try:
            # Note: using create_working_dir effectively marks any
            #  intermediate outputs for cleanup
            with osutils.create_working_dir() as wdir:
                makedispersionkml.main(args)
        except blueskykml_configuration.ConfigurationError as e:
            # Surface the underlying error (and chain it) instead of
            # raising an opaque placeholder message
            raise BlueSkyConfigurationError(
                "blueskykml configuration error: {}".format(e)) from e

        return {
            'blueskykml_version': blueskykml_version,
            "output": {
                "directory": output_directory,
                "hysplit_output_file": os.path.basename(
                    self._hysplit_output_file),
                "smoke_dispersion_kmz_filename":
                    files['smoke_dispersion_kmz']['name'],
                "fire_kmz_filename": files['fire_kmz']['name'],
                "fire_locations_csv_filename":
                    files['fire_locations_csv']['name'],
                "fire_events_csv_filename": files['fire_events_csv']['name']
                # TODO: add location of image files, etc.
            }
        }