def _to_utc(self, dt):
    if dt:
        if self.__utc_offset:
            dt = dt - datetime.timedelta(
                hours=datetimeutils.parse_utc_offset(self.__utc_offset))
        # else, assume zero offset
    return dt
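# A minimal standalone sketch of the conversion above, assuming
# parse_utc_offset("-07:00") returns -7.0 (hours as a float), which is
# how the snippets here consistently use it: 09:00 local at UTC-7 is
# 16:00 UTC, since subtracting a negative offset adds hours.
import datetime

utc_offset = -7.0  # stand-in for parse_utc_offset("-07:00")
local_dt = datetime.datetime(2019, 8, 1, 9, 0)
utc_dt = local_dt - datetime.timedelta(hours=utc_offset)
assert utc_dt == datetime.datetime(2019, 8, 1, 16, 0)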
def run(fires_manager):
    """Runs localmet module

    Args:
     - fires_manager -- bluesky.models.fires.FiresManager object
    """
    fires_manager.processed(__name__, __version__)
    if not fires_manager.met:
        raise ValueError(NO_MET_ERROR_MSG)

    arl_profiler = arlprofiler.ArlProfiler(fires_manager.met.get('files'),
        time_step=Config().get('localmet', 'time_step'))
    logging.debug("Extracting localmet data for %d fires",
        len(fires_manager.fires))
    for fire in fires_manager.fires:
        with fires_manager.fire_failure_handler(fire):
            # Make sure fire has at least some locations, but
            # iterate first through active areas and then through
            # locations in order to get utc_offset and time windows
            if not fire.locations:
                raise ValueError(NO_ACTIVITY_ERROR_MSG)

            for aa in fire.active_areas:
                # parse_utc_offset makes sure utc offset is defined and valid
                utc_offset = parse_utc_offset(aa.get('utc_offset'))
                tw = parse_datetimes(aa, 'start', 'end')
                for loc in aa.locations:
                    latlng = LatLng(loc)
                    loc['localmet'] = arl_profiler.profile(latlng.latitude,
                        latlng.longitude, tw['start'], tw['end'], utc_offset)
def _infer_time_windows_from_fires(fires_manager):
    time_windows = []
    if fires_manager.fires:
        logging.debug("Met time window determined from fire activity data")
        # Find earliest and latest datetimes that include all fire activity periods
        # TODO: be more intelligent with possible gaps, so that met files
        #   for times when no fire is growing are excluded?
        for fire in fires_manager.fires:
            with fires_manager.fire_failure_handler(fire):
                if 'activity' in fire:
                    for a in fire.activity:
                        # parse_utc_offset makes sure utc offset is defined and valid
                        utc_offset = parse_utc_offset(
                            a.get('location', {}).get('utc_offset'))
                        offset = datetime.timedelta(hours=utc_offset)
                        tw = parse_datetimes(a, 'start', 'end')
                        if tw['start'] > tw['end']:
                            raise ValueError(
                                "Invalid activity time window - start: {}, end: {}"
                                .format(tw['start'], tw['end']))
                        start = tw['start'] - offset
                        end = tw['end'] - offset
                        time_windows.append({'start': start, 'end': end})
    return time_windows
def _infer_time_windows_from_fires(fires_manager):
    time_windows = []
    if fires_manager.fires:
        logging.debug("Met time window determined from fire activity data")
        # Find earliest and latest datetimes that include all fire activity periods
        # TODO: be more intelligent with possible gaps, so that met files
        #   for times when no fire is growing are excluded?
        for fire in fires_manager.fires:
            with fires_manager.fire_failure_handler(fire):
                for aa in fire.active_areas:
                    # TODO: Because of the following:
                    #   a) start, end, & utc_offset are allowed to be set
                    #      per specified point or perimeter rather than per
                    #      active area, and
                    #   b) accessing [perim|sp].[start|end|utc_offset] will
                    #      fall back on the aa's values if not defined in
                    #      the sp/perim,
                    # maybe we should iterate through the aa's locations
                    # (sp, perim) and execute the following logic on each
                    # location. We'd need to make sure that time window
                    # merging and other downstream logic aren't broken or
                    # negatively affected.
                    utc_offset = parse_utc_offset(aa.get('utc_offset'))
                    offset = datetime.timedelta(hours=utc_offset)
                    tw = parse_datetimes(aa, 'start', 'end')
                    if tw['start'] > tw['end']:
                        raise ValueError(
                            "Invalid activity time window - start: {}, end: {}"
                            .format(tw['start'], tw['end']))
                    start = tw['start'] - offset
                    end = tw['end'] - offset
                    time_windows.append({'start': start, 'end': end})
    return time_windows
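# A hedged sketch of what downstream merging of the collected windows
# could look like -- the "earliest and latest datetimes that include all
# fire activity periods" named in the comment above; the real merging
# logic (and its gap handling) may differ.
import datetime

def merge_time_windows(time_windows):
    # Collapse per-activity UTC windows into one overall met time window
    return {
        'start': min(tw['start'] for tw in time_windows),
        'end': max(tw['end'] for tw in time_windows),
    }

merged = merge_time_windows([
    {'start': datetime.datetime(2019, 8, 1, 7), 'end': datetime.datetime(2019, 8, 2, 7)},
    {'start': datetime.datetime(2019, 8, 1, 19), 'end': datetime.datetime(2019, 8, 3, 7)},
])
assert merged == {'start': datetime.datetime(2019, 8, 1, 7),
    'end': datetime.datetime(2019, 8, 3, 7)}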
def run(fires_manager):
    """Runs localmet module

    Args:
     - fires_manager -- bluesky.models.fires.FiresManager object
    """
    logging.info("Running localmet module")
    fires_manager.processed(__name__, __version__)
    if not fires_manager.met:
        raise ValueError("Specify met files to use in localmet")

    arl_profiler = ArlProfiler(fires_manager.met.get('files'),
        time_step=Config.get('localmet', 'time_step'))
    logging.debug("Extracting localmet data for %d fires",
        len(fires_manager.fires))
    for fire in fires_manager.fires:
        with fires_manager.fire_failure_handler(fire):
            if not fire.get('activity'):
                raise ValueError("Missing activity data required for localmet")
            for a in fire['activity']:
                latlng = LatLng(a.get('location'))
                # parse_utc_offset makes sure utc offset is defined and valid
                utc_offset = parse_utc_offset(
                    a.get('location', {}).get('utc_offset'))
                tw = parse_datetimes(a, 'start', 'end')
                a['localmet'] = arl_profiler.profile(latlng.latitude,
                    latlng.longitude, tw['start'], tw['end'], utc_offset)
def _f(fire, working_dir):
    # TODO: create and change to working directory here (per fire),
    #   above (one working dir for all fires), or below (per activity
    #   window)...or just let plumerise create a temp working dir (as
    #   it's currently doing)?
    for aa in fire.active_areas:
        start = aa.get('start')
        if not start:
            raise ValueError(MISSING_START_TIME_ERROR_MSG)
        start = datetimeutils.parse_datetime(aa.get('start'), 'start')
        if not aa.get('timeprofile'):
            raise ValueError(MISSING_TIMEPROFILE_ERROR_MSG)

        for loc in aa.locations:
            if not loc.get('consumption', {}).get('summary'):
                raise ValueError(MISSING_CONSUMPTION_ERROR_MSG)

            # Fill in missing sunrise / sunset
            if any([loc.get(k) is None
                    for k in ('sunrise_hour', 'sunset_hour')]):
                # default: UTC
                utc_offset = datetimeutils.parse_utc_offset(
                    loc.get('utc_offset', 0.0))

                # Use NOAA-standard sunrise/sunset calculations
                latlng = locationutils.LatLng(loc)
                s = sun.Sun(lat=latlng.latitude, lng=latlng.longitude)
                d = start.date()
                # just set them both, even if one is already set
                loc["sunrise_hour"] = s.sunrise_hr(d, utc_offset)
                loc["sunset_hour"] = s.sunset_hr(d, utc_offset)

            fire_working_dir = _get_fire_working_dir(fire, working_dir)
            plumerise_data = pr.compute(aa['timeprofile'],
                loc['consumption']['summary'], loc,
                working_dir=fire_working_dir)
            loc['plumerise'] = plumerise_data['hours']

            if config.get("load_heat"):
                if 'fuelbeds' not in loc:
                    raise ValueError(
                        "Fuelbeds should exist before loading heat in plumerise")
                loc["fuelbeds"][0]["heat"] = _loadHeat(fire_working_dir)
def _filter(fire, active_area):
    if not isinstance(active_area, dict):
        self._fail_fire(fire, self.MISSING_FIRE_LOCATION_INFO_MSG)
    elif not active_area.get('start') or not active_area.get('end'):
        self._fail_fire(fire, self.MISSING_FIRE_LOCATION_INFO_MSG)

    utc_offset = datetime.timedelta(
        hours=parse_utc_offset(active_area.get('utc_offset') or 0))
    aa_s = to_datetime(active_area['start'])
    # check if e_is_local, since we're comparing aa_s against e
    if not e_is_local:
        aa_s = aa_s - utc_offset
    aa_e = to_datetime(active_area['end'])
    # same thing, but with s_is_local
    if not s_is_local:
        aa_e = aa_e - utc_offset

    # note that this filters if aa's start/end matches the cutoff
    # (e.g. if aa's start and the filter's end are both 2019-01-01T00:00:00)
    return (s and aa_e <= s) or (e and aa_s >= e)
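# Worked example of the cutoff note above: with a filter end
# e = 2019-01-01T00:00:00 and an active area starting at exactly that
# moment, aa_s >= e holds, so the active area is filtered out even
# though it only touches the boundary.
import datetime

e = datetime.datetime(2019, 1, 1, 0, 0, 0)
aa_s = datetime.datetime(2019, 1, 1, 0, 0, 0)
assert aa_s >= e  # filtered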
def _f(fire):
    # TODO: create and change to working directory here (per fire),
    #   above (one working dir for all fires), or below (per activity
    #   window)...or just let plumerise create a temp working dir (as
    #   it's currently doing)?
    for a in fire.activity:
        if not a.get('consumption', {}).get('summary'):
            raise ValueError("Missing fire activity consumption data "
                "required for FEPS plumerise")

        # Fill in missing sunrise / sunset
        if any([a['location'].get(k) is None
                for k in ('sunrise_hour', 'sunset_hour')]):
            start = datetimeutils.parse_datetime(a['start'], 'start')
            if not start:
                raise ValueError("Missing fire activity start time "
                    "required by FEPS plumerise")

            # default: UTC
            utc_offset = datetimeutils.parse_utc_offset(
                a['location'].get('utc_offset', 0.0))

            # Use NOAA-standard sunrise/sunset calculations
            latlng = locationutils.LatLng(a['location'])
            s = sun.Sun(lat=latlng.latitude, lng=latlng.longitude)
            d = start.date()
            # just set them both, even if one is already set
            a['location']["sunrise_hour"] = s.sunrise_hr(d, utc_offset)
            a['location']["sunset_hour"] = s.sunset_hr(d, utc_offset)

        if not a.get('timeprofile'):
            raise ValueError("Missing timeprofile data required for "
                "computing FEPS plumerise")
        plumerise_data = pr.compute(a['timeprofile'],
            a['consumption']['summary'], a['location'],
            working_dir=_get_working_dir(fire))
        a['plumerise'] = plumerise_data['hours']
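# Hedged usage sketch of the sunrise/sunset fill-in above, with made-up
# coordinates and date; the Sun(lat=..., lng=...) constructor and the
# sunrise_hr/sunset_hr(date, utc_offset) signatures are taken from the
# snippet itself, while the import path is assumed.
import datetime
from bluesky import sun  # assumed import path

s = sun.Sun(lat=45.5, lng=-120.3)
d = datetime.date(2019, 8, 1)
sunrise_hour = s.sunrise_hr(d, -7)  # local hour of sunrise at UTC-7
sunset_hour = s.sunset_hr(d, -7)    # local hour of sunset at UTC-7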
def _parse_date_time(self, date_time):
    """Parses 'date_time' field, found in BSF fire data

    Note: older BSF fire data formatted the date_time field without
    local timezone information, misrepresenting everything as UTC. E.g.:

        '201405290000Z'

    Newer (and current) SF2 fire data formats date_time like so:

        '201508040000-04:00'

    Another, newer format:

        '2020-05-13T00:00:00.000-07:00'

    with the true utc offset embedded in the string.
    """
    start = None
    utc_offset = None
    if date_time:
        try:
            for matcher, fmt in self.DATE_TIME_MATCHERS:
                m = matcher.match(date_time)
                if m:
                    start = datetime.datetime.strptime(m.group(1), fmt)
                    if len(m.groups()) > 1:
                        utc_offset = parse_utc_offset(m.groups()[-1])
                    break
        except Exception:
            logging.warning("Failed to parse 'date_time' value %s", date_time)
            logging.debug(traceback.format_exc())

    return start, utc_offset
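# A hypothetical sketch of (matcher, fmt) pairs that DATE_TIME_MATCHERS
# might hold, one per format named in the docstring; the actual class
# attribute isn't shown here, so these patterns are illustrative only.
# Note that the Z-suffixed pattern captures a single group, so the
# len(m.groups()) > 1 check above leaves utc_offset unset for it.
import re

DATE_TIME_MATCHERS = [
    # e.g. '2020-05-13T00:00:00.000-07:00'
    (re.compile(r'^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})\.\d{3}([+-]\d{2}:\d{2})$'),
        '%Y-%m-%dT%H:%M:%S'),
    # e.g. '201508040000-04:00'
    (re.compile(r'^(\d{12})([+-]\d{2}:\d{2})$'), '%Y%m%d%H%M'),
    # e.g. '201405290000Z' -- no usable local utc offset
    (re.compile(r'^(\d{12})Z$'), '%Y%m%d%H%M'),
]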
def _parse_date_time(self, date_time):
    """Parses 'date_time' field, found in BSF fire data

    Note: older BSF fire data formatted the date_time field without
    local timezone information, misrepresenting everything as UTC. E.g.:

        '201405290000Z'

    Newer (and current) SF2 fire data formats date_time like so:

        '201508040000-04:00'

    with the true utc offset embedded in the string.
    """
    start = None
    utc_offset = None
    if date_time:
        try:
            m = self.DATE_TIME_MATCHER.match(date_time)
            if m:
                start = datetime.datetime.strptime(m.group(1),
                    self.DATE_TIME_FMT)
                utc_offset = parse_utc_offset(m.group(3))
            else:
                m = self.OLD_DATE_TIME_MATCHER.match(date_time)
                if m:
                    start = datetime.datetime.strptime(m.group(1),
                        self.DATE_TIME_FMT)
                    # Note: we don't know the utc offset; don't set it
        except Exception:
            logging.warning("Failed to parse 'date_time' value %s", date_time)
            logging.debug(traceback.format_exc())

    return start, utc_offset
def _get_utc_offset(self, location):
    utc_offset = location.get('utc_offset')
    if utc_offset:
        return datetime.timedelta(hours=parse_utc_offset(utc_offset))
    else:
        return datetime.timedelta(0)
def run(fires_manager):
    """Runs localmet module

    Args:
     - fires_manager -- bluesky.models.fires.FiresManager object
    """
    fires_manager.processed(__name__, __version__)
    if not fires_manager.met:
        raise ValueError(NO_MET_ERROR_MSG)

    start_utc = None
    end_utc = None

    # keep array of references to locations passed into arlprofiler,
    # to update with local met data after the bulk profiler is called
    locations = []
    # actual array of locations to pass into arlprofiler
    profiler_locations = []

    for fire in fires_manager.fires:
        with fires_manager.fire_failure_handler(fire):
            # Make sure fire has at least some locations, but
            # iterate first through active areas and then through
            # locations in order to get utc_offset and time windows
            if not fire.locations:
                raise ValueError(NO_ACTIVITY_ERROR_MSG)

            for aa in fire.active_areas:
                # parse_utc_offset makes sure utc offset is defined and valid
                utc_offset = parse_utc_offset(aa.get('utc_offset'))
                tw = parse_datetimes(aa, 'start', 'end')
                # subtract utc_offset, since we want to get back to utc
                loc_start_utc = tw['start'] - datetime.timedelta(
                    hours=utc_offset)
                start_utc = min(start_utc, loc_start_utc) if start_utc else loc_start_utc
                loc_end_utc = tw['end'] - datetime.timedelta(hours=utc_offset)
                # use max here, so the window extends through the latest end
                end_utc = max(end_utc, loc_end_utc) if end_utc else loc_end_utc

                for loc in aa.locations:
                    latlng = LatLng(loc)
                    p_loc = {
                        'latitude': latlng.latitude,
                        'longitude': latlng.longitude
                    }
                    locations.append(loc)
                    profiler_locations.append(p_loc)

    if len(locations) != len(profiler_locations):
        raise RuntimeError(FAILED_TO_COMPILE_INPUT_ERROR_MSG)

    if not start_utc or not end_utc:
        raise RuntimeError(NO_START_OR_END_ERROR_MSG)

    arl_profiler = arlprofiler.ArlProfiler(fires_manager.met.get('files'),
        time_step=Config().get('localmet', 'time_step'))
    logging.debug("Extracting localmet data for %d locations",
        len(profiler_locations))
    localmet = arl_profiler.profile(start_utc, end_utc, profiler_locations)

    if len(localmet) != len(locations):
        raise RuntimeError(PROFILER_RUN_ERROR_MSG)

    for i in range(len(localmet)):
        locations[i]['localmet'] = localmet[i]
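# Minimal sketch of the bulk-profile bookkeeping above: locations and
# profiler_locations are parallel lists, so the profiler's i-th result
# can be written back onto the i-th original location dict. The values
# here are made up.
locations = [{'name': 'loc-a'}, {'name': 'loc-b'}]   # original dicts
localmet = [{'temp': [280.1]}, {'temp': [282.4]}]    # hypothetical profiler output
for i in range(len(localmet)):
    locations[i]['localmet'] = localmet[i]
assert locations[0]['localmet']['temp'] == [280.1]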
def _get_utc_offset(self, aa):
    utc_offset = aa.get('utc_offset')
    return parse_utc_offset(utc_offset) if utc_offset else 0.0
def _run(self, wdir):
    """Runs vsmoke

    args:
     - wdir -- working directory
    """
    self._set_input_file_vars(wdir)
    self._set_kml_vars(wdir)

    # For each fire, run VSMOKE and VSMOKEGIS
    for fire in self._fires:
        # TODO: check to make sure start + num_hours is within the fire's
        #   activity windows
        in_var = INPUTVariables(fire)
        utc_offset = fire.get('location', {}).get('utc_offset')
        utc_offset = parse_utc_offset(utc_offset) if utc_offset else 0.0
        # TODO: remove the following line and just rename 'timezone' to
        #   'utc_offset' in all subsequent code
        timezone = utc_offset

        # Get emissions for fire
        if not fire.emissions or not fire.consumption:
            continue

        logging.debug("%d hour run time for fireID %s", self._num_hours,
            fire["id"])

        # Run VSMOKEGIS for each hour
        for hr in range(self._num_hours):
            local_dt = self._compute_local_dt(fire, hr)
            self._write_iso_input(fire, local_dt, in_var)
            self._execute(self.BINARIES['VSMOKEGIS'], working_dir=wdir)

            # TODO: replace 'hr' with 'local_dt' in the suffix
            suffix = "{}_hour{}".format(fire.id, str(hr + 1))
            self._archive_file("vsmkgs.iso", src_dir=wdir, suffix=suffix)
            self._archive_file("vsmkgs.opt", src_dir=wdir, suffix=suffix)
            self._archive_file("vsmkgs.ipt", src_dir=wdir, suffix=suffix)

            iso_file = os.path.join(wdir, "vsmkgs.iso")

            # Make KML file
            kml_name = in_var.fireID + "_" + str(hr + 1) + ".kml"
            kml_path = os.path.join(wdir, kml_name)
            self._build_kml(kml_path, in_var, iso_file)
            self._kmz_files.append(kml_path)
            self._my_kmz.add_kml(kml_name, fire, hr)

            pm25 = fire.timeprofiled_emissions[local_dt]['PM2.5']
            self._add_geo_json(in_var, iso_file, fire['id'], timezone,
                hr, pm25)

        # Write input files
        self._write_input(fire, in_var)

        # Run VSMOKE for fire
        self._execute(self.BINARIES['VSMOKE'], working_dir=wdir)

        # Rename input and output files and archive
        self._archive_file("VSMOKE.IPT", src_dir=wdir, suffix=fire.id)
        self._archive_file("VSMOKE.OUT", src_dir=wdir, suffix=fire.id)

    # Make KMZ file
    self._my_kmz.write()
    z = zipfile.ZipFile(self._kmz_filename, 'w', zipfile.ZIP_DEFLATED)
    for kml in self._kmz_files:
        if os.path.exists(kml):
            z.write(kml, os.path.basename(kml))
        else:
            logging.error('Failure while trying to write KMZ file -- '
                'KML file does not exist')
            logging.debug('File "%s" does not exist', kml)
    z.write(self._legend_image, os.path.basename(self._legend_image))
    z.close()

    r = {"output": {"kmz_filename": self._kmz_filename}}

    json_file_name = self._create_geo_json(wdir)
    if json_file_name:
        r['output']['json_file_name'] = json_file_name

    # TODO: anything else to include in response?
    return r
def _set_fire_data(self, fires):
    self._fires = []

    # TODO: aggregating over all fires (if possible),
    #   use self.model_start and self.model_end as the dispersion time
    #   window, and then look at the activity window(s) of each fire to
    #   fill in emissions for each fire spanning the hysplit time window
    # TODO: determine the set of arl files by aggregating arl files
    #   specified per activity per fire, or expect global arl file
    #   specifications? (if aggregating over fires, make sure they're
    #   consistent with the met domain; if not, raise an exception or
    #   run them separately...raising an exception would be easier for now)
    # Make sure met files span dispersion time window
    for fire in fires:
        try:
            if 'activity' not in fire:
                raise ValueError(
                    "Missing fire activity data required for computing dispersion")
            activity_fields = self._required_activity_fields() + ('fuelbeds', 'location')
            for a in fire.activity:
                if any([not a.get(f) for f in activity_fields]):
                    raise ValueError("Each activity window must have {} in "
                        "order to compute {} dispersion".format(
                        ','.join(activity_fields), self.__class__.__name__))
                if any([not fb.get('emissions') for fb in a['fuelbeds']]):
                    raise ValueError(
                        "Missing emissions data required for computing dispersion")

                # TODO: handle the case where heat is defined by phase but
                #   not total (just make sure each phase is defined, and
                #   set total to the sum)
                heat = None
                heat_values = list(itertools.chain.from_iterable(
                    [fb.get('heat', {}).get('total', [None])
                        for fb in a['fuelbeds']]))
                if not any([v is None for v in heat_values]):
                    heat = sum(heat_values)
                    if heat < 1.0e-6:
                        logging.debug("Fire %s activity window %s - %s has "
                            "less than 1.0e-6 total heat; skip...",
                            fire.id, a['start'], a['end'])
                        continue
                # else, just forget about heat

                utc_offset = a.get('location', {}).get('utc_offset')
                utc_offset = parse_utc_offset(utc_offset) if utc_offset else 0.0

                # TODO: only include plumerise and timeprofile keys within
                #   the model run time window; and somehow fill in gaps
                #   (is this possible?)
                all_plumerise = a.get('plumerise', {})
                all_timeprofile = a.get('timeprofile', {})
                plumerise = {}
                timeprofile = {}
                for i in range(self._num_hours):
                    local_dt = self._model_start + timedelta(hours=(i + utc_offset))
                    # TODO: will all_plumerise and all_timeprofile always
                    #   have string-valued keys?
                    local_dt = local_dt.strftime('%Y-%m-%dT%H:%M:%S')
                    plumerise[local_dt] = all_plumerise.get(local_dt) or self.MISSING_PLUMERISE_HOUR
                    timeprofile[local_dt] = all_timeprofile.get(local_dt) or self.MISSING_TIMEPROFILE_HOUR

                # sum the emissions across all fuelbeds, but keep them
                # separate by phase
                emissions = {p: {} for p in PHASES}
                for fb in a['fuelbeds']:
                    for p in PHASES:
                        for s in fb['emissions'][p]:
                            emissions[p][s] = (emissions[p].get(s, 0.0)
                                + sum(fb['emissions'][p][s]))

                timeprofiled_emissions = {}
                for dt in timeprofile:
                    timeprofiled_emissions[dt] = {}
                    for e in self.SPECIES:
                        timeprofiled_emissions[dt][e] = sum([
                            timeprofile[dt][p] * emissions[p].get(e, 0.0)
                                for p in PHASES
                        ])

                # consumption = datautils.sum_nested_data(
                #     [fb.get("consumption", {}) for fb in a['fuelbeds']],
                #     'summary', 'total')
                consumption = a['consumption']['summary']

                latlng = locationutils.LatLng(a['location'])

                f = Fire(
                    id=fire.id,
                    meta=fire.get('meta', {}),
                    start=a['start'],
                    area=a['location']['area'],
                    latitude=latlng.latitude,
                    longitude=latlng.longitude,
                    utc_offset=utc_offset,
                    plumerise=plumerise,
                    timeprofile=timeprofile,
                    emissions=emissions,
                    timeprofiled_emissions=timeprofiled_emissions,
                    consumption=consumption
                )
                if heat:
                    f['heat'] = heat
                self._fires.append(f)

        except Exception:
            if self.config('skip_invalid_fires'):
                continue
            else:
                raise
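# Worked example of the timeprofiled-emissions arithmetic above, with
# made-up numbers: given an hour's timeprofile fraction per phase and
# total PM2.5 per phase, the hour's emissions are the fraction-weighted
# sum across phases.
PHASES = ('flaming', 'smoldering', 'residual')
timeprofile_hr = {'flaming': 0.5, 'smoldering': 0.3, 'residual': 0.2}
emissions = {'flaming': {'PM2.5': 10.0}, 'smoldering': {'PM2.5': 6.0},
    'residual': {'PM2.5': 4.0}}
pm25_hr = sum(timeprofile_hr[p] * emissions[p].get('PM2.5', 0.0)
    for p in PHASES)
assert abs(pm25_hr - 7.6) < 1e-9  # 0.5*10.0 + 0.3*6.0 + 0.2*4.0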
def _get_utc_offset(self, a):
    utc_offset = a.get('location', {}).get('utc_offset')
    if utc_offset:
        return datetime.timedelta(hours=parse_utc_offset(utc_offset))
    else:
        return datetime.timedelta(0)
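# Hedged sketch contrasting the two return conventions used by the
# _get_utc_offset variants above, assuming parse_utc_offset('-04:00')
# returns -4.0 and assuming the import path:
import datetime
from bluesky.datetimeutils import parse_utc_offset  # assumed import path

offset_hours = parse_utc_offset('-04:00')           # -4.0 (float, hours)
offset_td = datetime.timedelta(hours=offset_hours)  # timedelta of -4 hours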